mirror of https://github.com/pgadmin-org/pgadmin4.git
synced 2025-02-25 18:55:31 -06:00

Improve code coverage and API test cases for Columns and Constraints (Index, Foreign Key, Check, Exclusion). Fixes #5332

This commit is contained in:
parent 4eb17afe4e
commit 8fcf527632
@@ -15,6 +15,7 @@ New features

Housekeeping
************

| `Issue #5332 <https://redmine.postgresql.org/issues/5332>`_ - Improve code coverage and API test cases for Columns and Constraints (Index, Foreign Key, Check, Exclusion).
| `Issue #5344 <https://redmine.postgresql.org/issues/5344>`_ - Improve code coverage and API test cases for Grant Wizard.

Bug fixes
@@ -0,0 +1,803 @@
{
  "column_create": [
    {
      "name": "Create: Add column with valid data",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "name": "test_column_add_",
        "cltype": "\"char\"",
        "attacl": [],
        "is_primary_key": false,
        "attnotnull": false,
        "attlen": null,
        "attprecision": null,
        "attoptions": [],
        "seclabels": [],
        "description": {
          "comment": "jsoncomment"
        }
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add Identity column with Always",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "server_min_version": 100000,
        "skip_msg": "Identity columns are not supported by EPAS/PG versions below 10.0.",
        "name": "test_column_add_",
        "cltype": "bigint",
        "attacl": [],
        "is_primary_key": false,
        "attnotnull": true,
        "attlen": null,
        "attprecision": null,
        "attoptions": [],
        "seclabels": [],
        "colconstype": "i",
        "attidentity": "a",
        "seqincrement": 1,
        "seqstart": 1,
        "seqmin": 1,
        "seqmax": 10,
        "seqcache": 1,
        "seqcycle": true
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add Identity column with As Default",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "col_data_type": "bigint",
        "server_min_version": 100000,
        "skip_msg": "Identity columns are not supported by EPAS/PG versions below 10.0.",
        "name": "test_column_add_",
        "cltype": "bigint",
        "attacl": [],
        "is_primary_key": false,
        "attnotnull": true,
        "attlen": null,
        "attprecision": null,
        "attoptions": [],
        "seclabels": [],
        "colconstype": "i",
        "attidentity": "d",
        "seqincrement": 2,
        "seqstart": 2,
        "seqmin": 2,
        "seqmax": 2000,
        "seqcache": 1,
        "seqcycle": true
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add Generated column",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "col_data_type": "bigint",
        "server_min_version": 120000,
        "skip_msg": "Generated columns are not supported by EPAS/PG versions below 12.0.",
        "name": "test_column_add_",
        "cltype": "bigint",
        "attacl": [],
        "is_primary_key": false,
        "attnotnull": true,
        "attlen": null,
        "attprecision": null,
        "attoptions": [],
        "seclabels": [],
        "colconstype": "g",
        "genexpr": "100 * 100"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add column with invalid data - without name",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "cltype": "\"char\"",
        "attacl": [],
        "is_primary_key": false,
        "attnotnull": false,
        "attlen": null,
        "attprecision": null,
        "attoptions": [],
        "seclabels": [],
        "description": {
          "comment": "jsoncomment"
        }
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "Could not find the required parameter (Name).",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add column to non-existing table",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "name": "test_column_add_",
        "cltype": "\"char\"",
        "attacl": [],
        "is_primary_key": false,
        "attnotnull": false,
        "attlen": null,
        "attprecision": null,
        "attoptions": [],
        "seclabels": [],
        "description": {
          "comment": "jsoncomment"
        },
        "table_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "could not find the specified table.",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add column with valid data while server down",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "name": "test_column_add_",
        "cltype": "\"char\"",
        "attacl": [],
        "is_primary_key": false,
        "attnotnull": false,
        "attlen": null,
        "attprecision": null,
        "attoptions": [],
        "seclabels": [],
        "description": {
          "comment": "jsoncomment"
        }
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    }
  ],
  "column_delete": [
    {
      "name": "Delete: Existing column",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Multiple existing columns",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Delete: Non-existing column",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "column_id": 9999999
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": "Error: Object not found.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Existing column while server down",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "[(False,'Mocked Internal Server Error')]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Existing column while server down-2",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "[(False,'Mocked Internal Server Error')]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "column_get": [
    {
      "name": "Get details: For existing column",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get details: For multiple existing columns",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Get details: For non-existing column",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "column_id": 999
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "could not find the specified column.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get details: For existing column while server down",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "[(False,'Mocked Internal Server Error')]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get details: For multiple existing columns while server down",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "[(False,'Mocked Internal Server Error')]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": true
    }
  ],
  "column_get_nodes": [
    {
      "name": "Get nodes: For existing column",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get nodes: For multiple existing columns",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Get nodes: For non-existing column",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "column_id": 999
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "could not find the specified column.",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "column_sql": [
    {
      "name": "Get sql: For existing column",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get sql: For non-existing column",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "column_id": 999
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "could not find the specified column.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get sql: For existing column while server down",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "[(False,'Mocked Internal Server Error')]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "column_dependencies_dependents": [
    {
      "name": "Get dependents: For existing column",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_dependent": true
    },
    {
      "name": "Get dependencies: For existing column",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_dependent": false
    }
  ],
  "column_msql": [
    {
      "name": "Get msql: Change timestamp array length",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "timestamp(3) with time zone[]",
        "name": "test_column_msql_"
      },
      "test_data": {
        "new_len": 6
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": "\"ALTER TABLE %s.%s\\n    ALTER COLUMN %s TYPE timestamp(%s) with time zone [];\" % (self.schema_name, self.table_name, self.column_name, expected_len)"
      }
    },
    {
      "name": "Get msql: Change timestamp length",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "timestamp(4) with time zone",
        "name": "test_column_msql_"
      },
      "test_data": {
        "new_len": 7
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": "\"ALTER TABLE %s.%s\\n    ALTER COLUMN %s TYPE timestamp(%s) with time zone ;\" % (self.schema_name, self.table_name, self.column_name, expected_len)"
      }
    },
    {
      "name": "Get msql: Change numeric array precision",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "numeric(5,2)[]",
        "name": "test_column_msql_"
      },
      "test_data": {
        "new_len": 5,
        "new_precision": 4
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": "\"ALTER TABLE %s.%s\\n    ALTER COLUMN %s TYPE numeric(%s, %s)[];\" % (self.schema_name, self.table_name, self.column_name, expected_len, expected_precision)"
      }
    },
    {
      "name": "Get msql: Change numeric precision",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "numeric(6,3)",
        "name": "test_column_msql_"
      },
      "test_data": {
        "new_len": 6,
        "new_precision": 5
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": "\"ALTER TABLE %s.%s\\n    ALTER COLUMN %s TYPE numeric(%s, %s);\" % (self.schema_name, self.table_name, self.column_name, expected_len, expected_precision)"
      }
    },
    {
      "name": "Get msql: Change numeric array length",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "numeric(6,3)[]",
        "name": "test_column_msql_"
      },
      "test_data": {
        "new_len": 8,
        "old_precision": 3
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": "\"ALTER TABLE %s.%s\\n    ALTER COLUMN %s TYPE numeric(%s, %s)[];\" % (self.schema_name, self.table_name, self.column_name, expected_len, expected_precision)"
      }
    },
    {
      "name": "Get msql: Change numeric length",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "numeric(6,4)",
        "name": "test_column_msql_"
      },
      "test_data": {
        "new_len": 8,
        "old_precision": 4
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": "\"ALTER TABLE %s.%s\\n    ALTER COLUMN %s TYPE numeric(%s, %s);\" % (self.schema_name, self.table_name, self.column_name, expected_len, expected_precision)"
      }
    },
    {
      "name": "Get msql: Change numeric array len and precision",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "numeric(10,5)[]",
        "name": "test_column_msql_"
      },
      "test_data": {
        "new_len": 15,
        "new_precision": 8
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": "\"ALTER TABLE %s.%s\\n    ALTER COLUMN %s TYPE numeric(%s, %s)[];\" % (self.schema_name, self.table_name, self.column_name, expected_len, expected_precision)"
      }
    },
    {
      "name": "Get msql: Change numeric len and precision",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "numeric(12,6)",
        "name": "test_column_msql_"
      },
      "test_data": {
        "new_len": 14,
        "new_precision": 9
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": "\"ALTER TABLE %s.%s\\n    ALTER COLUMN %s TYPE numeric(%s, %s);\" % (self.schema_name, self.table_name, self.column_name, expected_len, expected_precision)"
      }
    }
  ],
  "column_put": [
    {
      "name": "Put: Update existing column comments and null constraints",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "char"
      },
      "test_data": {
        "attnotnull": true,
        "description": "This is test comment for column"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Put: Update existing column to identity column as Default",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "bigint",
        "server_min_version": 100000,
        "skip_msg": "Identity columns are not supported by EPAS/PG versions below 10.0."
      },
      "test_data": {
        "attnotnull": true,
        "attidentity": "d",
        "seqincrement": 2,
        "seqstart": 2,
        "seqmin": 2,
        "seqmax": 2000,
        "seqcache": 1,
        "colconstype": "i",
        "seqcycle": true
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Put: Update existing column drop Identity by changing constraint type to NONE",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "bigint",
        "server_min_version": 100000,
        "skip_msg": "Identity columns are not supported by EPAS/PG versions below 10.0.",
        "create_identity_column": true
      },
      "test_data": {
        "colconstype": "n"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Put: Update existing column data type",
      "is_positive_test": true,
      "inventory_data": {
        "data_type": "integer"
      },
      "test_data": {
        "cltype": "bigint",
        "description": "This is test comment for column"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Put: Update existing column data type while server is down",
      "is_positive_test": false,
      "inventory_data": {
        "data_type": "integer"
      },
      "test_data": {
        "cltype": "bigint",
        "description": "This is test comment for column"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "[(False,'Mocked Internal Server Error')]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "column_get_statistics": [
    {
      "name": "Get statistics: For existing column",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get statistics: For non-existing column",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "column_id": 999
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "could not find the specified column.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get statistics: For existing column while server down",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "[(False,'Mocked Internal Server Error')]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ]
}
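Each top-level key in this JSON file feeds one test class: `utils.generate_scenarios` looks up the list under a key and emits one named scenario per entry, attaching every field as an attribute on the test instance. The sketch below illustrates that idea only; the real helper lives in pgAdmin's regression utilities and the file name `column_test_data.json` is an assumption about the layout, not confirmed by this diff:

```python
# Minimal sketch of the JSON-driven scenario generation (assumed shape).
import json
import os

# Hypothetical path; the tests load their own test_cases in utils.
CASES_PATH = os.path.join(os.path.dirname(__file__), "column_test_data.json")

with open(CASES_PATH) as fp:
    test_cases = json.load(fp)


def generate_scenarios(key, test_cases):
    # One (name, attrs) pair per JSON entry; the test runner builds one
    # scenario per pair, so is_positive_test, test_data, mock_data,
    # expected_data, is_list, etc. all become instance attributes.
    scenarios = []
    for case in test_cases.get(key, []):
        attrs = dict(case)
        name = attrs.pop("name")
        scenarios.append((name, attrs))
    return scenarios
```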
@@ -9,6 +9,7 @@

import json
import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@@ -20,108 +21,44 @@ from pgadmin.utils.route import BaseTestGenerator

from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from pgadmin.utils import server_utils as server_utils
from . import utils as columns_utils


class ColumnAddTestCase(BaseTestGenerator):
    """This class will add new column under table node."""
    scenarios = [
        ('Add column', dict(
            url='/browser/column/obj/',
            data={
                'cltype': "\"char\"",
                'attacl': [],
                'is_primary_key': False,
                'attnotnull': False,
                'attlen': None,
                'attprecision': None,
                'attoptions': [],
                'seclabels': [],
            })),
        ('Add Identity column with Always', dict(
            url='/browser/column/obj/',
            server_min_version=100000,
            skip_msg='Identity column are not supported by EPAS/PG 10.0 '
                     'and below.',
            data={
                'cltype': 'bigint',
                'attacl': [],
                'is_primary_key': False,
                'attnotnull': True,
                'attlen': None,
                'attprecision': None,
                'attoptions': [],
                'seclabels': [],
                'colconstype': 'i',
                'attidentity': 'a',
                'seqincrement': 1,
                'seqstart': 1,
                'seqmin': 1,
                'seqmax': 10,
                'seqcache': 1,
                'seqcycle': True
            })),
        ('Add Identity column with As Default', dict(
            url='/browser/column/obj/',
            col_data_type='bigint',
            server_min_version=100000,
            skip_msg='Identity column are not supported by EPAS/PG 10.0 '
                     'and below.',
            data={
                'cltype': 'bigint',
                'attacl': [],
                'is_primary_key': False,
                'attnotnull': True,
                'attlen': None,
                'attprecision': None,
                'attoptions': [],
                'seclabels': [],
                'colconstype': 'i',
                'attidentity': 'd',
                'seqincrement': 2,
                'seqstart': 2,
                'seqmin': 2,
                'seqmax': 2000,
                'seqcache': 1,
                'seqcycle': True
            })),
        ('Add Generated column', dict(
            url='/browser/column/obj/',
            col_data_type='bigint',
            server_min_version=120000,
            skip_msg='Generated column are not supported by EPAS/PG 12.0 '
                     'and below.',
            data={
                'cltype': 'bigint',
                'attacl': [],
                'is_primary_key': False,
                'attnotnull': True,
                'attlen': None,
                'attprecision': None,
                'attoptions': [],
                'seclabels': [],
                'colconstype': 'g',
                'genexpr': '100 * 100'
            })),
    ]
    url = '/browser/column/obj/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("column_create",
                                         columns_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]

        if hasattr(self, 'server_min_version'):
        # Check DB version
        if "server_min_version" in self.data:
            server_con = server_utils.connect_server(self, self.server_id)
            if not server_con["info"] == "Server connected.":
                raise Exception("Could not connect to server to add "
                                "a table.")
            if server_con["data"]["version"] < self.server_min_version:
                self.skipTest(self.skip_msg)
            if server_con["data"]["version"] < \
                    self.data["server_min_version"]:
                self.skipTest(self.data["skip_msg"])

        # Create db connection
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@@ -129,6 +66,8 @@ class ColumnAddTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
@@ -136,19 +75,34 @@ class ColumnAddTestCase(BaseTestGenerator):

    def runTest(self):
        """This function will add column under table node."""
        self.column_name = "test_column_add_%s" % (str(uuid.uuid4())[1:8])
        self.data.update({
            'name': self.column_name
        })
        if "name" in self.data:
            self.data["name"] = self.data["name"] + (str(uuid.uuid4())[1:8])

        # Add table
        response = self.tester.post(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(self.db_id) +
            '/' + str(self.schema_id) + '/' + str(self.table_id) + '/',
            data=json.dumps(self.data),
            content_type='html/json')
        self.assertEquals(response.status_code, 200)
        if self.is_positive_test:
            response = columns_utils.api_create(self)

            # Assert response
            utils.assert_status_code(self, response)

            # Verify in backend
            self.assertIsNotNone(columns_utils.verify_column
                                 (self.server, self.db_name,
                                  self.data["name"]),
                                 "Column not found")
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=eval(self.mock_data["return_value"])):
                    response = columns_utils.api_create(self)
            else:
                if 'table_id' in self.data:
                    self.table_id = self.data['table_id']

                response = columns_utils.api_create(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
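The negative "server down" scenarios work by patching the driver's `execute_scalar`/`execute_dict` with a `side_effect` built from `eval()` of the JSON string, so the patched call reports a failure tuple that the view surfaces as an HTTP 500. A self-contained illustration of that pattern (the class and function names here are illustrative, not pgAdmin's):

```python
from unittest.mock import patch

# The JSON stores the side-effect list as a string, e.g.
# "[(False, 'Mocked Internal Server Error'),(True,True)]".
return_value = "[(False, 'Mocked Internal Server Error'), (True, True)]"


class FakeConnection:
    def execute_scalar(self, sql):
        return True, 42  # normally returns (status, result)


def run_query(conn):
    status, result = conn.execute_scalar("SELECT 1")
    if not status:
        # In pgAdmin this branch becomes the 500 response's error_msg.
        raise RuntimeError(result)
    return result


conn = FakeConnection()
# side_effect consumes one tuple per call: first call fails, second succeeds.
with patch.object(FakeConnection, "execute_scalar",
                  side_effect=eval(return_value)):
    try:
        run_query(conn)
    except RuntimeError as err:
        print("mocked failure:", err)  # -> Mocked Internal Server Error
```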
@ -8,6 +8,7 @@
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
@@ -23,11 +24,17 @@ from . import utils as columns_utils

class ColumnDeleteTestCase(BaseTestGenerator):
    """This class will delete column under table node."""
    scenarios = [
        ('Delete column Node URL', dict(url='/browser/column/obj/'))
    ]
    url = "/browser/column/obj/"

    # Generates scenarios
    scenarios = utils.generate_scenarios("column_delete",
                                         columns_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@@ -36,6 +43,8 @@ class ColumnDeleteTestCase(BaseTestGenerator):
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@@ -43,10 +52,14 @@ class ColumnDeleteTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create column
        self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
        self.column_id = columns_utils.create_column(self.server,
                                                     self.db_name,
@@ -54,20 +67,49 @@ class ColumnDeleteTestCase(BaseTestGenerator):
                                                     self.table_name,
                                                     self.column_name)

    def runTest(self):
        """This function will drop column under table node."""
        self.column_name_1 = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])

        self.column_id_1 = columns_utils.create_column(self.server,
                                                       self.db_name,
                                                       self.schema_name,
                                                       self.table_name,
                                                       self.column_name_1)
        # Verify column creation
        col_response = columns_utils.verify_column(self.server, self.db_name,
                                                   self.column_name)
        if not col_response:
            raise Exception("Could not find the column to drop.")
        response = self.tester.delete(self.url + str(utils.SERVER_GROUP) +
                                      '/' + str(self.server_id) + '/' +
                                      str(self.db_id) + '/' +
                                      str(self.schema_id) + '/' +
                                      str(self.table_id) + '/' +
                                      str(self.column_id),
                                      follow_redirects=True)
        self.assertEquals(response.status_code, 200)

        col_response = columns_utils.verify_column(self.server, self.db_name,
                                                   self.column_name_1)
        if not col_response:
            raise Exception("Could not find the column to drop.")

    def runTest(self):
        """This function will drop column under table node."""

        if self.is_positive_test:
            if self.is_list:
                self.data["ids"] = [self.column_id, self.column_id_1]
                response = columns_utils.api_delete(self, "")
            else:
                response = columns_utils.api_delete(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=eval(self.mock_data["return_value"])):
                    response = columns_utils.api_delete(self)
            else:
                if 'column_id' in self.data:
                    self.column_id = self.data['column_id']
                response = columns_utils.api_delete(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
@@ -0,0 +1,109 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as columns_utils


class ColumnGetDependenciesDependentsTestCase(BaseTestGenerator):
    """This class will get column dependencies/dependents under table node."""
    url = '/browser/column/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("column_dependencies_dependents",
                                         columns_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create column
        self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
        self.column_id = columns_utils.create_column(self.server,
                                                     self.db_name,
                                                     self.schema_name,
                                                     self.table_name,
                                                     self.column_name)

        # Create column
        self.column_name_1 = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
        self.column_id_1 = columns_utils.create_column(self.server,
                                                       self.db_name,
                                                       self.schema_name,
                                                       self.table_name,
                                                       self.column_name_1)

    def runTest(self):
        """This function will fetch the column dependencies/dependents
        under table node."""
        if self.is_positive_test:
            if self.is_dependent:
                self.url = self.url + 'dependent/'
                response = columns_utils.api_get(self)
            else:
                self.url = self.url + 'dependency/'
                response = columns_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.is_dependent:
                self.url = self.url + 'dependent/'
            else:
                self.url = self.url + 'dependency/'

            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=eval(
                               self.mock_data["return_value"])):
                    response = columns_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
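These classes all delegate the HTTP call to small helpers such as `columns_utils.api_get`, which assemble the REST URL from the ids collected in setUp. The real helper lives in the columns tests' utils module and is not shown in this diff; a rough sketch of its assumed shape, inferred from how the tests call it (passing `""` selects the collection URL):

```python
# Sketch of the api_get helper these tests rely on (assumed shape).
def api_get(self, column_id=None):
    if column_id is None:
        column_id = self.column_id  # "" yields the list/collection URL
    return self.tester.get(
        "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(
            self.url, utils.SERVER_GROUP, self.server_id, self.db_id,
            self.schema_id, self.table_id, column_id),
        follow_redirects=True)
```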
@@ -8,6 +8,7 @@
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@@ -23,11 +24,17 @@ from . import utils as columns_utils

class ColumnGetTestCase(BaseTestGenerator):
    """This class will get column under table node."""
    scenarios = [
        ('Fetch columns under table node', dict(url='/browser/column/obj/'))
    ]
    url = '/browser/column/obj/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("column_get",
                                         columns_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@@ -36,6 +43,8 @@ class ColumnGetTestCase(BaseTestGenerator):
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@@ -43,27 +52,56 @@ class ColumnGetTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create column
        self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
        self.column_id = columns_utils.create_column(self.server,
                                                     self.db_name,
                                                     self.schema_name,
                                                     self.table_name,
                                                     self.column_name)
        if self.is_list:
            # Create column
            self.column_name_1 = "test_column_delete_%s" % \
                                 (str(uuid.uuid4())[1:8])
            self.column_id_1 = columns_utils.create_column(self.server,
                                                           self.db_name,
                                                           self.schema_name,
                                                           self.table_name,
                                                           self.column_name_1)

    def runTest(self):
        """This function will fetch the column under table node."""
        response = self.tester.get(self.url + str(utils.SERVER_GROUP) +
                                   '/' + str(self.server_id) + '/' +
                                   str(self.db_id) + '/' +
                                   str(self.schema_id) + '/' +
                                   str(self.table_id) + '/' +
                                   str(self.column_id),
                                   follow_redirects=True)
        self.assertEquals(response.status_code, 200)
        if self.is_positive_test:
            if self.is_list:
                response = columns_utils.api_get(self, "")
            else:
                response = columns_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=eval(self.mock_data["return_value"])):
                    if self.is_list:
                        response = columns_utils.api_get(self, "")
                    else:
                        response = columns_utils.api_get(self)
            else:
                if 'column_id' in self.data:
                    self.column_id = self.data['column_id']
                response = columns_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
@@ -8,7 +8,7 @@
##########################################################################

import uuid
import json
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@@ -22,13 +22,19 @@ from regression.python_test_utils import test_utils as utils
from . import utils as columns_utils


class ColumnDeleteMultipleTestCase(BaseTestGenerator):
    """This class will delete column under table node."""
    scenarios = [
        ('Delete column Node URL', dict(url='/browser/column/obj/'))
    ]
class ColumnGetNodesTestCase(BaseTestGenerator):
    """This class will get column node/nodes under table node."""
    url = '/browser/column/nodes/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("column_get_nodes",
                                         columns_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@@ -37,6 +43,8 @@ class ColumnDeleteMultipleTestCase(BaseTestGenerator):
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@@ -44,47 +52,47 @@ class ColumnDeleteMultipleTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create column
        self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
        self.column_id = columns_utils.create_column(self.server,
                                                     self.db_name,
                                                     self.schema_name,
                                                     self.table_name,
                                                     self.column_name)

        # Create column
        self.column_name_1 = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
        self.column_ids = [columns_utils.create_column(self.server,
                                                       self.db_name,
                                                       self.schema_name,
                                                       self.table_name,
                                                       self.column_name),
                           columns_utils.create_column(self.server,
        self.column_id_1 = columns_utils.create_column(self.server,
                                                       self.db_name,
                                                       self.schema_name,
                                                       self.table_name,
                                                       self.column_name_1)
                           ]

    def runTest(self):
        """This function will drop column under table node."""
        col_response = columns_utils.verify_column(self.server, self.db_name,
                                                   self.column_name)
        if not col_response:
            raise Exception("Could not find the column to drop.")
        """This function will fetch the column node/nodes under table node."""
        if self.is_positive_test:
            if self.is_list:
                response = columns_utils.api_get(self, "")
            else:
                response = columns_utils.api_get(self)

        col_response = columns_utils.verify_column(self.server, self.db_name,
                                                   self.column_name_1)
        if not col_response:
            raise Exception("Could not find the column to drop.")
            # Assert response
            utils.assert_status_code(self, response)
        else:
            if 'column_id' in self.data:
                self.column_id = self.data['column_id']
            response = columns_utils.api_get(self)

        data = {'ids': self.column_ids}
        response = self.tester.delete(self.url + str(utils.SERVER_GROUP) +
                                      '/' + str(self.server_id) + '/' +
                                      str(self.db_id) + '/' +
                                      str(self.schema_id) + '/' +
                                      str(self.table_id) + '/',
                                      follow_redirects=True,
                                      data=json.dumps(data),
                                      content_type='html/json'
                                      )
        self.assertEquals(response.status_code, 200)
            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
@@ -0,0 +1,100 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as columns_utils


class ColumnGetStatisticsTestCase(BaseTestGenerator):
    """This class will get column statistics under table node."""
    url = '/browser/column/stats/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("column_get_statistics",
                                         columns_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create column
        self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
        self.column_id = columns_utils.create_column(self.server,
                                                     self.db_name,
                                                     self.schema_name,
                                                     self.table_name,
                                                     self.column_name)

        # Create column
        self.column_name_1 = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
        self.column_id_1 = columns_utils.create_column(self.server,
                                                       self.db_name,
                                                       self.schema_name,
                                                       self.table_name,
                                                       self.column_name_1)

    def runTest(self):
        """This function will fetch the column statistics under table node."""
        if self.is_positive_test:
            response = columns_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=eval(self.mock_data["return_value"])):
                    response = columns_utils.api_get(self)
            elif 'column_id' in self.data:
                self.column_id = self.data['column_id']
                response = columns_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -9,7 +9,6 @@

import json
import uuid
from urllib.parse import urlencode

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@@ -25,80 +24,17 @@ from . import utils as columns_utils

class ColumnMsqlTestCase(BaseTestGenerator):
    """This class will test msql route of column with various combinations."""
    scenarios = [
        ('msql column change timestamp array length',
         dict(
             url='/browser/column/msql/',
             data_type='timestamp(3) with time zone[]',
             new_len=6,
             expected_res='ALTER TABLE {schema}.{table}\n    ALTER COLUMN '
                          '{column} TYPE timestamp({len}) with time zone [];'
         )),
        ('msql column change timestamp length',
         dict(
             url='/browser/column/msql/',
             data_type='timestamp(4) with time zone',
             new_len=7,
             expected_res='ALTER TABLE {schema}.{table}\n    ALTER COLUMN '
                          '{column} TYPE timestamp({len}) with time zone ;'
         )),
        ('msql column change numeric array precision',
         dict(
             url='/browser/column/msql/',
             data_type='numeric(5,2)[]',
             old_len=5,
             new_precision=4,
             expected_res='ALTER TABLE {schema}.{table}\n    ALTER COLUMN '
                          '{column} TYPE numeric({len}, {precision})[];'
         )),
        ('msql column change numeric precision',
         dict(
             url='/browser/column/msql/',
             data_type='numeric(6,3)',
             old_len=6,
             new_precision=5,
             expected_res='ALTER TABLE {schema}.{table}\n    ALTER COLUMN '
                          '{column} TYPE numeric({len}, {precision});'
         )),
        ('msql column change numeric array length',
         dict(
             url='/browser/column/msql/',
             data_type='numeric(6,3)[]',
             new_len=8,
             old_precision=3,
             expected_res='ALTER TABLE {schema}.{table}\n    ALTER COLUMN '
                          '{column} TYPE numeric({len}, {precision})[];'
         )),
        ('msql column change numeric length',
         dict(
             url='/browser/column/msql/',
             data_type='numeric(6,4)',
             new_len=8,
             old_precision=4,
             expected_res='ALTER TABLE {schema}.{table}\n    ALTER COLUMN '
                          '{column} TYPE numeric({len}, {precision});'
         )),
        ('msql column change numeric array len and precision',
         dict(
             url='/browser/column/msql/',
             data_type='numeric(10,5)[]',
             new_len=15,
             new_precision=8,
             expected_res='ALTER TABLE {schema}.{table}\n    ALTER COLUMN '
                          '{column} TYPE numeric({len}, {precision})[];'
         )),
        ('msql column change numeric len and precision',
         dict(
             url='/browser/column/msql/',
             data_type='numeric(12,6)',
             new_len=14,
             new_precision=9,
             expected_res='ALTER TABLE {schema}.{table}\n    ALTER COLUMN '
                          '{column} TYPE numeric({len}, {precision});'
         ))
    ]
    url = '/browser/column/msql/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("column_msql",
                                         columns_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@@ -107,6 +43,8 @@ class ColumnMsqlTestCase(BaseTestGenerator):
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@@ -114,68 +52,57 @@ class ColumnMsqlTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)
        self.column_name = "test_column_msql_%s" % (str(uuid.uuid4())[1:8])

        # Create column
        self.column_name = self.inventory_data["name"] + \
            (str(uuid.uuid4())[1:8])
        col_data_type = self.inventory_data["data_type"]
        self.column_id = columns_utils.create_column(self.server,
                                                     self.db_name,
                                                     self.schema_name,
                                                     self.table_name,
                                                     self.column_name,
                                                     self.data_type)
                                                     col_data_type)

    def runTest(self):
        col_response = columns_utils.verify_column(self.server, self.db_name,
                                                   self.column_name)
        if not col_response:
            raise Exception("Could not find the column to update.")

        data = {"attnum": self.column_id}

    def runTest(self):
        url_encode_data = {"attnum": self.column_id}
        expected_len = None
        expected_precision = None

        if hasattr(self, 'new_len'):
            data["attlen"] = self.new_len
            expected_len = self.new_len
        if hasattr(self, 'new_precision'):
            data["attprecision"] = self.new_precision
            expected_precision = self.new_precision
        if "new_len" in self.data:
            expected_len = self.data["new_len"]
            url_encode_data["attlen"] = expected_len

        response = self.tester.get(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' +
            str(self.db_id) + '/' +
            str(self.schema_id) + '/' +
            str(self.table_id) + '/' +
            str(self.column_id) + '?' +
            urlencode(data),
            follow_redirects=True)
        self.assertEquals(response.status_code, 200)
        if "new_precision" in self.data:
            expected_precision = self.data["new_precision"]
            url_encode_data["attprecision"] = expected_precision

        response = columns_utils.api_get_msql(self, url_encode_data)

        # Assert response
        utils.assert_status_code(self, response)

        response_data = json.loads(response.data.decode('utf-8'))

        if not expected_len and hasattr(self, 'old_len'):
            expected_len = self.old_len
        if not expected_len and ("old_len" in self.data):
            expected_len = self.data["old_len"]

        if not expected_precision and hasattr(self, 'old_precision'):
            expected_precision = self.old_precision
        if not expected_precision and ("old_precision" in self.data):
            expected_precision = self.data["old_precision"]

        self.assertEquals(
            response_data['data'],
            self.expected_res.format(
                **dict(
                    [('schema', self.schema_name),
                     ('table', self.table_name),
                     ('column', self.column_name),
                     ('len', expected_len),
                     ('precision', expected_precision)
                     ]
                )
            )
        )
        expected_sql = (eval(self.expected_data["test_result_data"]))
        self.assertEquals(response_data['data'], expected_sql)

    def tearDown(self):
        # Disconnect the database
@ -9,6 +9,7 @@

import json
import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
@ -25,77 +26,38 @@ from pgadmin.utils import server_utils as server_utils

class ColumnPutTestCase(BaseTestGenerator):
"""This class will update the column under table node."""
scenarios = [
('Edit column comments and null constraints', dict(
url='/browser/column/obj/',
col_data_type='char',
data={
'attnotnull': True,
'description': "This is test comment for column"
})),
('Edit column to Identity column as Always', dict(
url='/browser/column/obj/',
col_data_type='bigint',
server_min_version=100000,
skip_msg='Identity column are not supported by EPAS/PG 10.0 '
'and below.',
data={
'attnotnull': True,
'attidentity': 'a',
'seqincrement': 1,
'seqstart': 1,
'seqmin': 1,
'seqmax': 10,
'seqcache': 1,
'colconstype': 'i',
'seqcycle': True
})),
('Edit column to Identity column as Default', dict(
url='/browser/column/obj/',
col_data_type='bigint',
server_min_version=100000,
skip_msg='Identity column are not supported by EPAS/PG 10.0 '
'and below.',
data={
'attnotnull': True,
'attidentity': 'd',
'seqincrement': 2,
'seqstart': 2,
'seqmin': 2,
'seqmax': 2000,
'seqcache': 1,
'colconstype': 'i',
'seqcycle': True
})),
('Edit column Drop Identity by changing constraint type to NONE',
dict(url='/browser/column/obj/',
col_data_type='bigint',
server_min_version=100000,
create_identity_column=True,
skip_msg='Identity column are not supported by EPAS/PG 10.0 '
'and below.',
data={'colconstype': 'n'})
)
]
url = '/browser/column/obj/'

# Generates scenarios
scenarios = utils.generate_scenarios("column_put",
columns_utils.test_cases)

def setUp(self):
# Load test data
self.data = self.test_data

# Create db connection
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]

if hasattr(self, 'server_min_version'):
# Check DB version
if "server_min_version" in self.inventory_data:
server_con = server_utils.connect_server(self, self.server_id)
if not server_con["info"] == "Server connected.":
raise Exception("Could not connect to server to add "
"a table.")
if server_con["data"]["version"] < self.server_min_version:
self.skipTest(self.skip_msg)
if server_con["data"]["version"] < \
self.inventory_data["server_min_version"]:
self.skipTest(self.inventory_data["skip_msg"])

db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add a table.")

# Create schema
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
@ -103,43 +65,53 @@ class ColumnPutTestCase(BaseTestGenerator):
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add a table.")

# Create table
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
self.table_id = tables_utils.create_table(self.server, self.db_name,
self.schema_name,
self.table_name)

# Create column
self.column_name = "test_column_put_%s" % (str(uuid.uuid4())[1:8])

if hasattr(self, 'create_identity_column') and \
self.create_identity_column:
if "create_identity_column" in self.inventory_data:
self.column_id = columns_utils.create_identity_column(
self.server, self.db_name, self.schema_name,
self.table_name, self.column_name, self.col_data_type)
self.table_name, self.column_name,
self.inventory_data["data_type"])
else:
self.column_id = columns_utils.create_column(
self.server, self.db_name, self.schema_name,
self.table_name, self.column_name, self.col_data_type)
self.table_name, self.column_name,
self.inventory_data["data_type"])

def runTest(self):
"""This function will update the column under table node."""
# Verify column creation
col_response = columns_utils.verify_column(self.server, self.db_name,
self.column_name)
if not col_response:
raise Exception("Could not find the column to update.")

def runTest(self):
"""This function will update the column under table node."""
self.data.update({
'attnum': self.column_id,
'name': self.column_name,
})
if self.is_positive_test:
response = columns_utils.api_put(self)

response = self.tester.put(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' +
str(self.db_id) + '/' +
str(self.schema_id) + '/' +
str(self.table_id) + '/' +
str(self.column_id),
data=json.dumps(self.data),
follow_redirects=True)
self.assertEquals(response.status_code, 200)
# Assert response
utils.assert_status_code(self, response)
else:
if self.mocking_required:
with patch(self.mock_data["function_name"],
side_effect=eval(self.mock_data["return_value"])):
response = columns_utils.api_put(self)

# Assert response
utils.assert_status_code(self, response)
utils.assert_error_message(self, response)

def tearDown(self):
# Disconnect the database
@ -0,0 +1,104 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as columns_utils


class ColumnGetSqlTestCase(BaseTestGenerator):
"""This class will get column sql under table node."""
url = '/browser/column/sql/'

# Generates scenarios
scenarios = utils.generate_scenarios("column_sql",
columns_utils.test_cases)

def setUp(self):
# Load test data
self.data = self.test_data

# Create db connection
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add a table.")

# Create schema
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to add a table.")

# Create table
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
self.table_id = tables_utils.create_table(self.server, self.db_name,
self.schema_name,
self.table_name)

# Create column
self.column_name = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
self.column_id = columns_utils.create_column(self.server,
self.db_name,
self.schema_name,
self.table_name,
self.column_name)

# Create column
self.column_name_1 = "test_column_delete_%s" % (str(uuid.uuid4())[1:8])
self.column_id_1 = columns_utils.create_column(self.server,
self.db_name,
self.schema_name,
self.table_name,
self.column_name_1)

def runTest(self):
"""This function will fetch the column sql under table node."""
if self.is_positive_test:
if self.is_list:
response = columns_utils.api_get(self, "")
else:
response = columns_utils.api_get(self)

# Assert response
utils.assert_status_code(self, response)
else:
if self.mocking_required:
with patch(self.mock_data["function_name"],
side_effect=eval(self.mock_data["return_value"])):
response = columns_utils.api_get(self)
else:
if 'column_id' in self.data:
self.column_id = self.data['column_id']
response = columns_utils.api_get(self)

# Assert response
utils.assert_status_code(self, response)
utils.assert_error_message(self, response)

def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)
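Aside: the list scenarios above work by passing an empty string as the column id, which makes the shared URL template end at the table segment and hit the collection route. A minimal sketch with placeholder ids (none of these values come from the patch):

# Sketch only: mirrors the "{0}{1}/{2}/{3}/{4}/{5}/{6}" template the helpers
# use; the leading 1 stands for the server group and every id is a placeholder.
def column_url(column_id):
    return "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(
        '/browser/column/sql/', 1, 1, 2, 3, 4, column_id)

print(column_url(5))   # single object:  /browser/column/sql/1/1/2/3/4/5
print(column_url(''))  # collection:     /browser/column/sql/1/1/2/3/4/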
@ -10,9 +10,71 @@

import sys
import traceback

import os
import json
from urllib.parse import urlencode
from regression.python_test_utils import test_utils as utils

# Load test data from json file.
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
with open(CURRENT_PATH + "/column_test_data.json") as data_file:
test_cases = json.load(data_file)


# api method calls
def api_create(self):
return self.tester.post("{0}{1}/{2}/{3}/{4}/{5}/".
format(self.url, utils.SERVER_GROUP,
self.server_id, self.db_id,
self.schema_id, self.table_id),
data=json.dumps(self.data),
content_type='html/json'
)


def api_delete(self, column_id=None):
if column_id is None:
column_id = self.column_id
return self.tester.delete("{0}{1}/{2}/{3}/{4}/{5}/{6}".
format(self.url, utils.SERVER_GROUP,
self.server_id, self.db_id,
self.schema_id, self.table_id, column_id),
data=json.dumps(self.data),
follow_redirects=True
)


def api_get(self, column_id=None):
if column_id is None:
column_id = self.column_id
return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}".
format(self.url, utils.SERVER_GROUP,
self.server_id, self.db_id, self.schema_id,
self.table_id, column_id),
data=json.dumps(self.data),
follow_redirects=True
)


def api_get_msql(self, url_encode_data):
return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}?{7}".
format(self.url, utils.SERVER_GROUP, self.server_id,
self.db_id,
self.schema_id, self.table_id,
self.column_id,
urlencode(url_encode_data)),
follow_redirects=True
)


def api_put(self):
return self.tester.put("{0}{1}/{2}/{3}/{4}/{5}/{6}".
format(self.url, utils.SERVER_GROUP, self.server_id,
self.db_id, self.schema_id, self.table_id,
self.column_id),
data=json.dumps(self.data),
follow_redirects=True)
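For orientation, api_get_msql above is the one helper that sends the proposed change as a query string rather than a JSON body. A standalone sketch with hypothetical values (the ids and attribute values are placeholders, not data from this commit):

# Sketch only: mirrors the "{0}{1}/{2}/{3}/{4}/{5}/{6}?{7}" template above.
from urllib.parse import urlencode

changes = {"attnum": 5, "attlen": 15, "attprecision": 6}
print('/browser/column/msql/1/1/2/3/4/5?' + urlencode(changes))
# -> /browser/column/msql/1/1/2/3/4/5?attnum=5&attlen=15&attprecision=6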


def create_column(server, db_name, schema_name, table_name, col_name,
col_data_type='char'):
@ -0,0 +1,576 @@
{
"check_constraint_create": [
{
"name": "Create: Add valid check constraint to table with convalidated = T.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {
"name": "test_checkconstraint_add_",
"consrc": " (id > 0)",
"convalidated": true,
"comment": "Create: Add valid check constraint to table api test."
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
}
},
{
"name": "Create: Add valid check constraint to table with convalidated = F.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {
"name": "test_checkconstraint_add_",
"consrc": " (id > 0)",
"convalidated": false,
"comment": "Create: Add valid check constraint to table api test."
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
}
},
{
"name": "Create: Add check constraint without name.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
"consrc": " (id > 0)",
"convalidated": true,
"comment": "Create: Add check constraint without name api test"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 500,
"error_msg": "ERROR: constraint \"none\" for table \"table_checkconstraint_\" does not exist\n",
"test_result_data": {}
}
},
{
"name": "Create: Add check constraint to invalid table name/id.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
"consrc": " (id > 0)",
"convalidated": true,
"comment": "Create: Add check constraint without table name/id. api test",
"table_id": 615363
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 410,
"error_msg": "could not find the specified table.",
"test_result_data": {}
}
},
{
"name": "Create: With valid data while server is down.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
"name": "test_checkconstraint_add_",
"consrc": " (id > 0)",
"convalidated": true,
"comment": "Create: With valid data while server is down api test."
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
}
},
{
"name": "Create: Without name in data while server is down .",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
"consrc": " (id > 0)",
"convalidated": true,
"comment": "this is test comment"
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
}
},
{
"name": "Create: With valid data while server is down-2.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
"name": "test_checkconstraint_add_",
"consrc": " (id > 0)",
"convalidated": true,
"comment": "this is test comment"
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
}
}
],
"check_constraint_delete": [
{
"name": "Delete: Existing check constraint.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": false
},
{
"name": "Delete: Multiple existing check constraints.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": true
},
{
"name": "Delete: Non-existing check constraint.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
"check_constraint_id": 1
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": "Error: Object not found.",
"test_result_data": {}
},
"is_list": false
},
{
"name": "Delete: Existing check constraint while server down.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
},
"is_list": false
},
{
"name": "Delete: Existing check constraint while server down.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
},
"is_list": false
}
],
"check_constraint_dependencies_dependents": [
{
"name": "Get dependents: With existing check constraint id.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_dependent": true
},
{
"name": "Get dependencies: With existing check constraint id.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_dependent": false
}
],
"check_constraint_get": [
{
"name": "Get check constraint details: For existing constraint id.",
"is_positive_test": true,
"inventory_data": {
},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": false
},
{
"name": "Get check constraint details: For existing multiple constraint ids.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": true
},
{
"name": "Get check constraint details: For non-existing constraint id.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
"check_constraint_id": 1
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 410,
"error_msg": "Could not find the check constraint in the table.",
"test_result_data": {}
},
"is_list": false
},
{
"name": "Get check constraint details: For existing constraint id while server down.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
},
"is_list": false
}
],
"check_constraint_get_nodes": [
{
"name": "Get check constraint node: For existing constraint id with convalidated = T.",
"is_positive_test": true,
"inventory_data": {
"query": " \"ALTER TABLE %s.%s ADD CONSTRAINT %s CHECK ( (id > 0)) NOT VALID; COMMENT ON CONSTRAINT %s ON %s.%s IS 'this is test comment'\" % (schema_name, table_name, check_constraint_name,check_constraint_name, schema_name, table_name)"
},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": false
},
{
"name": "Get check constraint node: For existing constraint id with convalidated = F.",
"is_positive_test": true,
"inventory_data": {
"query": " \"ALTER TABLE %s.%s ADD CONSTRAINT %s CHECK ( (id > 0)); COMMENT ON CONSTRAINT %s ON %s.%s IS 'this is test comment'\" % (schema_name, table_name, check_constraint_name,check_constraint_name, schema_name, table_name)"
},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": false
},
{
"name": "Get check constraint nodes: For existing multiple constraint ids with convalidated = T.",
"is_positive_test": true,
"inventory_data": {
"query": " \"ALTER TABLE %s.%s ADD CONSTRAINT %s CHECK ( (id > 0)) NOT VALID; COMMENT ON CONSTRAINT %s ON %s.%s IS 'this is test comment'\" % (schema_name, table_name, check_constraint_name,check_constraint_name, schema_name, table_name)"
},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": true
},
{
"name": "Get check constraint nodes: For existing multiple constraint ids with convalidated = F.",
"is_positive_test": true,
"inventory_data": {
"query": " \"ALTER TABLE %s.%s ADD CONSTRAINT %s CHECK ( (id > 0)); COMMENT ON CONSTRAINT %s ON %s.%s IS 'this is test comment'\" % (schema_name, table_name, check_constraint_name,check_constraint_name, schema_name, table_name)"
},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": true
},
{
"name": "Get check constraint node: With non-existing constraint id.",
"is_positive_test": false,
"inventory_data": {
},
"test_data": {
"check_constraint_id": 1
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 410,
"error_msg": "Could not find the check constraint.",
"test_result_data": {
}
},
"is_list": false
}
],
"check_constraint_msql": [
{
"name": "Get check constraint msql: Modifying comment existing constraint id.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {
"comment": "Testing msql api"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": false
},
{
"name": "Get check constraint msql: Modifying comment & convalidated of existing constraint id.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {
"comment": "Testingmsqlapiconvalidated=f",
"convalidated": "false"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
},
"is_list": false
}
],
"check_constraint_put": [
{
"name": "Put: Update existing check constraint with convalidated = false",
"is_positive_test": true,
"inventory_data": {},
"test_data": {
"comment": "This is test comment for index",
"convalidated": false
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
}
},
{
"name": "Put: Update existing check constraint with comment only",
"is_positive_test": true,
"inventory_data": {},
"test_data": {
"comment": "This is test comment for constraint"
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
}
},
{
"name": "Put: Update existing existing check constraint while server is down.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
"comment": "This is test comment for index"
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
}
}
],
"check_constraint_sql": [
{
"name": "Get check constraint sql: With existing constraint id.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
}
},
{
"name": "Get check constraint sql: With non-existing constraint id.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
"check_constraint_id": 1
},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 410,
"error_msg": "Could not find the object on the server.",
"test_result_data": {}
}
},
{
"name": "Get check constraint sql: With existing constraint id while server down.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {
},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
}
}
],
"check_constraint_validate": [
{
"name": "Validate check constraint details: For existing constraint id.",
"is_positive_test": true,
"inventory_data": {},
"test_data": {},
"mocking_required": false,
"mock_data": {},
"expected_data": {
"status_code": 200,
"error_msg": null,
"test_result_data": {}
}
},
{
"name": "Validate check constraint details: For existing constraint id while server down.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
}
},
{
"name": "Validate check constraint details: For existing constraint id while server down-2.",
"is_positive_test": false,
"inventory_data": {},
"test_data": {},
"mocking_required": true,
"mock_data": {
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
"return_value": "(False,'Mocked Internal Server Error')"
},
"expected_data": {
"status_code": 500,
"error_msg": "Mocked Internal Server Error",
"test_result_data": {}
}
}
]
}
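Each top-level key in this file names one operation under test, and every entry shares the same shape: name, is_positive_test, inventory_data (fixture hints), test_data (request payload), mocking_required/mock_data, and expected_data. A plausible sketch of how a generator could turn such entries into scenario tuples — this illustrates the shape only and is not pgAdmin's actual generate_scenarios implementation:

def generate_scenarios_sketch(key, test_cases):
    # One (scenario_name, attribute_dict) pair per JSON entry; the test
    # framework is assumed to copy each dict onto the generated test case.
    return [(case["name"], dict(case)) for case in test_cases[key]]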
@ -7,7 +7,6 @@
#
##########################################################################

import json
import uuid

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
@ -19,18 +18,25 @@ from pgadmin.browser.server_groups.servers.databases.tests import utils as \
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from unittest.mock import patch
from . import utils as check_constraint_utils


class CheckConstraintAddTestCase(BaseTestGenerator):
"""This class will add check constraint to existing table"""
skip_on_database = ['gpdb']
scenarios = [
('Add check constraint to table',
dict(url='/browser/check_constraint/obj/'))
]
url = '/browser/check_constraint/obj/'

# Generates scenarios
scenarios = utils.generate_scenarios("check_constraint_create",
check_constraint_utils.test_cases)

def setUp(self):
super(CheckConstraintAddTestCase, self).setUp()
# Load test data
self.data = self.test_data

# Create db connection
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
@ -40,6 +46,7 @@ class CheckConstraintAddTestCase(BaseTestGenerator):
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to add a check "
"constraint.")
# Create schema
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
@ -48,6 +55,8 @@ class CheckConstraintAddTestCase(BaseTestGenerator):
if not schema_response:
raise Exception("Could not find the schema to add a check "
"constraint.")

# Create table
self.table_name = "table_checkconstraint_add_%s" % \
(str(uuid.uuid4())[1:8])
self.table_id = tables_utils.create_table(self.server,
@ -57,19 +66,37 @@ class CheckConstraintAddTestCase(BaseTestGenerator):

def runTest(self):
"""This function will add check constraint to table."""
check_constraint_name = "test_checkconstraint_add_%s" % \
(str(uuid.uuid4())[1:8])
data = {"name": check_constraint_name,
"consrc": " (id > 0)",
"convalidated": True,
"comment": "this is test comment"}
response = self.tester.post(
self.url + str(utils.SERVER_GROUP) + '/' +
str(self.server_id) + '/' + str(self.db_id) +
'/' + str(self.schema_id) + '/' + str(self.table_id) + '/',
data=json.dumps(data),
content_type='html/json')
self.assertEquals(response.status_code, 200)
if "name" in self.data:
check_constraint_name = \
self.data["name"] + (str(uuid.uuid4())[1:8])
self.data["name"] = check_constraint_name

if self.is_positive_test:
response = check_constraint_utils.api_create(self)

# Assert response
utils.assert_status_code(self, response)

# Verify in backend
cross_check_res = check_constraint_utils. \
verify_check_constraint(self.server, self.db_name,
self.data["name"])
self.assertIsNotNone(cross_check_res, "Could not find the newly"
" created check constraint.")
else:
if self.mocking_required:
with patch(self.mock_data["function_name"],
side_effect=eval(self.mock_data["return_value"])):
response = check_constraint_utils.api_create(self)
# Assert response
utils.assert_status_code(self, response)
utils.assert_error_message(self, response)
else:
if 'table_id' in self.data:
self.table_id = self.data['table_id']
response = check_constraint_utils.api_create(self)
# Assert response
utils.assert_status_code(self, response)

def tearDown(self):
# Disconnect the database
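Worth noting: the negative scenarios all drive failures through one idiom — the JSON stores a dotted function path plus a Python literal, and the test evals that literal into patch's side_effect (a list or tuple yields one return per call, so trailing "(True,True)" entries let later driver calls succeed). A self-contained sketch of the same idiom; the patched target below is a stand-in, not pgAdmin's psycopg2 driver:

import json
from unittest.mock import patch

mock_data = {
    "function_name": "json.loads",  # stand-in target for this sketch only
    "return_value": "[(False, 'Mocked Internal Server Error')]",
}
with patch(mock_data["function_name"],
           side_effect=eval(mock_data["return_value"])):
    print(json.loads('{}'))  # -> (False, 'Mocked Internal Server Error')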
@ -8,6 +8,7 @@
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
@ -18,19 +19,24 @@ from pgadmin.browser.server_groups.servers.databases.tests import utils as \
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as chk_constraint_utils
from . import utils as check_constraint_utils


class CheckConstraintDeleteTestCase(BaseTestGenerator):
"""This class will delete check constraint to existing table"""
skip_on_database = ['gpdb']
scenarios = [
('Delete check constraint to table',
dict(url='/browser/check_constraint/obj/'))
]
url = '/browser/check_constraint/obj/'

# Generates scenarios
scenarios = utils.generate_scenarios("check_constraint_delete",
check_constraint_utils.test_cases)

def setUp(self):
super(CheckConstraintDeleteTestCase, self).setUp()
# Load test data
self.data = self.test_data

# Create db connection
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
@ -40,6 +46,8 @@ class CheckConstraintDeleteTestCase(BaseTestGenerator):
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to delete a check "
"constraint.")

# Create schema
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
@ -48,34 +56,75 @@ class CheckConstraintDeleteTestCase(BaseTestGenerator):
if not schema_response:
raise Exception("Could not find the schema to delete a check "
"constraint.")

# Create table
self.table_name = "table_checkconstraint_delete_%s" % \
(str(uuid.uuid4())[1:8])
self.table_id = tables_utils.create_table(self.server,
self.db_name,
self.schema_name,
self.table_name)

# Create constraint to delete
self.check_constraint_name = "test_checkconstraint_delete_%s" % \
(str(uuid.uuid4())[1:8])
self.check_constraint_id = \
chk_constraint_utils.create_check_constraint(
check_constraint_utils.create_check_constraint(
self.server, self.db_name, self.schema_name, self.table_name,
self.check_constraint_name)

def runTest(self):
"""This function will delete check constraint to table."""
chk_constraint = chk_constraint_utils.verify_check_constraint(
# Cross check constraint creation
chk_constraint = check_constraint_utils.verify_check_constraint(
self.server, self.db_name, self.check_constraint_name)
if not chk_constraint:
raise Exception("Could not find the check constraint to delete.")
response = self.tester.delete(
"{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP,
self.server_id, self.db_id,
self.schema_id,
self.table_id,
self.check_constraint_id),
follow_redirects=True
)
self.assertEquals(response.status_code, 200)

# In case of multiple constraints
if self.is_list:
# Create constraint to delete
self.check_constraint_name_2 = \
"test_checkconstraint_get_list_%s" % (str(uuid.uuid4())[1:8])
self.check_constraint_id_2 = \
check_constraint_utils.create_check_constraint(
self.server, self.db_name, self.schema_name,
self.table_name,
self.check_constraint_name_2)

# constraint list to delete
self.data['ids'] = [self.check_constraint_id,
self.check_constraint_id_2]

def runTest(self):
"""This function will delete check constraint to table."""
if self.is_positive_test:

if self.is_list:
response = check_constraint_utils.api_delete(self, '')
else:
response = check_constraint_utils.api_delete(self)

# Assert response
utils.assert_status_code(self, response)

# Verify in backend
cross_check_res = check_constraint_utils.verify_check_constraint(
self.server,
self.db_name,
self.check_constraint_name)
self.assertIsNone(cross_check_res,
"Deleted constraint still present")
else:
if self.mocking_required:
with patch(self.mock_data["function_name"],
side_effect=[eval(self.mock_data["return_value"])]):
response = check_constraint_utils.api_delete(self)
elif 'check_constraint_id' in self.data:
self.check_constraint_id = self.data["check_constraint_id"]
response = check_constraint_utils.api_delete(self)

# Assert response
utils.assert_status_code(self, response)
utils.assert_error_message(self, response)

def tearDown(self):
# Disconnect the database

@ -0,0 +1,93 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as check_constraint_utils


class CheckConstraintGetDependenciesDependentsTestCase(BaseTestGenerator):
"""This class will fetch dependents/ dependencies for check constraint to
existing table """
skip_on_database = ['gpdb']
url = '/browser/check_constraint/'

# Generates scenarios
scenarios = utils.generate_scenarios(
"check_constraint_dependencies_dependents",
check_constraint_utils.test_cases)

def setUp(self):
super(CheckConstraintGetDependenciesDependentsTestCase, self).setUp()
# Load test data
self.data = self.test_data

# Create db connection
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to fetch a check "
"constraint.")

# Create schema
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to fetch a check "
"constraint.")

# Create table
self.table_name = "table_checkconstraint_get_%s" % \
(str(uuid.uuid4())[1:8])
self.table_id = tables_utils.create_table(self.server,
self.db_name,
self.schema_name,
self.table_name)

# Create constraint to fetch dependents/dependency
self.check_constraint_name = "test_checkconstraint_get_%s" % \
(str(uuid.uuid4())[1:8])
self.check_constraint_id = \
check_constraint_utils.create_check_constraint(
self.server, self.db_name, self.schema_name, self.table_name,
self.check_constraint_name)

def runTest(self):
"""This function will fetch dependents/ dependencies for check
constraint to existing table """
if self.is_positive_test:
if self.is_dependent:
self.url = self.url + 'dependent/'
response = check_constraint_utils.api_get(self)
else:
self.url = self.url + 'dependency/'
response = check_constraint_utils.api_get(self)

utils.assert_status_code(self, response)

def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)
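The two scenarios differ only in the suffix appended to the node URL, selected from the is_dependent flag. A one-line sketch using the same base route:

base = '/browser/check_constraint/'
for is_dependent in (True, False):
    # dependent/ lists objects that depend on the constraint; dependency/ the reverse
    print(base + ('dependent/' if is_dependent else 'dependency/'))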
@ -8,6 +8,7 @@
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
@ -18,20 +19,24 @@ from pgadmin.browser.server_groups.servers.databases.tests import utils as \
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as chk_constraint_utils
from . import utils as check_constraint_utils


class CheckConstraintGetTestCase(BaseTestGenerator):
"""This class will fetch check constraint to existing table"""
skip_on_database = ['gpdb']
url = '/browser/check_constraint/obj/'

scenarios = [
('Fetch check constraint to table',
dict(url='/browser/check_constraint/obj/'))
]
# Generates scenarios
scenarios = utils.generate_scenarios("check_constraint_get",
check_constraint_utils.test_cases)

def setUp(self):
super(CheckConstraintGetTestCase, self).setUp()
# Load test data
self.data = self.test_data

# Create db connection
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
@ -41,6 +46,8 @@ class CheckConstraintGetTestCase(BaseTestGenerator):
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to fetch a check "
"constraint.")

# Create schema
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
@ -49,30 +56,57 @@ class CheckConstraintGetTestCase(BaseTestGenerator):
if not schema_response:
raise Exception("Could not find the schema to fetch a check "
"constraint.")

# Create table
self.table_name = "table_checkconstraint_get_%s" % \
(str(uuid.uuid4())[1:8])
self.table_id = tables_utils.create_table(self.server,
self.db_name,
self.schema_name,
self.table_name)

# Create constraint to fetch
self.check_constraint_name = "test_checkconstraint_get_%s" % \
(str(uuid.uuid4())[1:8])
self.check_constraint_id = \
chk_constraint_utils.create_check_constraint(
check_constraint_utils.create_check_constraint(
self.server, self.db_name, self.schema_name, self.table_name,
self.check_constraint_name)

# In case of multiple constraints
if self.is_list:
self.check_constraint_name_2 = "test_checkconstraint_get_list_%s" \
% (str(uuid.uuid4())[1:8])
self.check_constraint_id_2 = \
check_constraint_utils.create_check_constraint(
self.server, self.db_name, self.schema_name,
self.table_name,
self.check_constraint_name_2)

def runTest(self):
"""This function will fetch check constraint to table."""
response = self.tester.get(
"{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP,
self.server_id, self.db_id,
self.schema_id,
self.table_id,
self.check_constraint_id),
follow_redirects=True
)
self.assertEquals(response.status_code, 200)
if self.is_positive_test:
if self.is_list:
response = check_constraint_utils.api_get(self, '')
else:
response = check_constraint_utils.api_get(self)

# Assert response
utils.assert_status_code(self, response)
else:
if self.mocking_required:
with patch(self.mock_data["function_name"],
side_effect=[eval(self.mock_data["return_value"])]):
response = check_constraint_utils.api_get(self)
else:
if 'check_constraint_id' in self.data:
# Non-existing constraint id
self.check_constraint_id = self.data["check_constraint_id"]
response = check_constraint_utils.api_get(self)

# Assert response
utils.assert_status_code(self, response)
utils.assert_error_message(self, response)

def tearDown(self):
# Disconnect the database

@ -0,0 +1,106 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as check_constraint_utils


class CheckConstraintGetNodesTestCase(BaseTestGenerator):
"""This class will fetch nodes check constraint of table."""
skip_on_database = ['gpdb']
url = '/browser/check_constraint/nodes/'

# Generates scenarios
scenarios = utils.generate_scenarios("check_constraint_get_nodes",
check_constraint_utils.test_cases)

def setUp(self):
super(CheckConstraintGetNodesTestCase, self).setUp()
# Load test data
self.data = self.test_data

# Create db connection
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to fetch a check "
"constraint.")

# Create schema
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to fetch a check "
"constraint.")

# Create table
self.table_name = "table_checkconstraint_get_%s" % \
(str(uuid.uuid4())[1:8])
self.table_id = tables_utils.create_table(self.server,
self.db_name,
self.schema_name,
self.table_name)

# Create constraint
if 'query' in self.inventory_data:
query = self.inventory_data['query']
else:
query = None
self.check_constraint_name = "test_checkconstraint_get_%s" % \
(str(uuid.uuid4())[1:8])
self.check_constraint_id = \
check_constraint_utils.create_check_constraint(
self.server, self.db_name, self.schema_name, self.table_name,
self.check_constraint_name, query)

def runTest(self):
"""This function will fetch nodes check constraint of table."""
if self.is_positive_test:
if self.is_list:
response = check_constraint_utils.api_get(self, '')
else:
response = check_constraint_utils.api_get(self)

# Assert response
utils.assert_status_code(self, response)
else:
if self.mocking_required:
with patch(self.mock_data["function_name"],
side_effect=[eval(self.mock_data["return_value"])]):
response = check_constraint_utils.api_get(self)
elif 'check_constraint_id' in self.data:
# Non-existing constraint id
self.check_constraint_id = self.data["check_constraint_id"]
response = check_constraint_utils.api_get(self)

# Assert response
utils.assert_status_code(self, response)
utils.assert_error_message(self, response)

def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -0,0 +1,105 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as check_constraint_utils


class CheckConstraintGetMsqlTestCase(BaseTestGenerator):
"""This class will fetch modified sql for check constraint of table. """
skip_on_database = ['gpdb']
url = '/browser/check_constraint/msql/'

# Generates scenarios
scenarios = utils.generate_scenarios("check_constraint_msql",
check_constraint_utils.test_cases)

def setUp(self):
super(CheckConstraintGetMsqlTestCase, self).setUp()
# Load test data
self.data = self.test_data

# Create db connection
self.db_name = parent_node_dict["database"][-1]["db_name"]
schema_info = parent_node_dict["schema"][-1]
self.server_id = schema_info["server_id"]
self.db_id = schema_info["db_id"]
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
self.server_id, self.db_id)
if not db_con['data']["connected"]:
raise Exception("Could not connect to database to fetch a check "
"constraint.")

# Create schema
self.schema_id = schema_info["schema_id"]
self.schema_name = schema_info["schema_name"]
schema_response = schema_utils.verify_schemas(self.server,
self.db_name,
self.schema_name)
if not schema_response:
raise Exception("Could not find the schema to fetch a check "
"constraint.")

# Create table
self.table_name = "table_checkconstraint_get_%s" % \
(str(uuid.uuid4())[1:8])
self.table_id = tables_utils.create_table(self.server,
self.db_name,
self.schema_name,
self.table_name)

# Create constraint to modify
self.check_constraint_name = "test_checkconstraint_get_%s" % \
(str(uuid.uuid4())[1:8])
self.check_constraint_id = \
check_constraint_utils.create_check_constraint(
self.server, self.db_name, self.schema_name, self.table_name,
self.check_constraint_name)

# In case of multiple constraints
if self.is_list:
self.check_constraint_name = "test_checkconstraint_get_list_%s" % \
(str(uuid.uuid4())[1:8])
self.check_constraint_id = \
check_constraint_utils.create_check_constraint(
self.server, self.db_name, self.schema_name,
self.table_name,
self.check_constraint_name)

def runTest(self):
"""This function will fetch modified sql for check constraint to
table. """
if self.is_positive_test:
url_encode_data = {"oid": self.check_constraint_id,
"comment": self.data['comment']}

if 'convalidated' in self.data:
url_encode_data["convalidated"] = self.data['convalidated']

response = check_constraint_utils.api_get_msql(self,
url_encode_data)

# Assert response
utils.assert_status_code(self, response)
self.assertIn(self.data['comment'], response.json['data'])

def tearDown(self):
# Disconnect the database
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
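The msql endpoint answers with the SQL that *would* be executed for the proposed change, which is why the test only asserts that the new comment text appears in the response body. A minimal sketch of how such a request is composed, with illustrative placeholder ids (the real tests derive them in setUp()):

    from urllib.parse import urlencode

    # Illustrative values only; not tied to a live server.
    changes = {"oid": 42, "comment": "jsoncomment"}
    query_string = urlencode(changes)

    # The endpoint is expected to return SQL such as:
    #   COMMENT ON CONSTRAINT <name> ON <schema>.<table> IS 'jsoncomment';
    print("/browser/check_constraint/msql/1/1/1/1/1/42?" + query_string)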
@ -7,8 +7,8 @@
#
##########################################################################

-import json
import uuid
+from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@ -19,19 +19,24 @@ from pgadmin.browser.server_groups.servers.databases.tests import utils as \
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
-from . import utils as chk_constraint_utils
+from . import utils as check_constraint_utils


class CheckConstraintPutTestCase(BaseTestGenerator):
    """This class will update a check constraint of an existing table"""
    skip_on_database = ['gpdb']
-    scenarios = [
-        ('Update check constraint to table',
-         dict(url='/browser/check_constraint/obj/'))
-    ]
+    url = '/browser/check_constraint/obj/'
+
+    # Generates scenarios from check_constraint_test_data.json file
+    scenarios = utils.generate_scenarios("check_constraint_put",
+                                         check_constraint_utils.test_cases)

    def setUp(self):
        super(CheckConstraintPutTestCase, self).setUp()
+        # Load test data
+        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@ -41,6 +46,8 @@ class CheckConstraintPutTestCase(BaseTestGenerator):
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to update a check "
                            "constraint.")

+        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@ -49,36 +56,46 @@ class CheckConstraintPutTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to update a check "
                            "constraint.")

+        # Create table
        self.table_name = "table_checkconstraint_put_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

+        # Create constraint to modify
        self.check_constraint_name = "test_checkconstraint_put_%s" % \
                                     (str(uuid.uuid4())[1:8])
        self.check_constraint_id = \
-            chk_constraint_utils.create_check_constraint(
+            check_constraint_utils.create_check_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.check_constraint_name)

-    def runTest(self):
-        """This function will delete check constraint to table."""
-        chk_constraint = chk_constraint_utils.verify_check_constraint(
+        # Cross check constraint creation
+        chk_constraint = check_constraint_utils.verify_check_constraint(
            self.server, self.db_name, self.check_constraint_name)
        if not chk_constraint:
            raise Exception("Could not find the check constraint to update.")
-        data = {"oid": self.check_constraint_id,
-                "comment": "This is test comment for check constraint."}
-        response = self.tester.put(
-            "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP,
-                                                self.server_id, self.db_id,
-                                                self.schema_id,
-                                                self.table_id,
-                                                self.check_constraint_id),
-            data=json.dumps(data),
-            follow_redirects=True)
-        self.assertEquals(response.status_code, 200)

+    def runTest(self):
+        """This function will update a check constraint of a table."""
+        self.data["oid"] = self.check_constraint_id
+        if self.is_positive_test:
+            response = check_constraint_utils.api_put(self)
+
+            # Assert response
+            utils.assert_status_code(self, response)
+        else:
+            if self.mocking_required:
+                with patch(self.mock_data["function_name"],
+                           side_effect=[eval(self.mock_data["return_value"])]):
+                    response = check_constraint_utils.api_put(self)
+
+                # Assert response
+                utils.assert_status_code(self, response)
+                utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
@ -0,0 +1,105 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as check_constraint_utils


class CheckConstraintGetSqlTestCase(BaseTestGenerator):
    """This class will fetch the SQL for a check constraint of an existing
    table"""
    skip_on_database = ['gpdb']
    url = '/browser/check_constraint/sql/'

    # Generates scenarios from check_constraint_test_data.json file
    scenarios = utils.generate_scenarios("check_constraint_sql",
                                         check_constraint_utils.test_cases)

    def setUp(self):
        super(CheckConstraintGetSqlTestCase, self).setUp()
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to fetch a check "
                            "constraint.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to fetch a check "
                            "constraint.")

        # Create table
        self.table_name = "table_checkconstraint_get_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint
        self.check_constraint_name = "test_checkconstraint_get_%s" % \
                                     (str(uuid.uuid4())[1:8])
        self.check_constraint_id = \
            check_constraint_utils.create_check_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.check_constraint_name)

        # Cross check constraint creation
        chk_constraint = check_constraint_utils.verify_check_constraint(
            self.server, self.db_name, self.check_constraint_name)
        if not chk_constraint:
            raise Exception("Could not find the check constraint to fetch.")

    def runTest(self):
        """This function will fetch the SQL for a check constraint of a
        table."""
        if self.is_positive_test:
            response = check_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = check_constraint_utils.api_get(self)
            elif 'check_constraint_id' in self.data:
                # Non-existing constraint id
                self.check_constraint_id = self.data["check_constraint_id"]
                response = check_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -0,0 +1,95 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as check_constraint_utils


class CheckConstraintValidateTestCase(BaseTestGenerator):
    """This class will validate a check constraint of an existing table"""
    skip_on_database = ['gpdb']
    url = '/browser/check_constraint/validate/'

    # Generates scenarios from check_constraint_test_data.json file
    scenarios = utils.generate_scenarios("check_constraint_validate",
                                         check_constraint_utils.test_cases)

    def setUp(self):
        super(CheckConstraintValidateTestCase, self).setUp()
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to fetch a check "
                            "constraint.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to fetch a check "
                            "constraint.")

        # Create table
        self.table_name = "table_checkconstraint_get_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint to validate
        self.check_constraint_name = "test_checkconstraint_get_%s" % \
                                     (str(uuid.uuid4())[1:8])
        self.check_constraint_id = \
            check_constraint_utils.create_check_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.check_constraint_name)

    def runTest(self):
        """This function will validate a check constraint of a table."""
        if self.is_positive_test:
            response = check_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = check_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
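All the negative scenarios above lean on the same trick: the JSON file stores the mocked return value as a string, and the test eval()s it inside patch() so the patched driver call reports a failure. A stripped-down, self-contained sketch of that pattern, independent of pgAdmin (the patch target here is a placeholder chosen only so the snippet runs):

    from unittest.mock import patch

    mock_data = {
        "function_name": "builtins.len",  # placeholder target for illustration
        "return_value": "(False, 'Mocked Internal Server Error')",
    }

    # eval() turns the stored string into a real Python tuple; wrapping it in
    # a list makes side_effect yield it on the first (and only) call.
    with patch(mock_data["function_name"],
               side_effect=[eval(mock_data["return_value"])]):
        status, message = len("ignored")  # the patched call
        assert status is False and message == 'Mocked Internal Server Error'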
@ -10,14 +10,83 @@

import sys
import traceback
+import os
+import json
+from urllib.parse import urlencode

from regression.python_test_utils import test_utils as utils

+# Load test data from json file.
+CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
+with open(CURRENT_PATH + "/check_constraint_test_data.json") as data_file:
+    test_cases = json.load(data_file)
+
+
+# api call methods
+def api_create(self):
+    return self.tester.post("{0}{1}/{2}/{3}/{4}/{5}/".
+                            format(self.url, utils.SERVER_GROUP,
+                                   self.server_id, self.db_id,
+                                   self.schema_id,
+                                   self.table_id),
+                            data=json.dumps(self.data),
+                            content_type='html/json'
+                            )
+
+
+def api_delete(self, check_constraint_id=None):
+    if check_constraint_id is None:
+        check_constraint_id = self.check_constraint_id
+    return self.tester.delete("{0}{1}/{2}/{3}/{4}/{5}/{6}".
+                              format(self.url, utils.SERVER_GROUP,
+                                     self.server_id, self.db_id,
+                                     self.schema_id,
+                                     self.table_id,
+                                     check_constraint_id),
+                              data=json.dumps(self.data),
+                              follow_redirects=True
+                              )
+
+
+def api_get(self, check_constraint_id=None):
+    if check_constraint_id is None:
+        check_constraint_id = self.check_constraint_id
+    return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}".
+                           format(self.url, utils.SERVER_GROUP,
+                                  self.server_id, self.db_id,
+                                  self.schema_id, self.table_id,
+                                  check_constraint_id),
+                           follow_redirects=True
+                           )
+
+
+def api_put(self):
+    return self.tester.put("{0}{1}/{2}/{3}/{4}/{5}/{6}".
+                           format(self.url, utils.SERVER_GROUP,
+                                  self.server_id, self.db_id,
+                                  self.schema_id, self.table_id,
+                                  self.check_constraint_id),
+                           data=json.dumps(self.data),
+                           follow_redirects=True
+                           )
+
+
+def api_get_msql(self, url_encode_data):
+    return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}?{7}".
+                           format(self.url, utils.SERVER_GROUP,
+                                  self.server_id, self.db_id,
+                                  self.schema_id, self.table_id,
+                                  self.check_constraint_id,
+                                  urlencode(url_encode_data)),
+                           follow_redirects=True
+                           )
+
+
def create_check_constraint(server, db_name, schema_name, table_name,
-                            check_constraint_name):
+                            check_constraint_name, query_val=None):
    """
    This function creates a check constraint under provided table.
+    :param query_val: optional custom query to run instead of the default
    :param server: server details
    :type server: dict
    :param db_name: database name
@ -40,12 +109,15 @@ def create_check_constraint(server, db_name, schema_name, table_name,
        old_isolation_level = connection.isolation_level
        connection.set_isolation_level(0)
        pg_cursor = connection.cursor()
-        query = "ALTER TABLE %s.%s ADD CONSTRAINT %s CHECK ( (id > 0)) " \
-                "NOT VALID; COMMENT ON CONSTRAINT %s ON %s.%s IS " \
-                "'this is test comment'" % (schema_name, table_name,
-                                            check_constraint_name,
-                                            check_constraint_name,
-                                            schema_name, table_name)
+        if query_val is None:
+            query = "ALTER TABLE %s.%s ADD CONSTRAINT %s CHECK ( (id > 0)) " \
+                    "NOT VALID; COMMENT ON CONSTRAINT %s ON %s.%s IS " \
+                    "'this is test comment'" % (schema_name, table_name,
+                                                check_constraint_name,
+                                                check_constraint_name,
+                                                schema_name, table_name)
+        else:
+            query = eval(query_val)
        pg_cursor.execute(query)
        connection.set_isolation_level(old_isolation_level)
        connection.commit()
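Every rewritten test class above delegates scenario construction to utils.generate_scenarios(key, test_cases). A rough sketch of what such a generator has to do, assuming each JSON entry's keys end up as attributes on the test instance (a simplified reconstruction for orientation, not the actual pgAdmin implementation):

    def generate_scenarios(key, test_cases):
        """Turn the JSON entries under `key` into (name, attrs) scenario pairs."""
        scenarios = []
        for case in test_cases.get(key, []):
            attrs = dict(case)
            # The test generator copies these dict keys onto the test
            # instance, which is why setUp() can read self.test_data,
            # self.mock_data, self.is_positive_test, and so on.
            attrs["test_data"] = case.get("test_data", {})
            scenarios.append((case["name"], attrs))
        return scenarios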
@ -0,0 +1,545 @@
{
  "exclusion_constraint_create": [
    {
      "name": "Create: Valid exclusion constraint.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "name": "test_exclusionconstraint_add_",
        "spcname": "pg_default",
        "amname": "btree",
        "columns": [
          {"column": "id", "sort_order": false, "nulls": false,
           "operator": "="}],
        "indconstraint": "(1=2)",
        "include": ["name"]
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Valid exclusion constraint without name.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "spcname": "pg_default",
        "amname": "btree",
        "columns": [
          {"column": "id", "sort_order": false, "nulls": false,
           "operator": "="}],
        "indconstraint": "(1=2)",
        "include": ["name"]
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Invalid exclusion constraint without columns parameter.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "name": "test_exclusionconstraint_add_",
        "spcname": "pg_default",
        "amname": "btree",
        "indconstraint": "(1=2)",
        "include": ["name"]
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 400,
        "error_msg": "Could not find required parameter (columns).",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Exclusion constraint without name & valid data while server is down.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "spcname": "pg_default",
        "amname": "btree",
        "columns": [
          {"column": "id", "sort_order": false, "nulls": false,
           "operator": "="}],
        "indconstraint": "(1=2)",
        "include": ["name"]
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Exclusion constraint with name in data while server is down.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "name": "test_exclusionconstraint_add_",
        "spcname": "pg_default",
        "amname": "btree",
        "columns": [
          {"column": "id", "sort_order": false, "nulls": false,
           "operator": "="}],
        "indconstraint": "(1=2)",
        "include": ["name"]
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Exclusion constraint with valid data while server is down-2.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "name": "test_exclusionconstraint_add_",
        "spcname": "pg_default",
        "amname": "btree",
        "columns": [
          {"column": "id", "sort_order": false, "nulls": false,
           "operator": "="}],
        "indconstraint": "(1=2)",
        "include": ["name"]
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Exclusion constraint without name while server is down-2.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "spcname": "pg_default",
        "amname": "btree",
        "columns": [
          {"column": "id", "sort_order": false, "nulls": false,
           "operator": "="}],
        "indconstraint": "(1=2)",
        "include": ["name"]
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    }
  ],
  "exclusion_constraint_delete": [
    {
      "name": "Delete: Existing exclusion constraint.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Multiple existing exclusion constraints.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Delete: Non-existing exclusion constraint.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "exclusion_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": "Error: Object not found.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Existing exclusion constraint while server down.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Existing exclusion constraint while server down-2.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "exclusion_constraint_dependencies_dependents": [
    {
      "name": "Get dependents: With existing constraint id.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_dependent": true
    },
    {
      "name": "Get exclusion constraint dependencies: With existing constraint id.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_dependent": false
    }
  ],
  "exclusion_constraint_get": [
    {
      "name": "Get exclusion constraint details: For existing constraint id.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get exclusion constraint details: For existing multiple constraint ids.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Get exclusion constraint details: For Non-existing constraint id.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "exclusion_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "Could not find the exclusion constraint in the table.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get exclusion constraint details: For existing constraint id while server down.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "exclusion_constraint_get_nodes": [
    {
      "name": "Get exclusion constraint node: For existing constraint id.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get exclusion constraint nodes: For existing multiple constraint ids.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Get exclusion constraint node: With non-existing constraint id.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "exclusion_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "Could not find the exclusion constraint.",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "exclusion_constraint_msql": [
    {
      "name": "Get exclusion constraint msql: Modifying comment of existing constraint id.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "name": "Modify Name for Exclusion Constraint",
        "comment": "Testing msql api",
        "fillFactor": 22
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "exclusion_constraint_put": [
    {
      "name": "Put: Update existing exclusion constraint.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "comment": "This is test comment for index"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Put: Update existing exclusion constraint while server is down.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "comment": "This is test comment for index"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    },
    {
      "name": "Put: Update existing exclusion constraint while server is down-3.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "comment": "This is test comment for index"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    }
  ],
  "exclusion_constraint_sql": [
    {
      "name": "Get exclusion constraint sql: With existing constraint id.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Get exclusion constraint sql: With non-existing constraint id.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "exclusion_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "Could not find the exclusion constraint.",
        "test_result_data": {}
      }
    },
    {
      "name": "Get exclusion constraint sql: With existing constraint id while server down.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    }
  ],
  "exclusion_constraint_get_statistics": [
    {
      "name": "Get exclusion constraint statistics: With existing index id.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get exclusion constraint statistics: With existing index id while server is down.",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ]
}
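Each block in this file follows the same schema: name, is_positive_test, inventory_data, test_data, mocking_required, mock_data, and expected_data, plus optional flags such as is_list or is_dependent that steer the setUp/runTest branches. A small sanity-check sketch for that shape (a hypothetical helper, not part of this commit):

    import json

    REQUIRED_KEYS = {"name", "is_positive_test", "inventory_data", "test_data",
                     "mocking_required", "mock_data", "expected_data"}

    def check_test_data(path):
        """Fail fast if any scenario entry is missing a required key."""
        with open(path) as f:
            data = json.load(f)
        for key, cases in data.items():
            for case in cases:
                missing = REQUIRED_KEYS - case.keys()
                assert not missing, \
                    "%s/%s lacks %s" % (key, case["name"], missing)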
@ -9,7 +9,8 @@
import json
import uuid

+from unittest.mock import patch
from pgadmin.utils import server_utils as server_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
@ -24,20 +25,36 @@ from . import utils as exclusion_utils

class ExclusionConstraintAddTestCase(BaseTestGenerator):
    """This class will add a new exclusion constraint to an existing table"""
-    scenarios = [
-        ('Add Exclusion Constraint URL',
-         dict(url='/browser/exclusion_constraint/obj/'))
-    ]
+    url = '/browser/exclusion_constraint/obj/'
+
+    # Generates scenarios from exclusion_constraint_test_data.json file
+    scenarios = utils.generate_scenarios("exclusion_constraint_create",
+                                         exclusion_utils.test_cases)

    def setUp(self):
+        # Load test data
+        self.data = self.test_data
+
+        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
+
+        # Check DB version
+        server_con = server_utils.connect_server(self, self.server_id)
+        if not server_con["info"] == "Server connected.":
+            raise Exception("Could not connect to server to add "
+                            "a table.")
+        self.db_version = server_con["data"]["version"]
+
+        # Create db connection
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")
+
+        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@ -45,6 +62,8 @@ class ExclusionConstraintAddTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

+        # Create table
        self.table_name = "table_for_exclusion_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
@ -52,29 +71,39 @@ class ExclusionConstraintAddTestCase(BaseTestGenerator):

    def runTest(self):
        """This function will add an exclusion constraint to an existing
        table."""
-        self.index_name = "test_index_add_%s" % (str(uuid.uuid4())[1:8])
-        data = {"name": self.index_name,
-                "spcname": "pg_default",
-                "amname": "btree",
-                "columns": [
-                    {"column": "id", "sort_order": False, "nulls": False,
-                     "operator": "="}],
-                "indconstraint": "(1=2)",
-                "include": ["name"]
-                }
-        response = self.tester.post(
-            self.url + str(utils.SERVER_GROUP) + '/' +
-            str(self.server_id) + '/' + str(self.db_id) +
-            '/' + str(self.schema_id) + '/' + str(self.table_id) + '/',
-            data=json.dumps(data),
-            content_type='html/json')
-        self.assertEquals(response.status_code, 200)
+        if "name" in self.data:
+            constraint_name = self.data["name"] + (str(uuid.uuid4())[1:8])
+            self.data["name"] = constraint_name
+        elif self.db_version < 110000:
+            constraint_name = self.table_name + '_' + \
+                self.data["columns"][0]['column'] + '_excl'
+        else:
+            constraint_name = self.table_name + '_' + \
+                self.data["columns"][0]['column'] + '_name_excl'

-        index_response = exclusion_utils.verify_exclusion_constraint(
-            self.server, self.db_name, self.index_name)
+        if self.is_positive_test:
+            response = exclusion_utils.api_create(self)

-        if not index_response:
-            raise Exception("Could not find the constraint added.")
+            # Assert response
+            utils.assert_status_code(self, response)
+
+            # Verify in backend
+            cross_check_res = exclusion_utils.verify_exclusion_constraint(
+                self.server, self.db_name, constraint_name)
+            self.assertIsNotNone(cross_check_res,
+                                 "Could not find the newly created exclusion "
+                                 "constraint.")
+        else:
+            if self.mocking_required:
+                with patch(self.mock_data["function_name"],
+                           side_effect=eval(self.mock_data["return_value"])):
+                    response = exclusion_utils.api_create(self)
+            else:
+                response = exclusion_utils.api_create(self)
+
+            # Assert response
+            utils.assert_status_code(self, response)
+            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
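The version branch in runTest mirrors the server-side naming rule the test relies on: when no constraint name is supplied, PostgreSQL derives one from the table and column, and from version 11 onward the INCLUDE column ('name' here) is folded into that derived identifier. The expectation can be phrased as a small helper (a sketch of the test's assumption, not pgAdmin code):

    def expected_excl_name(table, column, include_col, server_version):
        """Reproduce the auto-generated exclusion constraint name expected."""
        if server_version < 110000:
            return "%s_%s_excl" % (table, column)
        # v11+: the INCLUDE column becomes part of the generated name.
        return "%s_%s_%s_excl" % (table, column, include_col)

    assert expected_excl_name("t", "id", "name", 100000) == "t_id_excl"
    assert expected_excl_name("t", "id", "name", 110000) == "t_id_name_excl"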
@ -8,6 +8,7 @@
##########################################################################

import uuid
+from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@ -23,12 +24,16 @@ from . import utils as exclusion_utils

class ExclusionConstraintDeleteTestCase(BaseTestGenerator):
    """This class will delete the existing exclusion constraint of a table."""
-    scenarios = [
-        ('Delete Exclusion Constraint Node URL',
-         dict(url='/browser/exclusion_constraint/obj/'))
-    ]
+    url = '/browser/exclusion_constraint/obj/'
+    # Generates scenarios from exclusion_constraint_test_data.json file
+    scenarios = utils.generate_scenarios("exclusion_constraint_delete",
+                                         exclusion_utils.test_cases)

    def setUp(self):
+        # Load test data
+        self.data = self.test_data
+
        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@ -37,6 +42,8 @@ class ExclusionConstraintDeleteTestCase(BaseTestGenerator):
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

+        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@ -44,35 +51,75 @@ class ExclusionConstraintDeleteTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

+        # Create table
        self.table_name = "table_exclusion_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)
-        self.index_name = "test_exclusion_delete_%s" % (str(uuid.uuid4())[1:8])
-        self.index_id = exclusion_utils.create_exclusion_constraint(
-            self.server, self.db_name, self.schema_name, self.table_name,
-            self.index_name
-        )
+
+        # Create constraint to delete
+        self.exclusion_constraint_name = \
+            "test_exclusion_delete_%s" % (str(uuid.uuid4())[1:8])
+        self.exclusion_constraint_id = exclusion_utils.\
+            create_exclusion_constraint(self.server, self.db_name,
+                                        self.schema_name, self.table_name,
+                                        self.exclusion_constraint_name)
+
+        # Cross check constraint creation
+        cross_check_res = exclusion_utils.verify_exclusion_constraint(
+            self.server, self.db_name, self.exclusion_constraint_name)
+        if not cross_check_res:
+            raise Exception("Could not find the exclusion constraint "
+                            "to delete.")
+
+        # In case of multiple constraints
+        if self.is_list:
+            # Create another constraint to delete
+            self.exclusion_constraint_name_2 = \
+                "test_exclconstraint_get_list_%s" % (str(uuid.uuid4())[1:8])
+            self.exclusion_constraint_id_2 = \
+                exclusion_utils.create_exclusion_constraint(
+                    self.server, self.db_name, self.schema_name,
+                    self.table_name,
+                    self.exclusion_constraint_name_2)
+
+            # constraint list to delete
+            self.data['ids'] = [self.exclusion_constraint_id,
+                                self.exclusion_constraint_id_2]

    def runTest(self):
        """This function will delete the exclusion constraint."""
-        index_response = exclusion_utils.verify_exclusion_constraint(
-            self.server, self.db_name, self.index_name)
-        if not index_response:
-            raise Exception("Could not find the constraint to delete.")
-        response = self.tester.delete(self.url + str(utils.SERVER_GROUP) +
-                                      '/' + str(self.server_id) + '/' +
-                                      str(self.db_id) + '/' +
-                                      str(self.schema_id) + '/' +
-                                      str(self.table_id) + '/' +
-                                      str(self.index_id),
-                                      follow_redirects=True)
-        self.assertEquals(response.status_code, 200)
+        if self.is_positive_test:

-        index_response = exclusion_utils.verify_exclusion_constraint(
-            self.server, self.db_name, self.index_name)
-        if index_response:
-            raise Exception("Constraint is not deleted.")
+            if self.is_list:
+                response = exclusion_utils.api_delete(self, '')
+            else:
+                response = exclusion_utils.api_delete(self)
+
+            # Assert response
+            utils.assert_status_code(self, response)
+
+            # Verify in backend
+            cross_chk_res = exclusion_utils.verify_exclusion_constraint(
+                self.server,
+                self.db_name,
+                self.exclusion_constraint_name)
+            self.assertIsNone(cross_chk_res,
+                              "Deleted exclusion constraint still present")
+        else:
+            if self.mocking_required:
+                with patch(self.mock_data["function_name"],
+                           side_effect=[eval(self.mock_data["return_value"])]):
+                    response = exclusion_utils.api_delete(self)
+            elif 'exclusion_constraint_id' in self.data:
+                self.exclusion_constraint_id = \
+                    self.data["exclusion_constraint_id"]
+                response = exclusion_utils.api_delete(self)
+
+            # Assert response
+            utils.assert_status_code(self, response)
+            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
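Note the two call shapes in runTest: api_delete(self) puts a single constraint id in the URL, while api_delete(self, '') leaves the id segment empty and carries self.data['ids'] in the request body instead. Assuming the exclusion utils mirror the check-constraint helpers shown earlier, the resulting URLs look like this (placeholder ids):

    BASE = "/browser/exclusion_constraint/obj/1/1/1/1/1/"

    single = BASE + "42"  # DELETE one constraint by id
    bulk = BASE + ""      # DELETE many; body carries {"ids": [42, 43]}

    print(single)  # .../obj/1/1/1/1/1/42
    print(bulk)    # .../obj/1/1/1/1/1/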
@ -0,0 +1,93 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as exclusion_utils


class ExclusionGetDependenciesDependentsTestCase(BaseTestGenerator):
    """This class will fetch the dependents & dependencies for an existing
    exclusion constraint"""
    url = '/browser/exclusion_constraint/'
    # Generates scenarios from exclusion_constraint_test_data.json file
    scenarios = utils.generate_scenarios(
        "exclusion_constraint_dependencies_dependents",
        exclusion_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_exclusion_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint to fetch
        self.exclusion_constraint_name = \
            "test_exclusion_delete_%s" % (str(uuid.uuid4())[1:8])
        self.exclusion_constraint_id = exclusion_utils. \
            create_exclusion_constraint(self.server, self.db_name,
                                        self.schema_name, self.table_name,
                                        self.exclusion_constraint_name)

        # Cross check constraint creation
        cross_check_res = exclusion_utils.verify_exclusion_constraint(
            self.server, self.db_name, self.exclusion_constraint_name)
        if not cross_check_res:
            raise Exception("Could not find the exclusion constraint "
                            "to fetch.")

    def runTest(self):
        """This function will fetch the dependents & dependencies for a
        constraint of a table."""
        if self.is_positive_test:
            if self.is_dependent:
                self.url = self.url + 'dependent/'
                response = exclusion_utils.api_get(self)
            else:
                self.url = self.url + 'dependency/'
                response = exclusion_utils.api_get(self)

            utils.assert_status_code(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -8,6 +8,7 @@
##########################################################################

import uuid
+from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@ -23,12 +24,17 @@ from . import utils as exclusion_utils

class ExclusionGetTestCase(BaseTestGenerator):
    """This class will fetch the existing exclusion constraint"""
-    scenarios = [
-        ('Fetch Exclusion Constraint',
-         dict(url='/browser/exclusion_constraint/obj/'))
-    ]
+    url = '/browser/exclusion_constraint/obj/'
+
+    # Generates scenarios from exclusion_constraint_test_data.json file
+    scenarios = utils.generate_scenarios("exclusion_constraint_get",
+                                         exclusion_utils.test_cases)

    def setUp(self):
+        # Load test data
+        self.data = self.test_data
+
        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@ -37,6 +43,8 @@ class ExclusionGetTestCase(BaseTestGenerator):
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

+        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@ -44,25 +52,62 @@ class ExclusionGetTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

+        # Create table
        self.table_name = "table_exclusion_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)
-        self.index_name = "test_exclusion_get_%s" % (str(uuid.uuid4())[1:8])
-        self.index_id = exclusion_utils.create_exclusion_constraint(
-            self.server, self.db_name, self.schema_name, self.table_name,
-            self.index_name
-        )
+
+        # Create constraint to fetch
+        self.exclusion_constraint_name = \
+            "test_exclusion_delete_%s" % (str(uuid.uuid4())[1:8])
+        self.exclusion_constraint_id = exclusion_utils.\
+            create_exclusion_constraint(self.server, self.db_name,
+                                        self.schema_name, self.table_name,
+                                        self.exclusion_constraint_name)
+
+        # Cross check constraint creation
+        cross_check_res = exclusion_utils.verify_exclusion_constraint(
+            self.server, self.db_name, self.exclusion_constraint_name)
+        if not cross_check_res:
+            raise Exception("Could not find the exclusion constraint "
+                            "to fetch.")
+
+        # In case of multiple constraints
+        if self.is_list:
+            # Create another constraint to fetch
+            self.exclusion_constraint_name_2 = \
+                "test_exclconstraint_get_list_%s" % (str(uuid.uuid4())[1:8])
+            self.exclusion_constraint_id_2 = \
+                exclusion_utils.create_exclusion_constraint(
+                    self.server, self.db_name, self.schema_name,
+                    self.table_name,
+                    self.exclusion_constraint_name_2)

    def runTest(self):
        """This function will fetch the existing exclusion constraint."""
-        response = self.tester.get(
-            "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP,
-                                                self.server_id, self.db_id,
-                                                self.schema_id, self.table_id,
-                                                self.index_id),
-            follow_redirects=True)
-        self.assertEquals(response.status_code, 200)
+        if self.is_positive_test:
+            if self.is_list:
+                response = exclusion_utils.api_get(self, '')
+            else:
+                response = exclusion_utils.api_get(self)

+            # Assert response
+            utils.assert_status_code(self, response)
+        else:
+            if self.mocking_required:
+                with patch(self.mock_data["function_name"],
+                           side_effect=[eval(self.mock_data["return_value"])]):
+                    response = exclusion_utils.api_get(self)
+            elif 'exclusion_constraint_id' in self.data:
+                self.exclusion_constraint_id = \
+                    self.data["exclusion_constraint_id"]
+                response = exclusion_utils.api_get(self)

+            # Assert response
+            utils.assert_status_code(self, response)
+            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
@ -0,0 +1,102 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as exclusion_utils


class ExclusionGetNodesTestCase(BaseTestGenerator):
    """This class will fetch the nodes for an existing exclusion constraint"""
    url = '/browser/exclusion_constraint/nodes/'
    # Generates scenarios from exclusion_constraint_test_data.json file
    scenarios = utils.generate_scenarios("exclusion_constraint_get_nodes",
                                         exclusion_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_exclusion_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint to fetch
        self.exclusion_constraint_name = \
            "test_exclusion_delete_%s" % (str(uuid.uuid4())[1:8])
        self.exclusion_constraint_id = exclusion_utils. \
            create_exclusion_constraint(self.server, self.db_name,
                                        self.schema_name, self.table_name,
                                        self.exclusion_constraint_name)

        # Cross check constraint creation
        cross_check_res = exclusion_utils.verify_exclusion_constraint(
            self.server, self.db_name, self.exclusion_constraint_name)
        if not cross_check_res:
            raise Exception("Could not find the exclusion constraint "
                            "to fetch.")

    def runTest(self):
        """This function will fetch the nodes for a constraint of a table."""
        if self.is_positive_test:
            if self.is_list:
                response = exclusion_utils.api_get(self, '')
            else:
                response = exclusion_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = exclusion_utils.api_get(self)
            elif 'exclusion_constraint_id' in self.data:
                self.exclusion_constraint_id = \
                    self.data["exclusion_constraint_id"]
                response = exclusion_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -0,0 +1,99 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as exclusion_utils
|
||||
|
||||
|
||||
class ExclusionGetStatisticsTestCase(BaseTestGenerator):
|
||||
"""This class will fetch the statistics existing exclusion constraint"""
|
||||
url = '/browser/exclusion_constraint/stats/'
|
||||
|
||||
    # Generates scenarios from exclusion_constraint_test_data.json file
|
||||
scenarios = utils.generate_scenarios("exclusion_constraint_get_statistics",
|
||||
exclusion_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception("Could not connect to database to add a table.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to add a table.")
|
||||
|
||||
# Create table
|
||||
self.table_name = "table_exclusion_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
|
||||
# Create constraint to fetch
|
||||
self.exclusion_constraint_name = \
|
||||
"test_exclusion_delete_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.exclusion_constraint_id = exclusion_utils.\
|
||||
create_exclusion_constraint(self.server, self.db_name,
|
||||
self.schema_name, self.table_name,
|
||||
self.exclusion_constraint_name)
|
||||
|
||||
# Cross check constraint creation
|
||||
cross_check_res = exclusion_utils.verify_exclusion_constraint(
|
||||
self.server, self.db_name, self.exclusion_constraint_name)
|
||||
if not cross_check_res:
|
||||
raise Exception("Could not find the exclusion constraint "
|
||||
"to delete.")
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch statistics for constraint to table."""
|
||||
if self.is_positive_test:
|
||||
if self.is_list:
|
||||
response = exclusion_utils.api_get(self, '')
|
||||
else:
|
||||
response = exclusion_utils.api_get(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
else:
|
||||
if self.mocking_required:
|
||||
with patch(self.mock_data["function_name"],
|
||||
side_effect=[eval(self.mock_data["return_value"])]):
|
||||
response = exclusion_utils.api_get(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
utils.assert_error_message(self, response)
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@@ -0,0 +1,100 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as exclusion_utils
|
||||
|
||||
|
||||
class ExclusionGetTestCase(BaseTestGenerator):
|
||||
"""This class will fetch modified sql for constraint to existing table"""
|
||||
url = '/browser/exclusion_constraint/msql/'
|
||||
|
||||
    # Generates scenarios from exclusion_constraint_test_data.json file
|
||||
scenarios = utils.generate_scenarios("exclusion_constraint_msql",
|
||||
exclusion_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
super(ExclusionGetTestCase, self).setUp()
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception("Could not connect to database to fetch a check "
|
||||
"constraint.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to fetch a check "
|
||||
"constraint.")
|
||||
|
||||
# Create table
|
||||
self.table_name = "table_exclusion_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server,
|
||||
self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
|
||||
# Create constraint to modify
|
||||
self.exclusion_constraint_name = \
|
||||
"test_exclusion_delete_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.exclusion_constraint_id = exclusion_utils.\
|
||||
create_exclusion_constraint(self.server, self.db_name,
|
||||
self.schema_name, self.table_name,
|
||||
self.exclusion_constraint_name)
|
||||
|
||||
# Cross check constraint creation
|
||||
cross_check_res = exclusion_utils.verify_exclusion_constraint(
|
||||
self.server, self.db_name, self.exclusion_constraint_name)
|
||||
if not cross_check_res:
|
||||
raise Exception("Could not find the exclusion constraint "
|
||||
"to delete.")
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch modified sql for constraint to table."""
|
||||
if self.is_positive_test:
|
||||
req_arg = '?oid=' + str(self.exclusion_constraint_id) + \
|
||||
'&name=' + self.data['name'] + \
|
||||
'&comment=' + self.data['comment'] + \
|
||||
'&fillfactor=' + str(self.data['fillFactor'])
|
||||
|
||||
response = exclusion_utils.api_get_msql(self, req_arg)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
self.assertIn(self.data['comment'], response.json['data'])
|
||||
self.assertIn(self.data['name'], response.json['data'])
|
||||
self.assertIn(str(self.data['fillFactor']), response.json['data'])
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@@ -9,6 +9,7 @@
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
@@ -24,12 +25,16 @@ from . import utils as exclusion_utils
|
||||
|
||||
class IndexesUpdateTestCase(BaseTestGenerator):
|
||||
"""This class will update the existing exclusion constraint."""
|
||||
url = '/browser/exclusion_constraint/obj/'
|
||||
# Generates scenarios from cast_test_data.json file
|
||||
scenarios = utils.generate_scenarios("exclusion_constraint_put",
|
||||
exclusion_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
@@ -38,6 +43,8 @@ class IndexesUpdateTestCase(BaseTestGenerator):
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception("Could not connect to database to add a table.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
@@ -45,32 +52,46 @@ class IndexesUpdateTestCase(BaseTestGenerator):
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to add a table.")
|
||||
self.table_name = "table_column_%s" % (str(uuid.uuid4())[1:8])
|
||||
|
||||
# Create table
|
||||
self.table_name = "test_exclusion_put_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
self.index_name = "test_exclusion_delete_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.index_id = exclusion_utils.create_exclusion_constraint(
|
||||
self.server, self.db_name, self.schema_name, self.table_name,
|
||||
self.index_name
|
||||
)
|
||||
|
||||
# Create constraint to update
|
||||
self.exclusion_constraint_name = \
|
||||
"test_exclusion_put_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.exclusion_constraint_id = exclusion_utils.\
|
||||
create_exclusion_constraint(self.server, self.db_name,
|
||||
self.schema_name, self.table_name,
|
||||
self.exclusion_constraint_name)
|
||||
|
||||
# Cross check constraint creation
|
||||
cross_check_res = exclusion_utils.verify_exclusion_constraint(
|
||||
self.server, self.db_name, self.exclusion_constraint_name)
|
||||
if not cross_check_res:
|
||||
raise Exception("Could not find the exclusion constraint "
|
||||
"to delete.")
|
||||
|
||||
def runTest(self):
|
||||
"""This function will update an existing exclusion constraint"""
|
||||
self.data["oid"] = self.exclusion_constraint_id
|
||||
|
||||
if self.is_positive_test:
|
||||
response = exclusion_utils.api_put(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
else:
|
||||
if self.mocking_required:
|
||||
with patch(self.mock_data["function_name"],
|
||||
side_effect=[eval(self.mock_data["return_value"])]):
|
||||
response = exclusion_utils.api_put(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
utils.assert_error_message(self, response)
|
||||
|
||||
def tearDown(self):
|
||||
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -0,0 +1,99 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as exclusion_utils
|
||||
|
||||
|
||||
class ExclusionGetTestCase(BaseTestGenerator):
|
||||
"""This class will fetch sql for the existing exclusion constraint"""
|
||||
url = '/browser/exclusion_constraint/sql/'
|
||||
    # Generates scenarios from exclusion_constraint_test_data.json file
|
||||
scenarios = utils.generate_scenarios("exclusion_constraint_sql",
|
||||
exclusion_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception("Could not connect to database to add a table.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to add a table.")
|
||||
|
||||
# Create table
|
||||
self.table_name = "table_exclusion_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server, self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
|
||||
# Create constraint to fetch
|
||||
self.exclusion_constraint_name = \
|
||||
"test_exclusion_delete_%s" % (str(uuid.uuid4())[1:8])
|
||||
self.exclusion_constraint_id = exclusion_utils.\
|
||||
create_exclusion_constraint(self.server, self.db_name,
|
||||
self.schema_name, self.table_name,
|
||||
self.exclusion_constraint_name)
|
||||
|
||||
# Cross check constraint creation
|
||||
cross_check_res = exclusion_utils.verify_exclusion_constraint(
|
||||
self.server, self.db_name, self.exclusion_constraint_name)
|
||||
if not cross_check_res:
|
||||
raise Exception("Could not find the exclusion constraint "
|
||||
"to delete.")
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch sql for constraint to table."""
|
||||
if self.is_positive_test:
|
||||
response = exclusion_utils.api_get(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
else:
|
||||
if self.mocking_required:
|
||||
with patch(self.mock_data["function_name"],
|
||||
side_effect=[eval(self.mock_data["return_value"])]):
|
||||
response = exclusion_utils.api_get(self)
|
||||
elif 'exclusion_constraint_id' in self.data:
|
||||
self.exclusion_constraint_id = \
|
||||
self.data["exclusion_constraint_id"]
|
||||
response = exclusion_utils.api_get(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
utils.assert_error_message(self, response)
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@@ -9,10 +9,74 @@
|
||||
|
||||
|
||||
import sys
|
||||
import os
|
||||
import traceback
|
||||
import json
|
||||
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
|
||||
# Load test data from json file.
|
||||
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
|
||||
with open(CURRENT_PATH + "/exclusion_constraint_test_data.json") as data_file:
|
||||
test_cases = json.load(data_file)
|
||||
|
||||
|
||||
# api call methods
|
||||
def api_create(self):
|
||||
return self.tester.post("{0}{1}/{2}/{3}/{4}/{5}/".
|
||||
format(self.url, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id,
|
||||
self.schema_id, self.table_id),
|
||||
data=json.dumps(self.data),
|
||||
content_type='html/json'
|
||||
)
|
||||
|
||||
|
||||
def api_delete(self, exclusion_constraint_id=None):
|
||||
if exclusion_constraint_id is None:
|
||||
exclusion_constraint_id = self.exclusion_constraint_id
|
||||
return self.tester.delete("{0}{1}/{2}/{3}/{4}/{5}/{6}".
|
||||
format(self.url, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id,
|
||||
self.schema_id, self.table_id,
|
||||
exclusion_constraint_id),
|
||||
data=json.dumps(self.data),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
def api_get(self, exclusion_constraint_id=None):
|
||||
if exclusion_constraint_id is None:
|
||||
exclusion_constraint_id = self.exclusion_constraint_id
|
||||
return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}".
|
||||
format(self.url, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id,
|
||||
self.schema_id, self.table_id,
|
||||
exclusion_constraint_id),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
def api_get_msql(self, req_args):
|
||||
return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}{7}".
|
||||
format(self.url, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id,
|
||||
self.schema_id, self.table_id,
|
||||
self.exclusion_constraint_id,
|
||||
req_args),
|
||||
follow_redirects=True
|
||||
)
|
||||
|
||||
|
||||
def api_put(self):
|
||||
return self.tester.put("{0}{1}/{2}/{3}/{4}/{5}/{6}".
|
||||
format(self.url, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id,
|
||||
self.schema_id, self.table_id,
|
||||
self.exclusion_constraint_id),
|
||||
data=json.dumps(self.data),
|
||||
follow_redirects=True)
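
# A minimal usage sketch (not part of the original module): the test classes
# above populate self.url, self.server_id, self.db_id, self.schema_id,
# self.table_id and self.exclusion_constraint_id in setUp(), after which a
# scenario body reduces to:
#
#     response = api_get(self)                  # single-object GET
#     response = api_get(self, '')              # collection GET
#     utils.assert_status_code(self, response)  # compare with expected_data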
|
||||
|
||||
|
||||
def create_exclusion_constraint(server, db_name, schema_name, table_name,
|
||||
key_name):
|
||||
|
@@ -0,0 +1,620 @@
|
||||
{
|
||||
"foreign_key_create": [
|
||||
{
|
||||
"name": "Create: Valid foreign key with autoindex = false.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
"name": "test_foreignkey_add_",
|
||||
"columns": [
|
||||
{
|
||||
"local_column": "id",
|
||||
"references": "<foreign_table_id>",
|
||||
"referenced": "id"
|
||||
}
|
||||
],
|
||||
"confupdtype": "a",
|
||||
"confdeltype": "a",
|
||||
"autoindex": false
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create: Valid foreign key without name with autoindex = true & convalidated=true.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
"columns": [
|
||||
{
|
||||
"local_column": "id",
|
||||
"references": "<foreign_table_id>",
|
||||
"referenced": "id"
|
||||
}
|
||||
],
|
||||
"confupdtype": "a",
|
||||
"confdeltype": "a",
|
||||
"autoindex": true,
|
||||
"convalidated": true
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create: Invalid foreign key without columns parameter",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
"name": "test_foreignkey_add_",
|
||||
"confupdtype": "a",
|
||||
"confdeltype": "a",
|
||||
"autoindex": false
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 400,
|
||||
"error_msg": "Could not find required parameter (columns).",
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create: Foreign key without name & valid data while server is down.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
"columns": [
|
||||
{
|
||||
"local_column": "id",
|
||||
"references": "<foreign_table_id>",
|
||||
"referenced": "id"
|
||||
}
|
||||
],
|
||||
"confupdtype": "a",
|
||||
"confdeltype": "a",
|
||||
"autoindex": false
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
|
||||
"return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create: Foreign key with name in data while server is down .",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
"name": "test_foreignkey_add_",
|
||||
"columns": [
|
||||
{
|
||||
"local_column": "id",
|
||||
"references": "<foreign_table_id>",
|
||||
"referenced": "id"
|
||||
}
|
||||
],
|
||||
"confupdtype": "a",
|
||||
"confdeltype": "a",
|
||||
"autoindex": false
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
|
||||
"return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create: Foreign key with valid data while server is down-2.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
"name": "test_foreignkey_add_",
|
||||
"columns": [
|
||||
{
|
||||
"local_column": "id",
|
||||
"references": "<foreign_table_id>",
|
||||
"referenced": "id"
|
||||
}
|
||||
],
|
||||
"confupdtype": "a",
|
||||
"confdeltype": "a",
|
||||
"autoindex": false
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Create: Foreign key without name while server is down-2.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
"columns": [
|
||||
{
|
||||
"local_column": "id",
|
||||
"references": "<foreign_table_id>",
|
||||
"referenced": "id"
|
||||
}
|
||||
],
|
||||
"confupdtype": "a",
|
||||
"confdeltype": "a",
|
||||
"autoindex": false
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
}
|
||||
}
|
||||
],
|
||||
"foreign_key_delete": [
|
||||
{
|
||||
"name": "Delete: Delete existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
},
|
||||
{
|
||||
"name": "Delete: Delete multiple existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": true
|
||||
},
|
||||
{
|
||||
"name": "Delete: Delete non-existing foreign key.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
"foreign_key_id": 1
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": "Error: Object not found.",
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
},
|
||||
{
|
||||
"name": "Delete: Delete existing foreign key while server down.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False,'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
},
|
||||
{
|
||||
"name": "Delete: Delete existing foreign key while server down-2.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
|
||||
"return_value": "(False,'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
}
|
||||
],
|
||||
"foreign_key_dependencies_dependents": [
|
||||
{
|
||||
"name": "Get foreign key dependents: With existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_dependent": true
|
||||
},
|
||||
{
|
||||
"name": "Get foreign key dependencies: With existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {},
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_dependent": false
|
||||
}
|
||||
],
|
||||
"foreign_key_get": [
|
||||
{
|
||||
"name": "Get foreign key details: For existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
},
|
||||
{
|
||||
"name": "Get foreign key details: For non-existing foreign key.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {
|
||||
"foreign_key_id": 1
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 410,
|
||||
"error_msg": "Could not find the foreign key.",
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
},
|
||||
{
|
||||
"name": "Get foreign key details: For existing foreign key while server down.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False,'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
},
|
||||
{"name": "Get foreign constraint details: For existing multiple foreign keys.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": true
|
||||
}
|
||||
],
|
||||
"foreign_key_get_nodes": [
|
||||
{
|
||||
"name": "Get foreign key node: For existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {},
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
},
|
||||
{
|
||||
"name": "Get foreign key node: For existing foreign key convalidated = true.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {
|
||||
"query": "\"ALTER TABLE %s.%s ADD FOREIGN KEY (id) REFERENCES %s.%s (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE NO ACTION NOT VALID\" % (schema_name, local_table_name, schema_name,foreign_table_name)"
|
||||
},
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
},
|
||||
{
|
||||
"name": "Get foreign key nodes: For existing multiple foreign keys",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {},
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": true
|
||||
}
|
||||
],
|
||||
"foreign_key_msql": [
|
||||
{
|
||||
"name": "Get foreign key msql: Modifying comment existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {},
|
||||
"test_data": {
|
||||
"name": "Modify Name for Exclusion Constraint",
|
||||
"comment": "Testing msql api"
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
}
|
||||
],
|
||||
"foreign_key_put": [
|
||||
{
|
||||
"name": "Put: Update existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {
|
||||
"comment": "This is test comment for foreign key."
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Put: Update existing foreign key convalidated = true.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {
|
||||
"query": "\"ALTER TABLE %s.%s ADD FOREIGN KEY (id) REFERENCES %s.%s (id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE NO ACTION NOT VALID\" % (schema_name, local_table_name, schema_name,foreign_table_name)"
|
||||
},
|
||||
"test_data": {
|
||||
"comment": "This is test comment for foreign key."
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Put: Update existing foreign key while server is down.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {
|
||||
"comment": "This is test comment for index"
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
|
||||
"return_value": "(False,'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Put: Update existing foreign key while server is down-2.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {
|
||||
"comment": "This is test comment for index"
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False,'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
}
|
||||
}
|
||||
],
|
||||
"foreign_key_sql": [
|
||||
{
|
||||
"name": "Get foreign key sql: With existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get foreign key sql: With non-existing foreign key.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {
|
||||
"foreign_key_id": 1
|
||||
},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 410,
|
||||
"error_msg": "Could not find the foreign key.",
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Get foreign key sql: With existing foreign key while server down.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False,'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
}
|
||||
}
|
||||
],
|
||||
"foreign_constraint_validate": [
|
||||
{
|
||||
"name": "Validate foreign key: For existing foreign key.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Validate foreign key: For existing foreign key while server down.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
|
||||
"return_value": "(False,'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Validate foreign key: For existing foreign key while server down-2.",
|
||||
"is_positive_test": false,
|
||||
"inventory_data": {
|
||||
},
|
||||
"test_data": {
|
||||
},
|
||||
"mocking_required": true,
|
||||
"mock_data": {
|
||||
"function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
|
||||
"return_value": "(False,'Mocked Internal Server Error')"
|
||||
},
|
||||
"expected_data": {
|
||||
"status_code": 500,
|
||||
"error_msg": "Mocked Internal Server Error",
|
||||
"test_result_data": {}
|
||||
}
|
||||
}
|
||||
],
|
||||
"foreign_key_get_coveringindex": [
|
||||
{
|
||||
"name": "Get foreign key: Get covering index details.",
|
||||
"is_positive_test": true,
|
||||
"inventory_data": { },
|
||||
"test_data": {},
|
||||
"mocking_required": false,
|
||||
"mock_data": {},
|
||||
"expected_data": {
|
||||
"status_code": 200,
|
||||
"error_msg": null,
|
||||
"test_result_data": {}
|
||||
},
|
||||
"is_list": false
|
||||
}
|
||||
]
|
||||
}
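
Each top-level key in this JSON file is consumed by utils.generate_scenarios(),
which turns every entry into one named scenario whose fields (test_data,
mock_data, expected_data, is_positive_test, ...) become attributes on the
generated test case. The "query" strings under inventory_data are Python
expressions that setUp() forwards to the object-creation helpers, apparently
to be evaluated in place of the default SQL. A minimal sketch of the mapping,
assuming the helper simply iterates the list (the real implementation lives
in regression/python_test_utils and may differ in detail):

    def generate_scenarios(key, test_cases):
        # One (name, attribute-dict) pair per JSON entry; BaseTestGenerator
        # applies the dict entries as attributes on each test instance.
        return [(case["name"], dict(case)) for case in test_cases[key]]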
|
@@ -9,6 +9,7 @@
|
||||
|
||||
import json
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
@@ -19,16 +20,22 @@ from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as fk_utils
|
||||
|
||||
|
||||
class ForeignKeyAddTestCase(BaseTestGenerator):
|
||||
"""This class will add foreign key to existing table"""
|
||||
url = '/browser/foreign_key/obj/'
|
||||
|
||||
# Generates scenarios
|
||||
scenarios = utils.generate_scenarios("foreign_key_create",
|
||||
fk_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
@@ -38,6 +45,8 @@ class ForeignKeyAddTestCase(BaseTestGenerator):
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception("Could not connect to database to add a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
@@ -46,12 +55,16 @@ class ForeignKeyAddTestCase(BaseTestGenerator):
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to add a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create local table
|
||||
self.local_table_name = "table_foreignkey_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.local_table_id = tables_utils.create_table(self.server,
|
||||
self.db_name,
|
||||
self.schema_name,
|
||||
self.local_table_name)
|
||||
|
||||
# Create foreign table
|
||||
self.foreign_table_name = "table_foreignkey_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_table_id = tables_utils.create_table(
|
||||
@@ -60,20 +73,42 @@ class ForeignKeyAddTestCase(BaseTestGenerator):
|
||||
|
||||
def runTest(self):
|
||||
"""This function will add foreign key table column."""
|
||||
foreignkey_name = "test_foreignkey_add_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
data = {"name": foreignkey_name,
|
||||
"columns": [{"local_column": "id",
|
||||
"references": self.foreign_table_id,
|
||||
"referenced": "id"}],
|
||||
"confupdtype": "a", "confdeltype": "a", "autoindex": False}
|
||||
response = self.tester.post(
|
||||
self.url + str(utils.SERVER_GROUP) + '/' +
|
||||
str(self.server_id) + '/' + str(self.db_id) +
|
||||
'/' + str(self.schema_id) + '/' + str(self.local_table_id) + '/',
|
||||
data=json.dumps(data),
|
||||
content_type='html/json')
|
||||
self.assertEquals(response.status_code, 200)
|
||||
# FK name
|
||||
if "name" in self.data:
|
||||
fk_name = self.data["name"] + (str(uuid.uuid4())[1:8])
|
||||
self.data["name"] = fk_name
|
||||
else:
|
||||
fk_name = self.local_table_name + '_' + \
|
||||
self.data["columns"][0]["local_column"] + '_fkey'
|
||||
|
||||
# FK table name
|
||||
if "columns" in self.data:
|
||||
self.data["columns"][0]["references"] = self.foreign_table_id
|
||||
|
||||
if self.is_positive_test:
|
||||
response = fk_utils.api_create(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
|
||||
# Verify in backend
|
||||
cross_check_res = fk_utils.verify_foreignkey(self.server,
|
||||
self.db_name,
|
||||
self.local_table_name,
|
||||
fk_name)
|
||||
self.assertIsNotNone(cross_check_res,
|
||||
"Could not find foreign key.")
|
||||
else:
|
||||
if self.mocking_required:
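                # Here return_value evals to a list of two results, so the
                # first execute_* call fails with the mocked error while a
                # subsequent call would still succeed.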
|
||||
with patch(self.mock_data["function_name"],
|
||||
side_effect=eval(self.mock_data["return_value"])):
|
||||
response = fk_utils.api_create(self)
|
||||
else:
|
||||
response = fk_utils.api_create(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
utils.assert_error_message(self, response)
|
||||
|
||||
def tearDown(self):
|
||||
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -8,6 +8,7 @@
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
@@ -23,12 +24,17 @@ from . import utils as fk_utils
|
||||
|
||||
class ForeignKeyDeleteTestCase(BaseTestGenerator):
|
||||
"""This class will delete foreign key to existing table"""
|
||||
url = '/browser/foreign_key/obj/'
|
||||
|
||||
# Generates scenarios
|
||||
scenarios = utils.generate_scenarios("foreign_key_delete",
|
||||
fk_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
@@ -39,6 +45,8 @@ class ForeignKeyDeleteTestCase(BaseTestGenerator):
|
||||
raise Exception(
|
||||
"Could not connect to database to delete a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
@@ -47,37 +55,65 @@ class ForeignKeyDeleteTestCase(BaseTestGenerator):
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to delete a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create local table
|
||||
self.local_table_name = "local_table_foreignkey_delete_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.local_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name, self.local_table_name)
|
||||
|
||||
# Create foreign table
|
||||
self.foreign_table_name = "foreign_table_foreignkey_delete_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name,
|
||||
self.foreign_table_name)
|
||||
|
||||
# Create foreign key
|
||||
self.foreign_key_name = "test_foreignkey_delete_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_key_id = fk_utils.create_foreignkey(
|
||||
self.server, self.db_name, self.schema_name, self.local_table_name,
|
||||
self.foreign_table_name)
|
||||
|
||||
        # Cross check foreign key creation
        cross_check_res = fk_utils.verify_foreignkey(self.server,
                                                     self.db_name,
                                                     self.local_table_name)
        if not cross_check_res:
            raise Exception("Could not find the foreign key constraint to "
                            "delete.")

def runTest(self):
|
||||
"""This function will delete foreign key attached to table column."""
|
||||
if self.is_positive_test:
|
||||
if self.is_list:
|
||||
self.data['ids'] = [self.foreign_key_id]
|
||||
response = fk_utils.api_delete(self, '')
|
||||
else:
|
||||
response = fk_utils.api_delete(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
|
||||
# Verify in backend
|
||||
cross_check_res = fk_utils.verify_foreignkey(self.server,
|
||||
self.db_name,
|
||||
self.local_table_name)
|
||||
self.assertIsNone(cross_check_res,
|
||||
"Deleted foreign key still present")
|
||||
else:
|
||||
if self.mocking_required:
|
||||
with patch(self.mock_data["function_name"],
|
||||
side_effect=[eval(self.mock_data["return_value"])]):
|
||||
response = fk_utils.api_delete(self)
|
||||
elif 'foreign_key_id' in self.data:
|
||||
self.foreign_key_id = self.data["foreign_key_id"]
|
||||
response = fk_utils.api_delete(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
utils.assert_error_message(self, response)
|
||||
|
||||
def tearDown(self):
|
||||
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -0,0 +1,96 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as fk_utils
|
||||
|
||||
|
||||
class ForeignKeyGetDependenciesDependentsTestCase(BaseTestGenerator):
|
||||
"""This class will fetch foreign key dependencies/dependents from
|
||||
existing table """
|
||||
url = '/browser/foreign_key/'
|
||||
|
||||
# Generates scenarios
|
||||
scenarios = utils.generate_scenarios("foreign_key_dependencies_dependents",
|
||||
fk_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to fetch a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to fetch a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create local table
|
||||
self.local_table_name = "local_table_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.local_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name, self.local_table_name)
|
||||
|
||||
# Create foreign table
|
||||
self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name,
|
||||
self.foreign_table_name)
|
||||
|
||||
# Create foreign key
|
||||
self.foreign_key_name = "test_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_key_id = fk_utils.create_foreignkey(
|
||||
self.server, self.db_name, self.schema_name, self.local_table_name,
|
||||
self.foreign_table_name)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch foreign key dependencies/dependents
|
||||
attached to table column. """
|
||||
if self.is_positive_test:
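            # The same base URL serves both directions: 'dependent/' lists
            # objects that depend on this constraint, while 'dependency/'
            # lists the objects this constraint depends on.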
|
||||
if self.is_dependent:
|
||||
self.url = self.url + 'dependent/'
|
||||
response = fk_utils.api_get(self)
|
||||
else:
|
||||
self.url = self.url + 'dependency/'
|
||||
response = fk_utils.api_get(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@@ -8,6 +8,7 @@
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
@@ -21,14 +22,19 @@ from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as fk_utils
|
||||
|
||||
|
||||
class ForeignKeyGetTestCase(BaseTestGenerator):
|
||||
"""This class will fetch foreign key from existing table"""
|
||||
url = '/browser/foreign_key/obj/'
|
||||
|
||||
# Generates scenarios
|
||||
scenarios = utils.generate_scenarios("foreign_key_get",
|
||||
fk_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
@@ -39,6 +45,8 @@ class ForeignGetDeleteTestCase(BaseTestGenerator):
|
||||
raise Exception(
|
||||
"Could not connect to database to fetch a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
@@ -47,15 +55,21 @@ class ForeignGetDeleteTestCase(BaseTestGenerator):
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to fetch a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create local table
|
||||
self.local_table_name = "local_table_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.local_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name, self.local_table_name)
|
||||
|
||||
# Create foreign table
|
||||
self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name,
|
||||
self.foreign_table_name)
|
||||
|
||||
# Create foreign key
|
||||
self.foreign_key_name = "test_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_key_id = fk_utils.create_foreignkey(
|
||||
@@ -63,16 +77,28 @@ class ForeignGetDeleteTestCase(BaseTestGenerator):
|
||||
self.foreign_table_name)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will delete foreign key attached to table column."""
|
||||
response = self.tester.get(
|
||||
"{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id,
|
||||
self.schema_id,
|
||||
self.local_table_id,
|
||||
self.foreign_key_id),
|
||||
follow_redirects=True
|
||||
)
|
||||
self.assertEquals(response.status_code, 200)
|
||||
"""This function will fetch foreign key attached to table column."""
|
||||
if self.is_positive_test:
|
||||
if self.is_list:
|
||||
self.data['ids'] = [self.foreign_key_id]
|
||||
response = fk_utils.api_get(self, '')
|
||||
else:
|
||||
response = fk_utils.api_get(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
else:
|
||||
if self.mocking_required:
|
||||
with patch(self.mock_data["function_name"],
|
||||
side_effect=[eval(self.mock_data["return_value"])]):
|
||||
response = fk_utils.api_get(self)
|
||||
elif "foreign_key_id" in self.data:
|
||||
self.foreign_key_id = self.data["foreign_key_id"]
|
||||
response = fk_utils.api_get(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
utils.assert_error_message(self, response)
|
||||
|
||||
def tearDown(self):
|
||||
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -0,0 +1,79 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as fk_utils
|
||||
|
||||
|
||||
class ForeignKeyGetCoveringIndexTestCase(BaseTestGenerator):
|
||||
"""This class will fetch foreign key covering index from existing table"""
|
||||
url = '/browser/foreign_key/get_coveringindex/'
|
||||
|
||||
# Generates scenarios
|
||||
scenarios = utils.generate_scenarios("foreign_key_get_coveringindex",
|
||||
fk_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to fetch a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to fetch a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create local table
|
||||
self.local_table_name = "local_table_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.local_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name, self.local_table_name)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will delete foreign key covering index attached to
|
||||
table column. """
|
||||
if self.is_positive_test:
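            # '?cols=%5B%22id%22%5D' is the URL-encoded form of
            # '?cols=["id"]', i.e. the JSON column list the covering-index
            # lookup expects.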
|
||||
req_arg = '?cols=%5B%22id%22%5D'
|
||||
            covering_idx_name = self.local_table_name + '_id_key'
|
||||
response = fk_utils.api_get_converging_index(self, req_arg)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
            self.assertIn(covering_idx_name, str(response.data))
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@@ -0,0 +1,107 @@
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
from unittest.mock import patch
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from . import utils as fk_utils
|
||||
|
||||
|
||||
class ForeignKeyGetNodesTestCase(BaseTestGenerator):
|
||||
"""This class will fetch foreign key nodes from existing table"""
|
||||
url = '/browser/foreign_key/nodes/'
|
||||
|
||||
# Generates scenarios
|
||||
scenarios = utils.generate_scenarios("foreign_key_get_nodes",
|
||||
fk_utils.test_cases)
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception(
|
||||
"Could not connect to database to fetch a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to fetch a foreign "
|
||||
"key constraint.")
|
||||
|
||||
# Create local table
|
||||
self.local_table_name = "local_table_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.local_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name, self.local_table_name)
|
||||
|
||||
# Create foreign table
|
||||
self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name,
|
||||
self.foreign_table_name)
|
||||
|
||||
# Create foreign key
|
||||
self.foreign_key_name = "test_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
if "query" in self.inventory_data:
|
||||
query = self.inventory_data["query"]
|
||||
else:
|
||||
query = None
|
||||
|
||||
self.foreign_key_id = fk_utils.create_foreignkey(
|
||||
self.server, self.db_name, self.schema_name, self.local_table_name,
|
||||
self.foreign_table_name, query)
|
||||
|
||||
def runTest(self):
|
||||
"""This function will fetch foreign key nodes attached to table
|
||||
column. """
|
||||
if self.is_positive_test:
|
||||
if self.is_list:
|
||||
response = fk_utils.api_get(self, '')
|
||||
else:
|
||||
response = fk_utils.api_get(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
else:
|
||||
if self.mocking_required:
|
||||
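                # mock_data["return_value"] is stored as a string in the
                # JSON test data, so it is eval()'d into a Python object
                # before being used as the patch side effect.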
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = fk_utils.api_get(self)

                # Assert response
                utils.assert_status_code(self, response)
                utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -0,0 +1,97 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as fk_utils


class ForeignKeyGetMsqlTestCase(BaseTestGenerator):
    """This class will fetch modified sql for foreign key
    from existing table"""
    url = '/browser/foreign_key/msql/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("foreign_key_msql",
                                         fk_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception(
                "Could not connect to database to fetch a foreign "
                "key constraint.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to fetch a foreign "
                            "key constraint.")

        # Create local table
        self.local_table_name = "local_table_foreignkey_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        self.local_table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name, self.local_table_name)

        # Create foreign table
        self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \
                                  (str(uuid.uuid4())[1:8])
        self.foreign_table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name,
            self.foreign_table_name)

        # Create foreign key
        self.foreign_key_name = "test_foreignkey_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        self.foreign_key_id = fk_utils.create_foreignkey(
            self.server, self.db_name, self.schema_name, self.local_table_name,
            self.foreign_table_name)

    def runTest(self):
        """This function will fetch modified sql for foreign key attached to
        table column. """
        if self.is_positive_test:
            url_encode_data = {"oid": self.foreign_key_id,
                               "comment": self.data['comment'],
                               "name": self.data['name']}

            response = fk_utils.api_get_msql(self, url_encode_data)

            # Assert response
            utils.assert_status_code(self, response)
            self.assertIn(self.data['comment'], response.json['data'])
            self.assertIn(self.data['name'], response.json['data'])

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -9,6 +9,7 @@

import json
import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@@ -22,14 +23,19 @@ from regression.python_test_utils import test_utils as utils
from . import utils as fk_utils


class ForeignPutDeleteTestCase(BaseTestGenerator):
class ForeignKeyPutTestCase(BaseTestGenerator):
    """This class will update foreign key from existing table"""
    scenarios = [
        ('Fetch foreign Key constraint.',
         dict(url='/browser/foreign_key/obj/'))
    ]
    url = '/browser/foreign_key/obj/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("foreign_key_put",
                                         fk_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@@ -40,6 +46,8 @@ class ForeignPutDeleteTestCase(BaseTestGenerator):
            raise Exception(
                "Could not connect to database to fetch a foreign "
                "key constraint.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@@ -48,35 +56,50 @@ class ForeignPutDeleteTestCase(BaseTestGenerator):
        if not schema_response:
            raise Exception("Could not find the schema to fetch a foreign "
                            "key constraint.")

        # Create local table
        self.local_table_name = "local_table_foreignkey_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        self.local_table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name, self.local_table_name)

        # Create foreign table
        self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \
                                  (str(uuid.uuid4())[1:8])
        self.foreign_table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name,
            self.foreign_table_name)

        # Create foreign key
        self.foreign_key_name = "test_foreignkey_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        if "query" in self.inventory_data:
            query = self.inventory_data["query"]
        else:
            query = None

        self.foreign_key_id = fk_utils.create_foreignkey(
            self.server, self.db_name, self.schema_name, self.local_table_name,
            self.foreign_table_name)
            self.foreign_table_name, query)

    def runTest(self):
        """This function will update foreign key attached to table column."""
        data = {"oid": self.foreign_key_id,
                "comment": "This is TEST comment for foreign key constraint."
                }
        response = self.tester.put(
            "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP,
                                                self.server_id, self.db_id,
                                                self.schema_id,
                                                self.local_table_id,
                                                self.foreign_key_id),
            data=json.dumps(data),
            follow_redirects=True)
        self.assertEquals(response.status_code, 200)
        self.data["oid"] = self.foreign_key_id

        if self.is_positive_test:
            response = fk_utils.api_put(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = fk_utils.api_put(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
@@ -0,0 +1,102 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as fk_utils


class ForeignKeyGetSqlTestCase(BaseTestGenerator):
    """This class will fetch foreign key sql from existing table"""
    url = '/browser/foreign_key/sql/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("foreign_key_sql",
                                         fk_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception(
                "Could not connect to database to fetch a foreign "
                "key constraint.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to fetch a foreign "
                            "key constraint.")

        # Create local table
        self.local_table_name = "local_table_foreignkey_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        self.local_table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name, self.local_table_name)

        # Create foreign table
        self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \
                                  (str(uuid.uuid4())[1:8])
        self.foreign_table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name,
            self.foreign_table_name)

        # Create foreign key
        self.foreign_key_name = "test_foreignkey_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        self.foreign_key_id = fk_utils.create_foreignkey(
            self.server, self.db_name, self.schema_name, self.local_table_name,
            self.foreign_table_name)

    def runTest(self):
        """This function will fetch foreign key sql attached to table
        column. """
        if self.is_positive_test:
            response = fk_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = fk_utils.api_get(self)
            elif 'foreign_key_id' in self.data:
                self.foreign_key_id = self.data["foreign_key_id"]
                response = fk_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -0,0 +1,97 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as fk_utils


class ForeignKeyValidateTestCase(BaseTestGenerator):
    url = '/browser/foreign_key/validate/'

    # Generates scenarios from foreign_key_test_data.json file
    scenarios = utils.generate_scenarios("foreign_constraint_validate",
                                         fk_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception(
                "Could not connect to database to fetch a foreign "
                "key constraint.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to fetch a foreign "
                            "key constraint.")

        # Create local table
        self.local_table_name = "local_table_foreignkey_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        self.local_table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name, self.local_table_name)

        # Create foreign table
        self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \
                                  (str(uuid.uuid4())[1:8])
        self.foreign_table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name,
            self.foreign_table_name)

        # Create foreign key
        self.foreign_key_name = "test_foreignkey_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        self.foreign_key_id = fk_utils.create_foreignkey(
            self.server, self.db_name, self.schema_name, self.local_table_name,
            self.foreign_table_name)

    def runTest(self):
        """This function will validate the foreign key constraint of the
        table."""
        if self.is_positive_test:
            response = fk_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = fk_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -9,13 +9,93 @@


import sys
import os
import json
import traceback
from urllib.parse import urlencode

from regression.python_test_utils import test_utils as utils

# Load test data from json file.
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
with open(CURRENT_PATH + "/foreign_key_test_data.json") as data_file:
    test_cases = json.load(data_file)
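# test_cases maps scenario-group keys (e.g. "foreign_key_put") to lists
# of scenario definitions consumed by utils.generate_scenarios() in the
# test modules of this package.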


# API call methods
def api_create(self):
    return self.tester.post("{0}{1}/{2}/{3}/{4}/{5}/".
                            format(self.url, utils.SERVER_GROUP,
                                   self.server_id, self.db_id,
                                   self.schema_id, self.local_table_id),
                            data=json.dumps(self.data),
                            content_type='html/json'
                            )


def api_delete(self, foreign_key_id=None):
    if foreign_key_id is None:
        foreign_key_id = self.foreign_key_id
    return self.tester.delete("{0}{1}/{2}/{3}/{4}/{5}/{6}".
                              format(self.url, utils.SERVER_GROUP,
                                     self.server_id, self.db_id,
                                     self.schema_id,
                                     self.local_table_id,
                                     foreign_key_id),
                              data=json.dumps(self.data),
                              follow_redirects=True
                              )


def api_get(self, foreign_key_id=None):
    if foreign_key_id is None:
        foreign_key_id = self.foreign_key_id
    return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}".
                           format(self.url, utils.SERVER_GROUP,
                                  self.server_id, self.db_id,
                                  self.schema_id,
                                  self.local_table_id,
                                  foreign_key_id),
                           data=json.dumps(self.data),
                           follow_redirects=True
                           )


def api_get_msql(self, url_encode_data):
    return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}?{7}".
                           format(self.url, utils.SERVER_GROUP,
                                  self.server_id, self.db_id,
                                  self.schema_id, self.local_table_id,
                                  self.foreign_key_id,
                                  urlencode(url_encode_data)),
                           follow_redirects=True
                           )


def api_put(self):
    return self.tester.put("{0}{1}/{2}/{3}/{4}/{5}/{6}".
                           format(self.url, utils.SERVER_GROUP,
                                  self.server_id, self.db_id,
                                  self.schema_id,
                                  self.local_table_id,
                                  self.foreign_key_id),
                           data=json.dumps(self.data),
                           follow_redirects=True
                           )


def api_get_converging_index(self, req_args):
    return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}".
                           format(self.url, utils.SERVER_GROUP,
                                  self.server_id, self.db_id,
                                  self.schema_id, self.local_table_id,
                                  req_args),
                           follow_redirects=True
                           )


def create_foreignkey(server, db_name, schema_name, local_table_name,
                      foreign_table_name):
                      foreign_table_name, query_val=None):
    """
    This function creates a foreign key constraint under the provided table.
    :param server: server details
@@ -31,6 +111,13 @@ def create_foreignkey(server, db_name, schema_name, local_table_name,
    :return table_id: table id
    :rtype: int
    """
    if query_val is None:
        query = "ALTER TABLE %s.%s ADD FOREIGN KEY (id) REFERENCES %s.%s " \
                "(id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE NO ACTION" % \
                (schema_name, local_table_name, schema_name,
                 foreign_table_name)
    else:
        query = eval(query_val)
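        # query_val comes from the JSON test data as a string containing a
        # Python expression (it may reference local names such as
        # schema_name or local_table_name), so it is evaluated here rather
        # than used verbatim.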
    try:
        connection = utils.get_db_connection(db_name,
                                             server['username'],
@@ -41,11 +128,7 @@ def create_foreignkey(server, db_name, schema_name, local_table_name,
        old_isolation_level = connection.isolation_level
        connection.set_isolation_level(0)
        pg_cursor = connection.cursor()
        query = "ALTER TABLE %s.%s ADD FOREIGN KEY (id) REFERENCES %s.%s " \
                "(id) MATCH SIMPLE ON UPDATE NO ACTION ON DELETE NO ACTION" % \
                (
                    schema_name, local_table_name, schema_name,
                    foreign_table_name)

        pg_cursor.execute(query)
        connection.set_isolation_level(old_isolation_level)
        connection.commit()
@@ -61,7 +144,7 @@ def create_foreignkey(server, db_name, schema_name, local_table_name,
        traceback.print_exc(file=sys.stderr)


def verify_foreignkey(server, db_name, local_table_name):
def verify_foreignkey(server, db_name, local_table_name, fk_name=None):
    """
    This function verifies whether the foreign key constraint exists or not.
    :param server: server details
@@ -73,6 +156,10 @@ def verify_foreignkey(server, db_name, local_table_name):
    :return table: table record from database
    :rtype: tuple
    """
    if fk_name is None:
        conname = local_table_name + '_id_fkey'
    else:
        conname = fk_name
    try:
        connection = utils.get_db_connection(db_name,
                                             server['username'],
@@ -82,8 +169,7 @@ def verify_foreignkey(server, db_name, local_table_name):
                                             server['sslmode'])
        pg_cursor = connection.cursor()
        pg_cursor.execute(
            "SELECT oid FROM pg_constraint where conname='%s_id_fkey'" %
            local_table_name)
            "SELECT oid FROM pg_constraint where conname='%s'" % conname)
        fk_record = pg_cursor.fetchone()
        connection.close()
        return fk_record
@@ -0,0 +1,910 @@
{
  "index_constraint_create": [
    {
      "name": "Create: Add primary key constraint to table.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "spcname": "pg_default",
        "columns": [{"column": "id"}],
        "comment": "this is pk constraint"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add unique key constraint to table.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "spcname": "pg_default",
        "columns": [{"column": "id"}]
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add primary key constraint to table without name using index.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "spcname": "pg_default",
        "index": "test_primaryindex_add_"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add unique key constraint to table without name using index.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "spcname": "pg_default",
        "index": "test_uniquekey_add_"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add primary key constraint with insufficient data.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "spcname": "pg_default"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 400,
        "error_msg": "Could not find at least one required parameter (index).",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add primary key constraint while server down.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "spcname": "pg_default",
        "columns": [{"column": "id"}]
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add unique key constraint without name while server down.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "spcname": "pg_default",
        "columns": [{"column": "id"}]
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add primary key constraint while server down.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "spcname": "pg_default",
        "columns": [{"column": "id"}]
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    },
    {
      "name": "Create: Add Unique constraint without name while server down.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "spcname": "pg_default",
        "columns": [{"column": "id"}]
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      }
    }
  ],
  "index_constraint_delete": [
    {
      "name": "Delete: Delete primary key constraint.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Delete multiple primary key constraint.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Delete: Delete non-existing primary key constraint.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY",
        "index_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": "Error: Object not found.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Delete Unique key constraint.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Delete multiple Unique key constraint.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Delete: Delete non-existing Unique key constraint.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "UNIQUE",
        "index_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": "Error: Object not found.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Delete primary key constraint while server down.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Delete: Delete Unique key constraint while server down-2.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "index_constraint_get": [
    {
      "name": "Get: Fetch primary key constraint.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch multiple primary key constraint.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Get: Fetch non-existing primary key constraint.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY",
        "index_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "Could not find the primary key in the table.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch Unique key constraint.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch multiple Unique key constraint.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Get: Fetch non-existing Unique key constraint.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE",
        "index_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "Could not find the unique key in the table.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch primary key constraint while server down.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "index_constraint_dependencies_dependents": [
    {
      "name": "Get: Dependents primary key constraint.",
      "url": "/browser/primary_key/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_dependent": true
    },
    {
      "name": "Get: Dependencies primary key constraint.",
      "url": "/browser/primary_key/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_dependent": false
    },
    {
      "name": "Get: Dependents Unique key constraint.",
      "url": "/browser/unique_constraint/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_dependent": true
    },
    {
      "name": "Get: Dependencies Unique key constraint.",
      "url": "/browser/unique_constraint/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_dependent": false
    }
  ],
  "index_constraint_get_nodes": [
    {
      "name": "Get: Fetch primary key constraint.",
      "url": "/browser/primary_key/nodes/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch multiple primary key constraint.",
      "url": "/browser/primary_key/nodes/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Get: Fetch Unique key constraint.",
      "url": "/browser/unique_constraint/nodes/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch multiple Unique key constraint.",
      "url": "/browser/unique_constraint/nodes/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    },
    {
      "name": "Get: Fetch Non-existing Unique key constraint.",
      "url": "/browser/unique_constraint/nodes/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE",
        "index_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "Could not find the unique key in the table.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch primary key constraint while server down.",
      "url": "/browser/primary_key/nodes/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "index_constraint_sql": [
    {
      "name": "Get: Fetch primary key constraint.",
      "url": "/browser/primary_key/sql/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch non-existing primary key constraint.",
      "url": "/browser/primary_key/sql/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY",
        "index_constraint_id": 1
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 410,
        "error_msg": "Could not find the primary key in the table.",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch Unique key constraint.",
      "url": "/browser/unique_constraint/sql/",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch primary key constraint while server down.",
      "url": "/browser/primary_key/sql/",
      "is_positive_test": false,
      "inventory_data": {},
      "test_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "index_constraint_msql": [
    {
      "name": "Get: Fetch primary key constraint.",
      "url": "/browser/primary_key/msql/",
      "is_positive_test": true,
      "inventory_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "test_data": {
        "comment": "This is msql api test",
        "name": "Changing name test"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch Unique key constraint.",
      "url": "/browser/unique_constraint/msql/",
      "is_positive_test": true,
      "inventory_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE"
      },
      "test_data": {
        "comment": "This is msql api test"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "index_constraint_get_statistics": [
    {
      "name": "Get statistics: For primary key.",
      "url": "/browser/primary_key/stats/",
      "is_positive_test": true,
      "inventory_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get statistics: For non-existing primary key.",
      "url": "/browser/primary_key/stats/",
      "is_positive_test": false,
      "inventory_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "test_data": {
        "index_constraint_id": 1
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ],
  "index_constraint_put": [
    {
      "name": "Get: Fetch primary key constraint.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": true,
      "inventory_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "test_data": {
        "comment": "Testing put request api",
        "name": "Updating name for primary key in via put request"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch Unique key constraint.",
      "url": "/browser/unique_constraint/obj/",
      "is_positive_test": true,
      "inventory_data": {
        "constraint_name": "test_uniquekey_add_",
        "type": "UNIQUE"
      },
      "test_data": {
        "comment": "Testing put request api",
        "name": "Updating name for Unique key in via put request"
      },
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch primary key constraint while server down.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": false,
      "inventory_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "test_data": {
        "comment": "Testing put request api",
        "name": "Updating name in via put request"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    },
    {
      "name": "Get: Fetch primary key constraint while server down-2.",
      "url": "/browser/primary_key/obj/",
      "is_positive_test": false,
      "inventory_data": {
        "constraint_name": "test_primarykey_add_",
        "type": "PRIMARY KEY"
      },
      "test_data": {
        "comment": "Testing put request api",
        "name": "Updating name in via put request"
      },
      "mocking_required": true,
      "mock_data": {
        "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict",
        "return_value": "(False,'Mocked Internal Server Error')"
      },
      "expected_data": {
        "status_code": 500,
        "error_msg": "Mocked Internal Server Error",
        "test_result_data": {}
      },
      "is_list": false
    }
  ]
}
@@ -9,6 +9,7 @@

import json
import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@@ -26,25 +27,16 @@ class IndexConstraintAddTestCase(BaseTestGenerator):
    """This class will add index constraint(primary key or unique key) to
    table column"""
    skip_on_database = ['gpdb']
    primary_key_name = "test_primarykey_add_%s" % \
                       (str(uuid.uuid4())[1:8])
    primary_key_data = {"name": primary_key_name,
                        "spcname": "pg_default",
                        "columns": [{"column": "id"}]
                        }
    unique_key_name = "test_uniquekey_add_%s" % \
                      (str(uuid.uuid4())[1:8])
    unique_key_data = {"name": unique_key_name,
                       "spcname": "pg_default",
                       "columns": [{"column": "id"}]}
    scenarios = [
        ('Add primary Key constraint to table',
         dict(url='/browser/primary_key/obj/', data=primary_key_data)),
        ('Add unique Key constraint to table',
         dict(url='/browser/unique_constraint/obj/', data=unique_key_data))
    ]

    # Generates scenarios
    scenarios = utils.generate_scenarios("index_constraint_create",
                                         index_constraint_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@@ -54,6 +46,8 @@ class IndexConstraintAddTestCase(BaseTestGenerator):
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a "
                            "index constraint(primary key or unique key).")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@@ -62,6 +56,8 @@ class IndexConstraintAddTestCase(BaseTestGenerator):
        if not schema_response:
            raise Exception("Could not find the schema to add a index "
                            "constraint(primary key or unique key).")

        # Create table
        self.table_name = "table_indexconstraint_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
@@ -72,87 +68,56 @@ class IndexConstraintAddTestCase(BaseTestGenerator):
    def runTest(self):
        """This function will add index constraint(primary key or unique key)
        to table column."""
        response = self.tester.post(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(self.db_id) +
            '/' + str(self.schema_id) + '/' + str(self.table_id) + '/',
            data=json.dumps(self.data),
            content_type='html/json')
        self.assertEquals(response.status_code, 200)
        # Create using index
        if "index" in self.data:
            index_name = self.data["index"] + (str(uuid.uuid4())[1:8])
            self.data["index"] = index_name

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)


class ConstraintsUsingIndexAddTestCase(BaseTestGenerator):
    """This class will add the constraint(primary key or unique key) to the
    table column using newly created index"""
    skip_on_database = ['gpdb']

    primary_key_name = "test_primarykey_add_%s" % (str(uuid.uuid4())[1:8])
    primary_index_name = "test_primaryindex_add_%s" % (str(uuid.uuid4())[1:8])
    primary_key_data = {
        "name": primary_key_name,
        "spcname": "pg_default",
        "columns": [],
        "index": primary_index_name
    }

    unique_key_name = "test_uniquekey_add_%s" % (str(uuid.uuid4())[1:8])
    unique_index_name = "test_uniqueindex_add_%s" % (str(uuid.uuid4())[1:8])
    unique_key_data = {
        "name": unique_key_name,
        "spcname": "pg_default",
        "columns": [],
        "index": unique_index_name
    }

    scenarios = [
        ('Add primary Key constraint to table using index',
         dict(url='/browser/primary_key/obj/', data=primary_key_data)),
        ('Add unique Key constraint to table using index',
         dict(url='/browser/unique_constraint/obj/', data=unique_key_data))
    ]

    def setUp(self):
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a "
                            "constraint using index.")
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a index "
                            "constraint(primary key or unique key).")
        self.table_name = "table_constraint_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

    def runTest(self):
        """This function will add index constraint(primary key or unique key)
        to table column."""
        self.index_id = \
            index_constraint_utils.create_unique_index(
        self.index_id = index_constraint_utils.create_unique_index(
            self.server, self.db_name, self.schema_name, self.table_name,
            self.data["index"], "name")
            index_name, "name")

        response = self.tester.post(
            self.url + str(utils.SERVER_GROUP) + '/' +
            str(self.server_id) + '/' + str(self.db_id) +
            '/' + str(self.schema_id) + '/' + str(self.table_id) + '/',
            data=json.dumps(self.data),
            content_type='html/json')
        self.assertEquals(response.status_code, 200)
        # Constraint name
        if "constraint_name" in self.data:
            constraint_name = self.data["constraint_name"] + (
                str(uuid.uuid4())[1:8])
            self.data["name"] = constraint_name
        else:
if "index" in self.data:
|
||||
if "primary_key" in self.url:
|
||||
constraint_name = index_name
|
||||
else:
|
||||
constraint_name = self.table_name + '_id_key'
|
||||
else:
|
||||
if "primary_key" in self.url:
|
||||
constraint_name = self.table_name + '_pkey'
|
||||
elif "columns" in self.data:
|
||||
constraint_name = self.table_name + '_' + \
|
||||
self.data["columns"][0]["column"] + '_key'
|
||||
|
||||
if self.is_positive_test:
|
||||
response = index_constraint_utils.api_create(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
|
||||
# Verify in backend
|
||||
self.assertIsNotNone(index_constraint_utils.
|
||||
verify_index_constraint(self.server,
|
||||
self.db_name,
|
||||
constraint_name),
|
||||
"Could not find constraint created.")
|
||||
else:
|
||||
if self.mocking_required:
|
||||
with patch(self.mock_data["function_name"],
|
||||
side_effect=eval(self.mock_data["return_value"])):
|
||||
response = index_constraint_utils.api_create(self)
|
||||
else:
|
||||
response = index_constraint_utils.api_create(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
utils.assert_error_message(self, response)
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
|
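The data-driven pattern above repeats across every test class in this commit: each scenario is a dict loaded from the module's *_test_data.json file, and runTest() branches on its is_positive_test and mocking_required flags. A minimal, self-contained sketch of that dispatch follows; the fetch_status stand-in and the scenario literal are hypothetical, and only the flag and mock_data key names mirror the suite.

    # sketch_scenario_dispatch.py -- illustrative only, not part of the commit
    from unittest.mock import patch

    def fetch_status():
        """Hypothetical stand-in for a helper such as api_create()/api_get()."""
        return 200

    scenario = {
        "is_positive_test": False,
        "mocking_required": True,
        "mock_data": {"function_name": "__main__.fetch_status",
                      "return_value": "Exception('simulated failure')"},
    }

    if scenario["is_positive_test"]:
        result = fetch_status()          # positive path: call the API directly
    elif scenario["mocking_required"]:
        # Negative path: patch the named function so the call fails on purpose.
        with patch(scenario["mock_data"]["function_name"],
                   side_effect=eval(scenario["mock_data"]["return_value"])):
            try:
                result = fetch_status()
            except Exception as err:
                result = str(err)

    print(result)  # -> simulated failure (when run as a script)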
@ -8,6 +8,7 @@
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@ -25,20 +26,16 @@ class IndexConstraintDeleteTestCase(BaseTestGenerator):
    """This class will delete the index constraint (primary key or unique
    key) of a table column."""
    skip_on_database = ['gpdb']
    primary_key_name = "test_primarykey_delete_%s" % \
                       (str(uuid.uuid4())[1:8])
    unique_key_name = "test_uniquekey_delete_%s" % \
                      (str(uuid.uuid4())[1:8])
    scenarios = [
        ('Delete primary key constraint of table',
         dict(url='/browser/primary_key/obj/', name=primary_key_name,
              type="PRIMARY KEY")),
        ('Delete unique key constraint of table',
         dict(url='/browser/unique_constraint/obj/', name=unique_key_name,
              type="UNIQUE"))
    ]

    # Generates scenarios
    scenarios = utils.generate_scenarios("index_constraint_delete",
                                         index_constraint_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@ -48,6 +45,8 @@ class IndexConstraintDeleteTestCase(BaseTestGenerator):
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add an "
                            "index constraint (primary key or unique key).")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@ -56,29 +55,47 @@ class IndexConstraintDeleteTestCase(BaseTestGenerator):
        if not schema_response:
            raise Exception("Could not find the schema to add an index "
                            "constraint (primary key or unique key).")

        # Create table
        self.table_name = "table_indexconstraint_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)
        # Create constraint
        self.constraint_name = self.data["constraint_name"] + \
            (str(uuid.uuid4())[1:8])
        self.type = self.data["type"]
        self.index_constraint_id = index_constraint_utils. \
            create_index_constraint(self.server, self.db_name,
                                    self.schema_name, self.table_name,
                                    self.constraint_name, self.type)

    def runTest(self):
        """This function will delete the index constraint (primary key or
        unique key) of a table column."""
        index_constraint_id = \
            index_constraint_utils.create_index_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.name, self.type)
        response = self.tester.delete(
            "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP,
                                                self.server_id, self.db_id,
                                                self.schema_id,
                                                self.table_id,
                                                index_constraint_id),
            follow_redirects=True
        )
        self.assertEquals(response.status_code, 200)
        if self.is_positive_test:
            if self.is_list:
                self.data['ids'] = [self.index_constraint_id]
                response = index_constraint_utils.api_delete(self, '')
            else:
                response = index_constraint_utils.api_delete(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = index_constraint_utils.api_delete(self)
            elif 'index_constraint_id' in self.data:
                self.index_constraint_id = self.data["index_constraint_id"]
                response = index_constraint_utils.api_delete(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
@ -0,0 +1,93 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as index_constraint_utils


class IndexConstraintGetDepenedencyDependentTestCase(BaseTestGenerator):
    """This class will fetch the index constraint (primary key or unique key)
    dependencies/dependents of a table column."""
    skip_on_database = ['gpdb']

    # Generates scenarios
    scenarios = utils.generate_scenarios(
        "index_constraint_dependencies_dependents",
        index_constraint_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add an "
                            "index constraint (primary key or unique key).")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add an index "
                            "constraint (primary key or unique key).")

        # Create table
        self.table_name = "table_indexconstraint_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint
        self.constraint_name = self.data["constraint_name"] + \
            (str(uuid.uuid4())[1:8])
        self.type = self.data["type"]
        self.index_constraint_id = index_constraint_utils. \
            create_index_constraint(self.server, self.db_name,
                                    self.schema_name, self.table_name,
                                    self.constraint_name, self.type)

    def runTest(self):
        """This function will fetch the index constraint (primary key or
        unique key) dependencies/dependents of a table column."""
        if self.is_positive_test:
            if self.is_dependent:
                self.url = self.url + 'dependent/'
                response = index_constraint_utils.api_get(self)
            else:
                self.url = self.url + 'dependency/'
                response = index_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -8,6 +8,7 @@
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@ -25,20 +26,16 @@ class IndexConstraintGetTestCase(BaseTestGenerator):
    """This class will fetch the index constraint (primary key or unique key)
    of a table column."""
    skip_on_database = ['gpdb']
    primary_key_name = "test_primarykey_delete_%s" % \
                       (str(uuid.uuid4())[1:8])
    unique_key_name = "test_uniquekey_delete_%s" % \
                      (str(uuid.uuid4())[1:8])
    scenarios = [
        ('Fetch primary key constraint of table',
         dict(url='/browser/primary_key/obj/', name=primary_key_name,
              type="PRIMARY KEY")),
        ('Fetch unique key constraint of table',
         dict(url='/browser/unique_constraint/obj/', name=unique_key_name,
              type="UNIQUE"))
    ]

    # Generates scenarios from the index_constraint_test_data.json file
    scenarios = utils.generate_scenarios("index_constraint_get",
                                         index_constraint_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@ -48,6 +45,8 @@ class IndexConstraintGetTestCase(BaseTestGenerator):
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add an "
                            "index constraint (primary key or unique key).")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@ -56,6 +55,8 @@ class IndexConstraintGetTestCase(BaseTestGenerator):
        if not schema_response:
            raise Exception("Could not find the schema to add an index "
                            "constraint (primary key or unique key).")

        # Create table
        self.table_name = "table_indexconstraint_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
@ -63,22 +64,39 @@ class IndexConstraintGetTestCase(BaseTestGenerator):
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint
        self.constraint_name = self.data["constraint_name"] + \
            (str(uuid.uuid4())[1:8])
        self.type = self.data["type"]
        self.index_constraint_id = index_constraint_utils. \
            create_index_constraint(self.server, self.db_name,
                                    self.schema_name, self.table_name,
                                    self.constraint_name, self.type)

    def runTest(self):
        """This function will fetch the index constraint (primary key or
        unique key) of a table column."""
        index_constraint_id = \
            index_constraint_utils.create_index_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.name, self.type)
        response = self.tester.get(
            "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP,
                                                self.server_id, self.db_id,
                                                self.schema_id,
                                                self.table_id,
                                                index_constraint_id),
            follow_redirects=True
        )
        self.assertEquals(response.status_code, 200)
        if self.is_positive_test:
            if self.is_list:
                self.data['ids'] = [self.index_constraint_id]
                response = index_constraint_utils.api_get(self, '')
            else:
                response = index_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = index_constraint_utils.api_get(self)
            elif "index_constraint_id" in self.data:
                self.index_constraint_id = self.data["index_constraint_id"]
                response = index_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
@ -0,0 +1,102 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as index_constraint_utils


class IndexConstraintGetNodesTestCase(BaseTestGenerator):
    """This class will fetch the index constraint (primary key or unique key)
    nodes of a table column."""
    skip_on_database = ['gpdb']

    # Generates scenarios
    scenarios = utils.generate_scenarios("index_constraint_get_nodes",
                                         index_constraint_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add an "
                            "index constraint (primary key or unique key).")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add an index "
                            "constraint (primary key or unique key).")

        # Create table
        self.table_name = "table_indexconstraint_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint
        self.constraint_name = self.data["constraint_name"] + \
            (str(uuid.uuid4())[1:8])
        self.type = self.data["type"]
        self.index_constraint_id = index_constraint_utils. \
            create_index_constraint(self.server, self.db_name,
                                    self.schema_name, self.table_name,
                                    self.constraint_name, self.type)

    def runTest(self):
        """This function will fetch the index constraint (primary key or
        unique key) nodes of a table column."""
        if self.is_positive_test:
            if self.is_list:
                response = index_constraint_utils.api_get(self, '')
            else:
                response = index_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = index_constraint_utils.api_get(self)
            elif "index_constraint_id" in self.data:
                self.index_constraint_id = self.data["index_constraint_id"]
                response = index_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -0,0 +1,102 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as index_constraint_utils


class IndexConstraintGetStasticsTestCase(BaseTestGenerator):
    """This class will fetch the index constraint (primary key or unique key)
    statistics of a table column."""
    skip_on_database = ['gpdb']

    # Generates scenarios
    scenarios = utils.generate_scenarios("index_constraint_get_statistics",
                                         index_constraint_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add an "
                            "index constraint (primary key or unique key).")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add an index "
                            "constraint (primary key or unique key).")

        # Create table
        self.table_name = "table_indexconstraint_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint
        self.constraint_name = self.inventory_data["constraint_name"] + \
            (str(uuid.uuid4())[1:8])
        self.type = self.inventory_data["type"]
        self.index_constraint_id = index_constraint_utils. \
            create_index_constraint(self.server, self.db_name,
                                    self.schema_name, self.table_name,
                                    self.constraint_name, self.type)

    def runTest(self):
        """This function will fetch the index constraint (primary key or
        unique key) statistics of a table column."""
        if self.is_positive_test:
            if self.is_list:
                response = index_constraint_utils.api_get(self, '')
            else:
                response = index_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = index_constraint_utils.api_get(self)
            elif "index_constraint_id" in self.data:
                self.index_constraint_id = self.data["index_constraint_id"]
                response = index_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -0,0 +1,96 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as index_constraint_utils


class IndexConstraintGetMsqlTestCase(BaseTestGenerator):
    """This class will fetch the index constraint (primary key or unique key)
    modified SQL of a table column."""
    skip_on_database = ['gpdb']

    # Generates scenarios from the index_constraint_test_data.json file
    scenarios = utils.generate_scenarios("index_constraint_msql",
                                         index_constraint_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add an "
                            "index constraint (primary key or unique key).")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add an index "
                            "constraint (primary key or unique key).")

        # Create table
        self.table_name = "table_indexconstraint_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint
        self.constraint_name = self.inventory_data["constraint_name"] + \
            (str(uuid.uuid4())[1:8])
        self.type = self.inventory_data["type"]
        self.index_constraint_id = index_constraint_utils. \
            create_index_constraint(self.server, self.db_name,
                                    self.schema_name, self.table_name,
                                    self.constraint_name, self.type)

    def runTest(self):
        """This function will fetch the index constraint (primary key or
        unique key) modified SQL of a table column."""
        url_encode_data = {"oid": self.index_constraint_id}

        if 'name' in self.data:
            url_encode_data["name"] = self.data['name']

        if 'comment' in self.data:
            url_encode_data["comment"] = self.data['comment']

        if self.is_positive_test:
            response = index_constraint_utils.api_get_msql(self,
                                                           url_encode_data)

            # Assert response
            utils.assert_status_code(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -9,6 +9,7 @@

import json
import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
@ -25,22 +26,15 @@ from . import utils as index_constraint_utils
class IndexConstraintUpdateTestCase(BaseTestGenerator):
    """This class will update the index constraint (primary key or unique
    key) of a table column."""
    skip_on_database = ['gpdb']
    primary_key_name = "test_primarykey_put_%s" % \
                       (str(uuid.uuid4())[1:8])
    unique_key_name = "test_uniquekey_put_%s" % \
                      (str(uuid.uuid4())[1:8])
    data = {"oid": "", "comment": "this is test comment"}
    scenarios = [
        ('Update primary key constraint of table',
         dict(url='/browser/primary_key/obj/', name=primary_key_name,
              type="PRIMARY KEY", data=data)),
        ('Update unique key constraint of table',
         dict(url='/browser/unique_constraint/obj/', name=unique_key_name,
              type="UNIQUE", data=data))
    ]
    # Generates scenarios
    scenarios = utils.generate_scenarios("index_constraint_put",
                                         index_constraint_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@ -50,6 +44,8 @@ class IndexConstraintUpdateTestCase(BaseTestGenerator):
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add an "
                            "index constraint (primary key or unique key).")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@ -58,6 +54,8 @@ class IndexConstraintUpdateTestCase(BaseTestGenerator):
        if not schema_response:
            raise Exception("Could not find the schema to add an index "
                            "constraint (primary key or unique key).")

        # Create table
        self.table_name = "table_indexconstraint_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
@ -65,23 +63,33 @@ class IndexConstraintUpdateTestCase(BaseTestGenerator):
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint
        self.constraint_name = self.inventory_data["constraint_name"] + \
            (str(uuid.uuid4())[1:8])
        self.type = self.inventory_data["type"]
        self.index_constraint_id = index_constraint_utils. \
            create_index_constraint(self.server, self.db_name,
                                    self.schema_name, self.table_name,
                                    self.constraint_name, self.type)

    def runTest(self):
        """This function will update the index constraint (primary key or
        unique key) of a table column."""
        index_constraint_id = \
            index_constraint_utils.create_index_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.name, self.type)
        self.data["oid"] = index_constraint_id
        response = self.tester.put(
            "{0}{1}/{2}/{3}/{4}/{5}/{6}".format(self.url, utils.SERVER_GROUP,
                                                self.server_id, self.db_id,
                                                self.schema_id, self.table_id,
                                                index_constraint_id
                                                ),
            data=json.dumps(self.data),
            follow_redirects=True)
        self.assertEquals(response.status_code, 200)
        self.data["oid"] = self.index_constraint_id
        if self.is_positive_test:
            response = index_constraint_utils.api_put(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = index_constraint_utils.api_put(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
@ -0,0 +1,99 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as index_constraint_utils


class IndexConstraintGetSqlTestCase(BaseTestGenerator):
    """This class will fetch the index constraint (primary key or unique key)
    SQL of a table column."""
    skip_on_database = ['gpdb']

    # Generates scenarios
    scenarios = utils.generate_scenarios("index_constraint_sql",
                                         index_constraint_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add an "
                            "index constraint (primary key or unique key).")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add an index "
                            "constraint (primary key or unique key).")

        # Create table
        self.table_name = "table_indexconstraint_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create constraint
        self.constraint_name = self.data["constraint_name"] + \
            (str(uuid.uuid4())[1:8])
        self.type = self.data["type"]
        self.index_constraint_id = index_constraint_utils. \
            create_index_constraint(self.server, self.db_name,
                                    self.schema_name, self.table_name,
                                    self.constraint_name, self.type)

    def runTest(self):
        """This function will fetch the index constraint (primary key or
        unique key) SQL of a table column."""
        if self.is_positive_test:
            response = index_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=[eval(self.mock_data["return_value"])]):
                    response = index_constraint_utils.api_get(self)
            elif 'index_constraint_id' in self.data:
                self.index_constraint_id = self.data["index_constraint_id"]
                response = index_constraint_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -10,8 +10,79 @@

import sys
import traceback
from urllib.parse import urlencode

from regression.python_test_utils import test_utils as utils
import os
import json

# Load test data from json file.
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
with open(CURRENT_PATH + "/index_constraint_test_data.json") as data_file:
    test_cases = json.load(data_file)


# API method calls
def api_create(self):
    return self.tester.post("{0}{1}/{2}/{3}/{4}/{5}/".
                            format(self.url, utils.SERVER_GROUP,
                                   self.server_id, self.db_id,
                                   self.schema_id, self.table_id),
                            data=json.dumps(self.data),
                            content_type='html/json'
                            )


def api_delete(self, index_constraint_id=None):
    if index_constraint_id is None:
        index_constraint_id = self.index_constraint_id
    return self.tester.delete("{0}{1}/{2}/{3}/{4}/{5}/{6}".
                              format(self.url, utils.SERVER_GROUP,
                                     self.server_id, self.db_id,
                                     self.schema_id,
                                     self.table_id,
                                     index_constraint_id),
                              data=json.dumps(self.data),
                              follow_redirects=True
                              )


def api_get(self, index_constraint_id=None):
    if index_constraint_id is None:
        index_constraint_id = self.index_constraint_id
    return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}".
                           format(self.url, utils.SERVER_GROUP,
                                  self.server_id, self.db_id,
                                  self.schema_id,
                                  self.table_id,
                                  index_constraint_id),
                           data=json.dumps(self.data),
                           follow_redirects=True
                           )


def api_get_msql(self, url_encode_data):
    return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/{6}?{7}".
                           format(self.url, utils.SERVER_GROUP,
                                  self.server_id, self.db_id,
                                  self.schema_id,
                                  self.table_id,
                                  self.index_constraint_id,
                                  urlencode(url_encode_data)),
                           data=json.dumps(self.data),
                           follow_redirects=True
                           )


def api_put(self):
    return self.tester.put("{0}{1}/{2}/{3}/{4}/{5}/{6}".
                           format(self.url, utils.SERVER_GROUP,
                                  self.server_id, self.db_id,
                                  self.schema_id, self.table_id,
                                  self.index_constraint_id
                                  ), data=json.dumps(self.data),
                           follow_redirects=True
                           )


def create_index_constraint(server, db_name, schema_name, table_name,
@ -59,15 +130,14 @@ def create_index_constraint(server, db_name, schema_name, table_name,
        traceback.print_exc(file=sys.stderr)


def verify_index_constraint(server, db_name, table_name):
def verify_index_constraint(server, db_name, constraint_name):
    """
    This function verifies whether the index constraint (PK or UK) exists.
    :param constraint_name:
    :param server: server details
    :type server: dict
    :param db_name: database name
    :type db_name: str
    :param table_name: table name
    :type table_name: str
    :return index_constraint: index constraint record from database
    :rtype: tuple
    """
@ -81,7 +151,7 @@ def verify_index_constraint(server, db_name, table_name):
        pg_cursor = connection.cursor()
        pg_cursor.execute(
            "SELECT oid FROM pg_constraint where conname='%s'" %
            table_name)
            constraint_name)
        index_constraint = pg_cursor.fetchone()
        connection.close()
        return index_constraint
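One hardening note on verify_index_constraint (and the similar lookup in the constraints utils later in this commit): the helpers interpolate the constraint name into the SQL string with %-formatting, which is fine for trusted test data but fragile with quoting. DB-API drivers such as psycopg2 accept bound parameters instead. A minimal sketch, assuming `connection` is the kind of connection utils.get_db_connection returns; fetch_constraint_oid is a hypothetical name, not part of the commit.

    def fetch_constraint_oid(connection, constraint_name):
        # The driver quotes the bound value itself; no manual interpolation.
        pg_cursor = connection.cursor()
        pg_cursor.execute(
            "SELECT oid FROM pg_constraint WHERE conname = %s",
            (constraint_name,))
        row = pg_cursor.fetchone()  # None when no such constraint exists
        pg_cursor.close()
        return row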
@ -0,0 +1,48 @@
{
  "constraints_get": [
    {
      "name": "Get constraints list: For existing multiple constraint ids.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    }
  ],
  "constraints_get_nodes": [
    {
      "name": "Get constraints nodes: For existing multiple constraint ids.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    }
  ],
  "constraints_delete": [
    {
      "name": "Delete constraints nodes: For existing multiple constraint ids.",
      "is_positive_test": true,
      "inventory_data": {},
      "test_data": {},
      "mocking_required": false,
      "mock_data": {},
      "expected_data": {
        "status_code": 200,
        "error_msg": null,
        "test_result_data": {}
      },
      "is_list": true
    }
  ]
}
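Each top-level key in this file ("constraints_get", "constraints_get_nodes", "constraints_delete") feeds one test class through utils.generate_scenarios(key, test_cases). The helper's body is outside this diff, so the following is only a plausible sketch of the contract its callers rely on: a list of (name, attrs) pairs whose attrs surface on the test instance as self.test_data, self.is_positive_test, and so on.

    # Hypothetical reimplementation of the generate_scenarios contract; the
    # real helper lives in regression/python_test_utils/test_utils.py and may
    # differ in detail.
    def generate_scenarios(key, test_cases):
        scenarios = []
        for case in test_cases.get(key, []):
            attrs = dict(case)  # name, test_data, expected_data, mock_data...
            # Default the flags so runTest() can branch unconditionally.
            attrs.setdefault("is_positive_test", False)
            attrs.setdefault("mocking_required", False)
            attrs.setdefault("is_list", False)
            scenarios.append((case["name"], attrs))
        return scenarios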
@ -19,24 +19,30 @@ from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
from pgadmin.browser.server_groups.servers.databases.schemas.tables. \
    constraints.check_constraint.tests import utils as chk_constraint_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
from pgadmin.browser.server_groups.servers.databases.schemas.tables. \
    constraints.exclusion_constraint.tests import utils as exclusion_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
from pgadmin.browser.server_groups.servers.databases.schemas.tables. \
    constraints.foreign_key.tests import utils as fk_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
from pgadmin.browser.server_groups.servers.databases.schemas.tables. \
    constraints.index_constraint.tests import utils as index_constraint_utils
from . import utils as constraints_utils


class ConstraintDeleteMultipleTestCase(BaseTestGenerator):
    """This class will delete constraints under the table node."""
    scenarios = [
        # Fetching default URL for table node.
        ('Delete Constraints', dict(url='/browser/constraints/obj/'))
    ]
    url = '/browser/constraints/obj/'

    # Generates scenarios from the constraints_test_data.json file
    scenarios = utils.generate_scenarios("constraints_delete",
                                         constraints_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
@ -45,6 +51,8 @@ class ConstraintDeleteMultipleTestCase(BaseTestGenerator):
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
@ -52,6 +60,8 @@ class ConstraintDeleteMultipleTestCase(BaseTestGenerator):
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_constraint_delete_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
@ -113,24 +123,32 @@ class ConstraintDeleteMultipleTestCase(BaseTestGenerator):

    def runTest(self):
        """This function will delete constraints under the table node."""
        data = {'ids': [
        self.data = {'ids': [
            {'id': self.check_constraint_id, '_type': 'check_constraint'},
            {'id': self.check_constraint_id_1, '_type': 'check_constraint'},
            {'id': self.exclustion_constraint_id,
             '_type': 'exclustion_constraint'},
            {'id': self.foreign_key_id, '_type': 'foreign_key'},
            {'id': self.primary_key_id, '_type': 'index_constraint'},
            {'id': self.unique_constraint_id, '_type': 'index_constraint'}
            {'id': self.unique_constraint_id, '_type': 'index_constraint'},
            {'id': self.exclustion_constraint_id,
             '_type': 'exclustion_constraint'}
        ]}
        response = self.tester.delete(self.url + str(utils.SERVER_GROUP) +
                                      '/' + str(self.server_id) + '/' +
                                      str(self.db_id) + '/' +
                                      str(self.schema_id) + '/' +
                                      str(self.table_id) + '/',
                                      data=json.dumps(data),
                                      content_type='html/json',
                                      follow_redirects=True)
        self.assertEquals(response.status_code, 200)

        if self.is_positive_test:
            response = constraints_utils.api_delete(self)

            # Assert response
            utils.assert_status_code(self, response)

            # Verify in backend
            for val in self.data["ids"]:
                constraint_name = val["id"]
                cross_check_res = constraints_utils.verify_constraint(
                    self.server,
                    self.db_name,
                    constraint_name)
                self.assertIsNone(cross_check_res,
                                  "Deleted {0} still present".
                                  format(constraint_name))

    def tearDown(self):
        # Disconnect the database
|
||||
##########################################################################
|
||||
#
|
||||
# pgAdmin 4 - PostgreSQL Tools
|
||||
#
|
||||
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
|
||||
# This software is released under the PostgreSQL Licence
|
||||
#
|
||||
##########################################################################
|
||||
|
||||
import uuid
|
||||
import json
|
||||
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
|
||||
utils as schema_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
|
||||
database_utils
|
||||
from pgadmin.utils.route import BaseTestGenerator
|
||||
from regression import parent_node_dict
|
||||
from regression.python_test_utils import test_utils as utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
|
||||
import utils as tables_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.check_constraint.tests import utils as chk_constraint_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.exclusion_constraint.tests import utils as exclusion_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.foreign_key.tests import utils as fk_utils
|
||||
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
|
||||
constraints.index_constraint.tests import utils as index_constraint_utils
|
||||
from . import utils as constraints_utils
|
||||
|
||||
|
||||
class ConstraintDeleteMultipleTestCase(BaseTestGenerator):
|
||||
"""This class will delete constraints under table node."""
|
||||
url = '/browser/constraints/obj/'
|
||||
|
||||
# Generates scenarios from cast_test_data.json file
|
||||
scenarios = utils.generate_scenarios("constraints_get",
|
||||
constraints_utils.test_cases)
|
||||
|
||||
# scenarios = [
|
||||
# # Fetching default URL for table node.
|
||||
# ('Get Constraints', dict(url='/browser/constraints/obj/'))
|
||||
# ]
|
||||
|
||||
def setUp(self):
|
||||
# Load test data
|
||||
self.data = self.test_data
|
||||
|
||||
# Create db connection
|
||||
self.db_name = parent_node_dict["database"][-1]["db_name"]
|
||||
schema_info = parent_node_dict["schema"][-1]
|
||||
self.server_id = schema_info["server_id"]
|
||||
self.db_id = schema_info["db_id"]
|
||||
db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
|
||||
self.server_id, self.db_id)
|
||||
if not db_con['data']["connected"]:
|
||||
raise Exception("Could not connect to database to add a table.")
|
||||
|
||||
# Create schema
|
||||
self.schema_id = schema_info["schema_id"]
|
||||
self.schema_name = schema_info["schema_name"]
|
||||
schema_response = schema_utils.verify_schemas(self.server,
|
||||
self.db_name,
|
||||
self.schema_name)
|
||||
if not schema_response:
|
||||
raise Exception("Could not find the schema to add a table.")
|
||||
|
||||
# Create table
|
||||
self.table_name = "table_constraint_delete_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.table_id = tables_utils.create_table(self.server,
|
||||
self.db_name,
|
||||
self.schema_name,
|
||||
self.table_name)
|
||||
|
||||
# Create Check Constraints
|
||||
self.check_constraint_name = "test_constraint_delete_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.check_constraint_id = \
|
||||
chk_constraint_utils.create_check_constraint(
|
||||
self.server, self.db_name, self.schema_name, self.table_name,
|
||||
self.check_constraint_name)
|
||||
|
||||
self.check_constraint_name_1 = "test_constraint_delete1_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.check_constraint_id_1 = \
|
||||
chk_constraint_utils.create_check_constraint(
|
||||
self.server, self.db_name, self.schema_name, self.table_name,
|
||||
self.check_constraint_name_1)
|
||||
|
||||
# Create Exclusion Constraint
|
||||
self.exclustion_constraint_name = "test_exclusion_get_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
self.exclustion_constraint_id = \
|
||||
exclusion_utils.create_exclusion_constraint(
|
||||
self.server, self.db_name, self.schema_name, self.table_name,
|
||||
self.exclustion_constraint_name
|
||||
)
|
||||
|
||||
# Create Foreign Key
|
||||
self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_table_id = tables_utils.create_table(
|
||||
self.server, self.db_name, self.schema_name,
|
||||
self.foreign_table_name)
|
||||
self.foreign_key_name = "test_foreignkey_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.foreign_key_id = fk_utils.create_foreignkey(
|
||||
self.server, self.db_name, self.schema_name, self.table_name,
|
||||
self.foreign_table_name)
|
||||
|
||||
# Create Primary Key
|
||||
self.primary_key_name = "test_primary_key_get_%s" % \
|
||||
(str(uuid.uuid4())[1:8])
|
||||
self.primary_key_id = \
|
||||
index_constraint_utils.create_index_constraint(
|
||||
self.server, self.db_name, self.schema_name, self.table_name,
|
||||
self.primary_key_name, "PRIMARY KEY")
|
||||
|
||||
# Create Unique Key constraint
|
||||
self.unique_constraint_name = "test_unique_constraint_get_%s" % (
|
||||
str(uuid.uuid4())[1:8])
|
||||
|
||||
self.unique_constraint_id = \
|
||||
index_constraint_utils.create_index_constraint(
|
||||
self.server, self.db_name, self.schema_name, self.table_name,
|
||||
self.unique_constraint_name, "UNIQUE")
|
||||
|
||||
def runTest(self):
|
||||
"""This function will delete constraints under table node."""
|
||||
if self.is_positive_test:
|
||||
response = constraints_utils.api_get(self)
|
||||
|
||||
# Assert response
|
||||
utils.assert_status_code(self, response)
|
||||
|
||||
# data = {'ids': [
|
||||
# {'id': self.check_constraint_id, '_type': 'check_constraint'},
|
||||
# {'id': self.check_constraint_id_1, '_type': 'check_constraint'},
|
||||
# {'id': self.exclustion_constraint_id,
|
||||
# '_type': 'exclustion_constraint'},
|
||||
# {'id': self.foreign_key_id, '_type': 'foreign_key'},
|
||||
# {'id': self.primary_key_id, '_type': 'index_constraint'},
|
||||
# {'id': self.unique_constraint_id, '_type': 'index_constraint'}
|
||||
# ]}
|
||||
# response = self.tester.delete(self.url + str(utils.SERVER_GROUP) +
|
||||
# '/' + str(self.server_id) + '/' +
|
||||
# str(self.db_id) + '/' +
|
||||
# str(self.schema_id) + '/' +
|
||||
# str(self.table_id) + '/',
|
||||
# data=json.dumps(data),
|
||||
# content_type='html/json',
|
||||
# follow_redirects=True)
|
||||
# response = self.tester.get(self.url + str(utils.SERVER_GROUP) +
|
||||
# '/' + str(self.server_id) + '/' +
|
||||
# str(self.db_id) + '/' +
|
||||
# str(self.schema_id) + '/' +
|
||||
# str(self.table_id) + '/',
|
||||
# data=json.dumps(data),
|
||||
# content_type='html/json',
|
||||
# follow_redirects=True)
|
||||
# self.assertEquals(response.status_code, 200)
|
||||
|
||||
def tearDown(self):
|
||||
# Disconnect the database
|
||||
database_utils.disconnect_database(self, self.server_id, self.db_id)
|
@ -0,0 +1,135 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
import json

from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.tests \
    import utils as tables_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
    constraints.check_constraint.tests import utils as chk_constraint_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
    constraints.exclusion_constraint.tests import utils as exclusion_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
    constraints.foreign_key.tests import utils as fk_utils
from pgadmin.browser.server_groups.servers.databases.schemas.tables.\
    constraints.index_constraint.tests import utils as index_constraint_utils
from . import utils as constraints_utils


class ConstraintDeleteMultipleTestCase(BaseTestGenerator):
    """This class will fetch constraint nodes under the table node."""
    url = '/browser/constraints/nodes/'

    # Generates scenarios from the constraints_test_data.json file
    scenarios = utils.generate_scenarios("constraints_get_nodes",
                                         constraints_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Create schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "table_constraint_delete_%s" % \
                          (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server,
                                                  self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create Check Constraints
        self.check_constraint_name = "test_constraint_delete_%s" % \
                                     (str(uuid.uuid4())[1:8])
        self.check_constraint_id = \
            chk_constraint_utils.create_check_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.check_constraint_name)

        self.check_constraint_name_1 = "test_constraint_delete1_%s" % (
            str(uuid.uuid4())[1:8])
        self.check_constraint_id_1 = \
            chk_constraint_utils.create_check_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.check_constraint_name_1)

        # Create Exclusion Constraint
        self.exclustion_constraint_name = "test_exclusion_get_%s" % (
            str(uuid.uuid4())[1:8])
        self.exclustion_constraint_id = \
            exclusion_utils.create_exclusion_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.exclustion_constraint_name
            )

        # Create Foreign Key
        self.foreign_table_name = "foreign_table_foreignkey_get_%s" % \
                                  (str(uuid.uuid4())[1:8])
        self.foreign_table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name,
            self.foreign_table_name)
        self.foreign_key_name = "test_foreignkey_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        self.foreign_key_id = fk_utils.create_foreignkey(
            self.server, self.db_name, self.schema_name, self.table_name,
            self.foreign_table_name)

        # Create Primary Key
        self.primary_key_name = "test_primary_key_get_%s" % \
                                (str(uuid.uuid4())[1:8])
        self.primary_key_id = \
            index_constraint_utils.create_index_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.primary_key_name, "PRIMARY KEY")

        # Create Unique Key constraint
        self.unique_constraint_name = "test_unique_constraint_get_%s" % (
            str(uuid.uuid4())[1:8])

        self.unique_constraint_id = \
            index_constraint_utils.create_index_constraint(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.unique_constraint_name, "UNIQUE")

    def runTest(self):
        """This function will fetch constraint nodes under the table node."""
        if self.is_positive_test:
            response = constraints_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)
@ -0,0 +1,68 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import sys
import os
import json
import traceback

from regression.python_test_utils import test_utils as utils

# Load test data from json file.
CURRENT_PATH = os.path.dirname(os.path.realpath(__file__))
with open(CURRENT_PATH + "/constraints_test_data.json") as data_file:
    test_cases = json.load(data_file)


def api_get(self):
    return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}/".
                           format(self.url, utils.SERVER_GROUP,
                                  self.server_id, self.db_id,
                                  self.schema_id, self.table_id),
                           content_type='html/json',
                           follow_redirects=True)


def api_delete(self):
    return self.tester.delete("{0}{1}/{2}/{3}/{4}/{5}/".
                              format(self.url, utils.SERVER_GROUP,
                                     self.server_id, self.db_id,
                                     self.schema_id, self.table_id),
                              content_type='html/json',
                              data=json.dumps(self.data),
                              follow_redirects=True)


def verify_constraint(server, db_name, constraint_name):
    """
    This function verifies whether the given constraint exists.
    :param server: server details
    :type server: dict
    :param db_name: database name
    :type db_name: str
    :param constraint_name: constraint name
    :type constraint_name: str
    :return chk_constraint_record: constraint record from database
    :rtype: tuple
    """
    try:
        connection = utils.get_db_connection(db_name,
                                             server['username'],
                                             server['db_password'],
                                             server['host'],
                                             server['port'])
        pg_cursor = connection.cursor()
        pg_cursor.execute(
            "SELECT oid FROM pg_constraint where conname='%s'" %
            constraint_name)
        chk_constraint_record = pg_cursor.fetchone()
        connection.close()
        return chk_constraint_record
    except Exception:
        traceback.print_exc(file=sys.stderr)
@ -1205,6 +1205,21 @@ def generate_scenarios(key, test_cases):
    return scenarios


def assert_status_code(self, response):
    act_res = response.status_code
    exp_res = self.expected_data["status_code"]
    return self.assertEquals(act_res, exp_res)


def assert_error_message(self, response, error_msg=None):
    act_res = response.json["errormsg"]
    if error_msg is not None:
        exp_res = error_msg
    else:
        exp_res = self.expected_data["error_msg"]
    return self.assertEquals(act_res, exp_res)

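A short usage sketch for the two helpers above; FakeTest and FakeResponse are hypothetical stand-ins for the scenario-driven test instance and the Flask test-client response, assuming the two helpers are in scope.

    class FakeResponse:
        status_code = 410
        json = {"errormsg": "Error: Object not found."}

    class FakeTest:
        expected_data = {"status_code": 410,
                         "error_msg": "Error: Object not found."}

        def assertEquals(self, actual, expected):
            assert actual == expected, "%r != %r" % (actual, expected)

    test, response = FakeTest(), FakeResponse()
    assert_status_code(test, response)    # passes: 410 == 410
    assert_error_message(test, response)  # passes: messages match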

def create_expected_output(parameters, actual_data):
    """
    This function creates the dict using the given parameters and actual data