From 3e35dc95e5c633572a24371b49011e306f18f0a0 Mon Sep 17 00:00:00 2001 From: Yogesh Mahajan Date: Thu, 3 Sep 2020 12:35:56 +0530 Subject: [PATCH] Improve code coverage and API test cases for Tables. Fixes #5774. --- docs/en_US/release_notes_4_26.rst | 1 + .../schemas/tables/tests/table_test_data.json | 1934 +++++++++++++++++ .../tests/test_column_privileges_put.py | 96 - .../schemas/tables/tests/test_table_add.py | 214 +- .../schemas/tables/tests/test_table_delete.py | 63 +- .../test_table_dependencies_dependents.py | 75 + .../schemas/tables/tests/test_table_get.py | 59 +- .../test_table_get_existing_table_actions.py | 98 + .../tables/tests/test_table_get_nodes.py | 98 + ...table_get_pre_table_creation_parameters.py | 104 + .../tables/tests/test_table_get_script_sql.py | 99 + .../tables/tests/test_table_get_statistics.py | 95 + .../schemas/tables/tests/test_table_msql.py | 79 + .../tables/tests/test_table_parameters.py | 175 -- .../schemas/tables/tests/test_table_put.py | 141 +- .../tables/tests/test_table_put_partition.py | 117 + ...iple.py => test_table_reset_statistics.py} | 56 +- .../schemas/tables/tests/test_table_sql.py | 88 + .../databases/schemas/tables/tests/utils.py | 89 +- 19 files changed, 3079 insertions(+), 602 deletions(-) create mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/table_test_data.json delete mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_privileges_put.py create mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_dependencies_dependents.py create mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_existing_table_actions.py create mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_nodes.py create mode 100644 
web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_pre_table_creation_parameters.py create mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_script_sql.py create mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_statistics.py create mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_msql.py delete mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_parameters.py create mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_put_partition.py rename web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/{test_table_delete_multiple.py => test_table_reset_statistics.py} (64%) create mode 100644 web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_sql.py diff --git a/docs/en_US/release_notes_4_26.rst b/docs/en_US/release_notes_4_26.rst index c6cd3c2f3..e121aec43 100644 --- a/docs/en_US/release_notes_4_26.rst +++ b/docs/en_US/release_notes_4_26.rst @@ -18,6 +18,7 @@ Housekeeping | `Issue #5332 `_ - Improve code coverage and API test cases for Columns and Constraints (Index, Foreign Key, Check, Exclusion). | `Issue #5344 `_ - Improve code coverage and API test cases for Grant Wizard. +| `Issue #5774 `_ - Improve code coverage and API test cases for Tables. 
Bug fixes ********* diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/table_test_data.json b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/table_test_data.json new file mode 100644 index 000000000..00749a4f3 --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/table_test_data.json @@ -0,0 +1,1934 @@ +{ + "table_create": [ + { + "name": "Create: Add Table with Identity columns", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 100000, + "skip_msg": "Identity columns are not supported by PPAS/PG 10.0 and below." + }, + "test_data": { + "description": "Create Table API Test", + "columns": [ + { + "name": "iden_always", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attnotnull": true, + "attlen": null, + "attprecision": null, + "attoptions": [], + "seclabels": [], + "colconstype": "i", + "attidentity": "a", + "seqincrement": 1, + "seqstart": 1, + "seqmin": 1, + "seqmax": 10, + "seqcache": 1, + "seqcycle": true + }, + { + "name": "iden_default", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attnotnull": true, + "attlen": null, + "attprecision": null, + "attoptions": [], + "seclabels": [], + "colconstype": "i", + "attidentity": "d", + "seqincrement": 2, + "seqstart": 2, + "seqmin": 2, + "seqmax": 2000, + "seqcache": 1, + "seqcycle": true + } + ] + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + } + }, + { + "name": "Create: Add Table with Generated columns", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 120000, + "skip_msg": "Generated columns are not supported by EPAS/PG 12.0 and below." 
+ }, + "test_data": { + "columns": [ + { + "name": "c1", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attoptions": [], + "seclabels": [] + }, + { + "name": "c2", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attoptions": [], + "seclabels": [] + }, + { + "name": "genrated", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attnotnull": true, + "attlen": null, + "attprecision": null, + "attoptions": [], + "seclabels": [], + "colconstype": "g", + "genexpr": "c1*c2" + } + ] + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + } + }, + { + "name": "Create: Add hash partitioned table with 2 partitions.", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 110000, + "skip_msg": "Hash Partition are not supported by PPAS/PG 10.0 and below." + }, + "test_data": { + "is_partitioned": true, + "partition_type": "hash", + "partitions": [ + { + "values_modulus": "24", + "values_remainder": "3", + "is_attach": false, + "partition_name": "emp_2016" + }, + { + "values_modulus": "8", + "values_remainder": "2", + "is_attach": false, + "partition_name": "emp_2017" + } + ], + "partition_keys": [ + { + "key_type": "column", + "pt_column": "empno" + } + ] + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + } + }, + { + "name": "Create: Add Multilevel List partitioned table with subpartition table.", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below." 
+ }, + "test_data": { + "is_partitioned": true, + "partition_type": "list", + "multilevel_partition": true, + "partitions": [ + { + "values_in": "'2012-01-01', '2012-12-31'", + "is_attach": false, + "partition_name": "emp_2012_multi_level", + "is_sub_partitioned": true, + "sub_partition_type": "list", + "sub_partition_keys": [ + { + "key_type": "column", + "pt_column": "empno" + } + ] + }, + { + "values_in": "'2013-01-01', '2013-12-31'", + "is_attach": false, + "partition_name": "emp_2013_multi_level", + "is_sub_partitioned": true, + "sub_partition_type": "range", + "sub_partition_keys": [ + { + "key_type": "column", + "pt_column": "empno" + } + ] + } + ], + "partition_keys": [ + { + "key_type": "column", + "pt_column": "DOJ" + } + ] + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + } + }, + { + "name": "Create: Add list partitioned table with 2 partitions.", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below." + }, + "test_data": { + "is_partitioned": true, + "partition_type": "list", + "partitions": [ + { + "values_in": "'2012-01-01', '2012-12-31'", + "is_attach": false, + "partition_name": "emp_2012" + }, + { + "values_in": "'2013-01-01', '2013-12-31'", + "is_attach": false, + "partition_name": "emp_2013" + } + ], + "partition_keys": [ + { + "key_type": "column", + "pt_column": "DOJ" + } + ] + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + } + }, + { + "name": "Create: Add multilevel Range partitioned table with subpartition table", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below." 
+ }, + "test_data": { + "is_partitioned": true, + "partition_type": "range", + "multilevel_partition": true, + "partitions": [ + { + "values_from": "'2010-01-01'", + "values_to": "'2010-12-31'", + "is_attach": false, + "partition_name": "emp_2010_multi_level", + "is_sub_partitioned": true, + "sub_partition_type": "range", + "sub_partition_keys": [ + { + "key_type": "column", + "pt_column": "empno" + } + ] + }, + { + "values_from": "'2011-01-01'", + "values_to": "'2011-12-31'", + "is_attach": false, + "partition_name": "emp_2011_multi_level", + "is_sub_partitioned": true, + "sub_partition_type": "list", + "sub_partition_keys": [ + { + "key_type": "column", + "pt_column": "empno" + } + ] + } + ], + "partition_keys": [ + { + "key_type": "column", + "pt_column": "DOJ" + } + ] + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + } + }, + { + "name": "Create: Add Range partitioned table with 1 default and 2 value based partition", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 110000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below." 
+ }, + "test_data": { + "is_partitioned": true, + "partition_type": "range", + "is_default": true, + "partitions": [ + { + "values_from": "'2010-01-01'", + "values_to": "'2010-12-31'", + "is_attach": false, + "partition_name": "emp_2010_def" + }, + { + "values_from": "'2011-01-01'", + "values_to": "'2011-12-31'", + "is_attach": false, + "partition_name": "emp_2011_def" + }, + { + "values_from": "", + "values_to": "", + "is_attach": false, + "is_default": true, + "partition_name": "emp_2012_def" + } + ], + "partition_keys": [ + { + "key_type": "column", + "pt_column": "DOJ" + } + ] + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + } + }, + { + "name": "Create: Add range partitioned table with 2 partitions", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below." + }, + "test_data": { + "is_partitioned": true, + "partition_type": "range", + "partitions": [ + { + "values_from": "'2010-01-01'", + "values_to": "'2010-12-31'", + "is_attach": false, + "partition_name": "emp_2010" + }, + { + "values_from": "'2011-01-01'", + "values_to": "'2011-12-31'", + "is_attach": false, + "partition_name": "emp_2011" + } + ], + "partition_keys": [ + { + "key_type": "column", + "pt_column": "DOJ" + } + ] + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + } + }, + { + "name": "Create: Add Table with name more than 64 chars.", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 100000, + "skip_msg": "Identity columns are not supported by PPAS/PG 10.0 and below." 
+ }, + "test_data": { + "table_name": "abcdefghijklmnopqrstuvwxyz1234567890abcdefghijklmnopqrstuvwxyz123", + "description": "Create Table API Test", + "columns": [ + { + "name": "iden_always", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attnotnull": true, + "attlen": null, + "attprecision": null, + "attoptions": [], + "seclabels": [], + "colconstype": "i", + "attidentity": "a", + "seqincrement": 1, + "seqstart": 1, + "seqmin": 1, + "seqmax": 10, + "seqcache": 1, + "seqcycle": true + }, + { + "name": "iden_default", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attnotnull": true, + "attlen": null, + "attprecision": null, + "attoptions": [], + "seclabels": [], + "colconstype": "i", + "attidentity": "d", + "seqincrement": 2, + "seqstart": 2, + "seqmin": 2, + "seqmax": 2000, + "seqcache": 1, + "seqcycle": true + } + ] + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + } + }, + { + "name": "Create: Add Table without name parameter.", + "is_positive_test": false, + "inventory_data": { + "server_min_version": 100000, + "skip_msg": "Identity columns are not supported by PPAS/PG 10.0 and below." 
+ }, + "test_data": { + "table_name": "", + "description": "Create Table API Test", + "columns": [ + { + "name": "iden_always", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attnotnull": true, + "attlen": null, + "attprecision": null, + "attoptions": [], + "seclabels": [], + "colconstype": "i", + "attidentity": "a", + "seqincrement": 1, + "seqstart": 1, + "seqmin": 1, + "seqmax": 10, + "seqcache": 1, + "seqcycle": true + }, + { + "name": "iden_default", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attnotnull": true, + "attlen": null, + "attprecision": null, + "attoptions": [], + "seclabels": [], + "colconstype": "i", + "attidentity": "d", + "seqincrement": 2, + "seqstart": 2, + "seqmin": 2, + "seqmax": 2000, + "seqcache": 1, + "seqcycle": true + } + ] + }, + "mocking_required": false, + "mock_data": { + }, + "expected_data": { + "status_code": 410, + "error_msg": "Could not find the required parameter (name).", + "test_result_data": {} + } + }, + { + "name": "Create: Add Table while server is down.", + "is_positive_test": false, + "inventory_data": { + "server_min_version": 100000, + "skip_msg": "Identity columns are not supported by PPAS/PG 10.0 and below." 
+ }, + "test_data": { + "description": "Create Table API Test", + "columns": [ + { + "name": "iden_always", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attnotnull": true, + "attlen": null, + "attprecision": null, + "attoptions": [], + "seclabels": [], + "colconstype": "i", + "attidentity": "a", + "seqincrement": 1, + "seqstart": 1, + "seqmin": 1, + "seqmax": 10, + "seqcache": 1, + "seqcycle": true + }, + { + "name": "iden_default", + "cltype": "bigint", + "attacl": [], + "is_primary_key": false, + "attnotnull": true, + "attlen": null, + "attprecision": null, + "attoptions": [], + "seclabels": [], + "colconstype": "i", + "attidentity": "d", + "seqincrement": 2, + "seqstart": 2, + "seqmin": 2, + "seqmax": 2000, + "seqcache": 1, + "seqcycle": true + } + ] + }, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar", + "return_value": "[(False, 'Mocked Internal Server Error'),(True,True)]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + } + } + ], + "table_delete": [ + { + "name": "Delete: Delete existing table", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Delete: Delete multiple existing table", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": true + }, + { + "name": "Delete: Delete non-existing table", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": "Error: Object not 
found.", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Delete: Delete existing table when server is down.", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + } + ], + "table_get": [ + { + "name": "Get: Fetch existing table details", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get: Fetch multiple existing table details", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": true + }, + { + "name": "Get: Fetch non-existing table details", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "could not find the specified table.", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get: Fetch existing table details while server is down.", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": 
"Get: Fetch multiple existing table details while server is down.", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": true + } + ], + "table_get_nodes": [ + { + "name": "Get nodes: Fetch existing table details", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get nodes: Fetch multiple existing table details", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": true + }, + { + "name": "Get nodes: Fetch non-existing table details", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "Could not find the table.", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get nodes: Fetch existing table details while server is down", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get nodes: Fetch multiple existing table 
details while server is down", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_2darray", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": true + } + ], + "table_msql": [ + { + "name": "Get msql: Fetch existing table details", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "description": "Msql API Tests", + "name": "chnagename" + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get msql: Fetch non-existing table details", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1, + "description": "Msql API Tests", + "name": "chnagename" + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "could not find the specified table.", + "test_result_data": {} + }, + "is_list": false + } + ], + "table_sql": [ + { + "name": "Get sql: Fetch existing table sql", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get sql: Fetch non-existing table sql", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "could not find the specified table.", + "test_result_data": {} + }, + "is_list": false + } + ], + "table_dependencies_dependents": [ + { + "name": "Get dependents: Fetch existing table.", + 
"is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_dependent": true + }, + { + "name": "Get dependencies: Fetch multiple existing table.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_dependent": false + } + ], + "table_get_statistics": [ + { + "name": "Get statistics: Fetch existing table stats.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get statistics: Fetch multiple existing table stats.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": true + }, + { + "name": "Get statistics: Fetch existing table stats while server is down.", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_scalar", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get statistics: Fetch existing table stats while server is down-2.", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": 
"[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + } + ], + "table_put_partition": [ + { + "name": "Put: Create partitions of existing range partitioned table.", + "is_positive_test": true, + "inventory_data": { + "is_partition": true, + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below.", + "partition_type": "range", + "mode": "create" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Create partitions with partition table of existing range.", + "is_positive_test": true, + "inventory_data": { + "is_partition": true, + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below.", + "partition_type": "range", + "mode": "multilevel" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Create partitions of existing list partitioned table.", + "is_positive_test": true, + "inventory_data": { + "is_partition": true, + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below.", + "partition_type": "list", + "mode": "create" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Create partitions with partition table of existing list partitioned table.", + "is_positive_test": true, + "inventory_data": { + "is_partition": true, + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 
10.0 and below.", + "partition_type": "list", + "mode": "multilevel" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Detach partition from existing range partitioned table'.", + "is_positive_test": true, + "inventory_data": { + "is_partition": true, + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below.", + "partition_type": "range", + "mode": "detach" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Detach partition from existing list partitioned table.", + "is_positive_test": true, + "inventory_data": { + "is_partition": true, + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below.", + "partition_type": "list", + "mode": "detach" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Attach partition to existing range partitioned table.", + "is_positive_test": true, + "inventory_data": { + "is_partition": true, + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below.", + "partition_type": "range", + "mode": "attach" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Attach partition to existing list partitioned table.", + "is_positive_test": true, + "inventory_data": { + "is_partition": true, + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by 
PPAS/PG 10.0 and below.", + "partition_type": "list", + "mode": "attach" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Update comment in table.", + "is_positive_test": true, + "inventory_data": { + "is_partition": false + }, + "test_data": { + "description": "This is test comment for table" + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + } + ], + "table_put": [ + { + "name": "Put: Enable custom auto vacuum and set the parameters for table without autovacuum_enabled", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "autovacuum_custom": true, + "vacuum_table": { + "changed": [ + { + "name": "autovacuum_vacuum_cost_delay", + "value": 20 + }, + { + "name": "autovacuum_vacuum_threshold", + "value": 20 + } + ] + } + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Change a parameter to zero value without autovacuum_enabled", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "vacuum_table": { + "changed": [ + { + "name": "autovacuum_vacuum_cost_delay", + "value": 0 + } + ] + } + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Enable autovacuum_enabled", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "autovacuum_enabled": "t" + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Reset individual parameters for 
table.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "autovacuum_enabled": "x", + "vacuum_table": { + "changed": [ + { + "name": "autovacuum_vacuum_cost_delay", + "value": null + } + ] + } + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Reset custom auto vacuum.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "autovacuum_custom": false + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Enable toast custom auto vacuum and set the parameters for table without autovacuum_enabled.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "toast_autovacuum": true, + "vacuum_toast": { + "changed": [ + { + "name": "autovacuum_vacuum_cost_delay", + "value": 20 + }, + { + "name": "autovacuum_vacuum_threshold", + "value": 20 + } + ] + } + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Change a toast parameter to zero value without autovacuum_enabled", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "vacuum_toast": { + "changed": [ + { + "name": "autovacuum_vacuum_cost_delay", + "value": 0 + } + ] + } + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Enable toast.autovacuum_enabled.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "toast_autovacuum_enabled": "t" + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + 
"is_list": false + }, + { + "name": "Put: Reset individual toast parameters for table'.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "toast_autovacuum_enabled": "x", + "vacuum_toast": { + "changed": [ + { + "name": "autovacuum_vacuum_cost_delay", + "value": null + } + ] + } + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Reset auto vacuum.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": { + "toast_autovacuum":false + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Put: Update column privileges.", + "is_positive_test": true, + "inventory_data": { + "is_grant_tab": true + }, + "test_data": { + "columns": { + "changed": [{ + "attnum": 1, + "attacl": { + "added": [{ + "privileges": [ + {"privilege_type": "a", "privilege": true, + "with_grant": true}, + {"privilege_type": "r", "privilege": true, + "with_grant": true}, + {"privilege_type": "w", "privilege": true, + "with_grant": true}, + {"privilege_type": "x", "privilege": true, + "with_grant": true + } + ] + }] + } + }] + } + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + } + ], + "table_get_script_sql": [ + { + "name": "Get Insert Sql: Fetch insert sql for existing table.", + "add_to_url": "insert_sql/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Insert Sql: Fetch insert sql for existing table without column.", + "add_to_url": "insert_sql/", + "is_positive_test": true, + 
"inventory_data": { + "query": "\"create table %s.%s()\" % (schema_name,table_name)" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Insert Sql: Fetch insert sql for non-existing table.", + "add_to_url": "insert_sql/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "could not find the specified table.", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Insert Sql: Fetch insert sql for existing table while server is down.", + "add_to_url": "insert_sql/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Update Sql: Fetch update sql for existing table.", + "add_to_url": "update_sql/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Update Sql: Fetch update sql for existing table without column.", + "add_to_url": "update_sql/", + "is_positive_test": true, + "inventory_data": { + "query": "\"create table %s.%s()\" % (schema_name,table_name)" + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Update Sql: 
Fetch update sql for non-existing table.", + "add_to_url": "update_sql/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "could not find the specified table.", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Update Sql: Fetch update sql for existing table while server is down.", + "add_to_url": "update_sql/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Delete Sql: Fetch delete sql for existing table.", + "add_to_url": "delete_sql/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Delete Sql: Fetch delete sql for non-existing table.", + "add_to_url": "delete_sql/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "could not find the specified table.", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Delete Sql: Fetch delete sql for existing table while server is down.", + "add_to_url": "delete_sql/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": "[(False,'Mocked 
Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Select Sql: Fetch select sql for existing table.", + "add_to_url": "select_sql/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Select Sql: Fetch select sql for non-existing table.", + "add_to_url": "select_sql/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "could not find the specified table.", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get Select Sql: Fetch select sql for existing table while server is down.", + "add_to_url": "select_sql/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + } + ], + "table_get_pre_table_creation_parameters": [ + { + "name": "Get pre table creation parameter: Fetch table vacuum parameters.", + "add_to_url": "get_table_vacuum/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch toast table vacuum parameters.", + "add_to_url": "get_toast_table_vacuum/", + 
"is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch access methods.", + "add_to_url": "get_access_methods/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch types.", + "add_to_url": "get_types/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch of types.", + "add_to_url": "get_oftype/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch tables available for inheritance .", + "add_to_url": "get_inherits/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch list of tables available for like/relation .", + "add_to_url": "get_relations/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": 
"Get pre table creation parameter: Fetch list of tables required for constraints.", + "add_to_url": "all_tables/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch list of tables required for constraints while server down.", + "add_to_url": "all_tables/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": {}, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch list of columns for foreign table using tid.", + "add_to_url": "get_columns/", + "is_positive_test": true, + "inventory_data": {}, + "url_encoded_data": true, + "test_data": { + "tid": "" + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch list of columns for foreign table using tid while server down.", + "add_to_url": "get_columns/", + "is_positive_test": false, + "inventory_data": {}, + "url_encoded_data": true, + "test_data": { + "tid": "" + }, + "mocking_required": true, + "mock_data": { + "function_name": "pgadmin.utils.driver.psycopg2.connection.Connection.execute_dict", + "return_value": "[(False,'Mocked Internal Server Error')]" + }, + "expected_data": { + "status_code": 500, + "error_msg": "Mocked Internal Server Error", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch list of 
columns for foreign table using tname.", + "add_to_url": "get_columns/", + "is_positive_test": true, + "inventory_data": {}, + "url_encoded_data": true, + "test_data": { + "tname": "" + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch list of operators for column.", + "add_to_url": "get_operator/", + "is_positive_test": true, + "inventory_data": {}, + "url_encoded_data": true, + "test_data": { + "col_type":"integer" + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get pre table creation parameter: Fetch list of operator class for column.", + "add_to_url": "get_oper_class/", + "is_positive_test": true, + "inventory_data": {}, + "url_encoded_data": true, + "test_data": { + "indextype":"btree" + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + } + ], + "table_delete_statistics": [ + { + "name": "Delete: Reset table statistics for existing table details.", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Delete: Reset table statistics for non-existing table details.", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "could not find the specified table.", + "test_result_data": {} + }, + "is_list": false + } + ], + "test_table_get_existing_table_actions": [ + { + "name": "Get row count of table: Fetch 
table row count.", + "add_to_url": "count_rows/", + "is_positive_test": true, + "inventory_data": {}, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get row count of table: Fetch table row count for non-existing table.", + "add_to_url": "count_rows/", + "is_positive_test": false, + "inventory_data": {}, + "test_data": { + "table_id": 1 + }, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 410, + "error_msg": "could not find the specified table.", + "test_result_data": {} + }, + "is_list": false + }, + { + "name": "Get attached tables table: Fetch table attached to parent.", + "add_to_url": "get_attach_tables/", + "is_positive_test": true, + "inventory_data": { + "server_min_version": 100000, + "skip_msg": "Partitioned table are not supported by PPAS/PG 10.0 and below." + }, + "test_data": {}, + "mocking_required": false, + "mock_data": {}, + "expected_data": { + "status_code": 200, + "error_msg": null, + "test_result_data": {} + }, + "is_list": false + } + ] +} diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_privileges_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_privileges_put.py deleted file mode 100644 index a4323c32d..000000000 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_column_privileges_put.py +++ /dev/null @@ -1,96 +0,0 @@ -########################################################################## -# -# pgAdmin 4 - PostgreSQL Tools -# -# Copyright (C) 2013 - 2020, The pgAdmin Development Team -# This software is released under the PostgreSQL Licence -# -########################################################################## - -import json -import uuid - -from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ - 
utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict -from regression.python_test_utils import test_utils as utils -from . import utils as tables_utils - - -class TableUpdateColumnTestCase(BaseTestGenerator): - """This class will update the column node from table""" - scenarios = [ - # Fetching default URL for table node. - ('Add privileges for existing column', - dict(url='/browser/table/obj/') - ) - ] - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) - if not schema_response: - raise Exception("Could not find the schema to add a table.") - self.table_name = "test_table_column_put_%s" % (str(uuid.uuid4())[1:8]) - - self.table_id = tables_utils.create_table( - self.server, self.db_name, - self.schema_name, - self.table_name) - - def runTest(self): - """This function will fetch added table under schema node.""" - table_response = tables_utils.verify_table(self.server, self.db_name, - self.table_id) - if not table_response: - raise Exception("Could not find the table to update.") - - data = { - "columns": { - "changed": [{ - "attnum": 1, - "attacl": { - "added": [{ - "grantee": self.server["username"], - "grantor": self.server["username"], - "privileges": [ - {"privilege_type": "a", "privilege": True, - "with_grant": True}, - {"privilege_type": "r", "privilege": True, - 
"with_grant": True}, - {"privilege_type": "w", "privilege": True, - "with_grant": True}, - {"privilege_type": "x", "privilege": True, - "with_grant": True - } - ] - }] - } - }] - } - } - - response = self.tester.put( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + '/' + - str(self.schema_id) + '/' + str(self.table_id), - data=json.dumps(data), follow_redirects=True) - self.assertEqual(response.status_code, 200) - - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_add.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_add.py index fb17fd805..444de09c7 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_add.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_add.py @@ -9,6 +9,7 @@ import json import uuid +from unittest.mock import patch from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils @@ -23,159 +24,38 @@ from . import utils as tables_utils class TableAddTestCase(BaseTestGenerator): """ This class will add new collation under schema node. """ - scenarios = [ - # Fetching default URL for table node. - ('Create Table', dict(url='/browser/table/obj/')), - ('Create Range partitioned table with 2 partitions', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='range', - skip_msg='Partitioned table are not supported by ' - 'PPAS/PG 10.0 and below.' - ) - ), - ('Create Range partitioned table with 1 default and 2' - ' value based partition', - dict(url='/browser/table/obj/', - server_min_version=110000, - partition_type='range', - is_default=True, - skip_msg='Partitioned table are not supported by ' - 'PPAS/PG 10.0 and below.' 
- ) - ), - ('Create Multilevel Range partitioned table with subpartition table', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='range', - multilevel_partition=True, - skip_msg='Partitioned table are not supported by ' - 'PPAS/PG 10.0 and below.' - ) - ), - ('Create List partitioned table with 2 partitions', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='list', - skip_msg='Partitioned table are not supported by ' - 'PPAS/PG 10.0 and below.' - ) - ), - ('Create Multilevel List partitioned table with subpartition table', - dict(url='/browser/table/obj/', - server_min_version=100000, - partition_type='list', - multilevel_partition=True, - skip_msg='Partitioned table are not supported by ' - 'PPAS/PG 10.0 and below.' - ) - ), - ('Create Hash partitioned table with 2 partitions', - dict(url='/browser/table/obj/', - server_min_version=110000, - partition_type='hash', - skip_msg='Hash Partition are not supported by ' - 'PPAS/PG 11.0 and below.' 
- ) - ), - ('Create Table with Identity columns', - dict(url='/browser/table/obj/', - server_min_version=100000, - skip_msg='Identity columns are not supported by ' - 'PPAS/PG 10.0 and below.', - columns=[{ - 'name': 'iden_always', - 'cltype': 'bigint', - 'attacl': [], - 'is_primary_key': False, - 'attnotnull': True, - 'attlen': None, - 'attprecision': None, - 'attoptions': [], - 'seclabels': [], - 'colconstype': 'i', - 'attidentity': 'a', - 'seqincrement': 1, - 'seqstart': 1, - 'seqmin': 1, - 'seqmax': 10, - 'seqcache': 1, - 'seqcycle': True - }, { - 'name': 'iden_default', - 'cltype': 'bigint', - 'attacl': [], - 'is_primary_key': False, - 'attnotnull': True, - 'attlen': None, - 'attprecision': None, - 'attoptions': [], - 'seclabels': [], - 'colconstype': 'i', - 'attidentity': 'd', - 'seqincrement': 2, - 'seqstart': 2, - 'seqmin': 2, - 'seqmax': 2000, - 'seqcache': 1, - 'seqcycle': True - }]) - ), - ('Create Table with Generated columns', - dict(url='/browser/table/obj/', - server_min_version=120000, - skip_msg='Generated columns are not supported by ' - 'PPAS/PG 12.0 and below.', - columns=[{ - 'name': 'm1', - 'cltype': 'bigint', - 'attacl': [], - 'is_primary_key': False, - 'attoptions': [], - 'seclabels': [] - }, { - 'name': 'm2', - 'cltype': 'bigint', - 'attacl': [], - 'is_primary_key': False, - 'attoptions': [], - 'seclabels': [] - }, { - 'name': 'genrated', - 'cltype': 'bigint', - 'attacl': [], - 'is_primary_key': False, - 'attnotnull': True, - 'attlen': None, - 'attprecision': None, - 'attoptions': [], - 'seclabels': [], - 'colconstype': 'g', - 'genexpr': 'm1*m2' - }]) - ) + url = '/browser/table/obj/' - ] + # Generates scenarios + scenarios = utils.generate_scenarios("table_create", + tables_utils.test_cases) def setUp(self): + # Load test data + self.data = self.test_data + + # Check server version schema_info = parent_node_dict["schema"][-1] self.server_id = schema_info["server_id"] - if hasattr(self, 'server_min_version'): + if "server_min_version" in 
self.inventory_data: server_con = server_utils.connect_server(self, self.server_id) if not server_con["info"] == "Server connected.": raise Exception("Could not connect to server to add " "partitioned table.") - if server_con["data"]["version"] < self.server_min_version: - self.skipTest(self.skip_msg) + if server_con["data"]["version"] < \ + self.inventory_data["server_min_version"]: + self.skipTest(self.inventory_data["skip_msg"]) + # Create db connection self.db_name = parent_node_dict["database"][-1]["db_name"] - self.db_id = schema_info["db_id"] db_con = database_utils.connect_database(self, utils.SERVER_GROUP, self.server_id, self.db_id) if not db_con['data']["connected"]: raise Exception("Could not connect to database to add a table.") + + # Create schema self.schema_id = schema_info["schema_id"] self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, @@ -186,15 +66,20 @@ class TableAddTestCase(BaseTestGenerator): def runTest(self): """ This function will add table under schema node. 
""" + if "table_name" in self.data: + self.table_name = self.data["table_name"] + else: + self.table_name = "test_table_add_%s" % (str(uuid.uuid4())[1:8]) + db_user = self.server["username"] - self.table_name = "test_table_add_%s" % (str(uuid.uuid4())[1:8]) + # Get the common data - data = tables_utils.get_table_common_data() + self.data.update(tables_utils.get_table_common_data()) if self.server_information and \ 'server_version' in self.server_information and \ self.server_information['server_version'] >= 120000: - data['spcname'] = None - data.update({ + self.data['spcname'] = None + self.data.update({ "name": self.table_name, "relowner": db_user, "schema": self.schema_name, @@ -217,37 +102,26 @@ class TableAddTestCase(BaseTestGenerator): }] }) - # If column is provided in the scenario then use those columns - if hasattr(self, 'columns'): - data['columns'] = self.columns - - if hasattr(self, 'partition_type'): - data['partition_type'] = self.partition_type - data['is_partitioned'] = True - if self.partition_type == 'range': - if hasattr(self, 'is_default'): - tables_utils.get_range_partitions_data(data, 'Default') - elif hasattr(self, 'multilevel_partition'): - tables_utils.get_range_partitions_data( - data, None, True) - else: - tables_utils.get_range_partitions_data(data) - elif self.partition_type == 'list': - if hasattr(self, 'multilevel_partition'): - tables_utils.get_list_partitions_data(data, True) - else: - tables_utils.get_list_partitions_data(data) - else: - tables_utils.get_hash_partitions_data(data) - # Add table - response = self.tester.post( - self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + str(self.db_id) + - '/' + str(self.schema_id) + '/', - data=json.dumps(data), - content_type='html/json') - self.assertEqual(response.status_code, 200) + if self.is_positive_test: + response = tables_utils.api_create(self) + + # Assert response + utils.assert_status_code(self, response) + + else: + if self.mocking_required: + with 
patch(self.mock_data["function_name"], + side_effect=eval(self.mock_data["return_value"])): + response = tables_utils.api_create(self) + else: + if self.table_name == "": + del self.data["name"] + response = tables_utils.api_create(self) + + # Assert response + utils.assert_status_code(self, response) + utils.assert_error_message(self, response) def tearDown(self): # Disconnect the database diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_delete.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_delete.py index 62471253c..878600fc7 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_delete.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_delete.py @@ -8,6 +8,7 @@ ########################################################################## import uuid +from unittest.mock import patch from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils @@ -21,12 +22,17 @@ from . import utils as tables_utils class TableDeleteTestCase(BaseTestGenerator): """This class will delete new table under schema node.""" - scenarios = [ - # Fetching default URL for table node. 
- ('Delete Table', dict(url='/browser/table/obj/')) - ] + url = '/browser/table/obj/' + + # Generates scenarios + scenarios = utils.generate_scenarios("table_delete", + tables_utils.test_cases) def setUp(self): + # Load test data + self.data = self.test_data + + # Create db connection self.db_name = parent_node_dict["database"][-1]["db_name"] schema_info = parent_node_dict["schema"][-1] self.server_id = schema_info["server_id"] @@ -35,6 +41,8 @@ class TableDeleteTestCase(BaseTestGenerator): self.server_id, self.db_id) if not db_con['data']["connected"]: raise Exception("Could not connect to database to add a table.") + + # Create schema self.schema_id = schema_info["schema_id"] self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, @@ -42,24 +50,53 @@ class TableDeleteTestCase(BaseTestGenerator): self.schema_name) if not schema_response: raise Exception("Could not find the schema to add a table.") + + # Create table self.table_name = "test_table_delete_%s" % (str(uuid.uuid4())[1:8]) self.table_id = tables_utils.create_table(self.server, self.db_name, self.schema_name, self.table_name) - def runTest(self): - """This function will delete added table under schema node.""" + # Create table + if self.is_list: + self.table_name_1 = \ + "test_table_delete_%s" % (str(uuid.uuid4())[1:8]) + self.table_id_1 = tables_utils.create_table(self.server, + self.db_name, + self.schema_name, + self.table_name_1 + ) + + # Verify table creation table_response = tables_utils.verify_table(self.server, self.db_name, self.table_id) if not table_response: raise Exception("Could not find the table to delete.") - response = self.tester.delete(self.url + str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.table_id), - follow_redirects=True) - self.assertEqual(response.status_code, 200) + + def runTest(self): + """This function will delete added table under schema 
node.""" + if self.is_positive_test: + if self.is_list: + self.data["ids"] = [self.table_id, self.table_id_1] + response = tables_utils.api_delete(self, "") + else: + response = tables_utils.api_delete(self) + + # Assert response + utils.assert_status_code(self, response) + else: + if self.mocking_required: + with patch(self.mock_data["function_name"], + side_effect=eval(self.mock_data["return_value"])): + response = tables_utils.api_delete(self) + else: + if 'table_id' in self.data: + self.table_id = self.data['table_id'] + response = tables_utils.api_delete(self) + + # Assert response + utils.assert_status_code(self, response) + utils.assert_error_message(self, response) def tearDown(self): # Disconnect the database diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_dependencies_dependents.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_dependencies_dependents.py new file mode 100644 index 000000000..64096ff17 --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_dependencies_dependents.py @@ -0,0 +1,75 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2020, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## + +import uuid +from unittest.mock import patch + +from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ + utils as schema_utils +from pgadmin.browser.server_groups.servers.databases.tests import utils as \ + database_utils +from pgadmin.utils.route import BaseTestGenerator +from regression import parent_node_dict +from regression.python_test_utils import test_utils as utils +from . 
import utils as tables_utils + + +class TableGetDependenciesDependentsTestCase(BaseTestGenerator): + """This class will add new collation under schema node.""" + url = '/browser/table/' + + # Generates scenarios + scenarios = utils.generate_scenarios("table_dependencies_dependents", + tables_utils.test_cases) + + def setUp(self): + # Load test data + self.data = self.test_data + + # Create db connection + self.db_name = parent_node_dict["database"][-1]["db_name"] + schema_info = parent_node_dict["schema"][-1] + self.server_id = schema_info["server_id"] + self.db_id = schema_info["db_id"] + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, + self.server_id, self.db_id) + if not db_con['data']["connected"]: + raise Exception("Could not connect to database to add a table.") + + # Create schema + self.schema_id = schema_info["schema_id"] + self.schema_name = schema_info["schema_name"] + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema to add a table.") + + # Create table + self.table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8]) + self.table_id = tables_utils.create_table(self.server, self.db_name, + self.schema_name, + self.table_name) + + def runTest(self): + """This function will delete added table under schema node.""" + if self.is_positive_test: + if self.is_dependent: + self.url = self.url + 'dependent/' + response = tables_utils.api_get(self) + else: + self.url = self.url + 'dependency/' + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + + def tearDown(self): + # Disconnect the database + database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get.py index 
b6506cee6..ab1703a59 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get.py @@ -8,6 +8,7 @@ ########################################################################## import uuid +from unittest.mock import patch from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ utils as schema_utils @@ -21,12 +22,17 @@ from . import utils as tables_utils class TableGetTestCase(BaseTestGenerator): """This class will add new collation under schema node.""" - scenarios = [ - # Fetching default URL for table node. - ('Fetch table Node URL', dict(url='/browser/table/obj/')) - ] + url = '/browser/table/obj/' + + # Generates scenarios + scenarios = utils.generate_scenarios("table_get", + tables_utils.test_cases) def setUp(self): + # Load test data + self.data = self.test_data + + # Create db connection self.db_name = parent_node_dict["database"][-1]["db_name"] schema_info = parent_node_dict["schema"][-1] self.server_id = schema_info["server_id"] @@ -35,6 +41,8 @@ class TableGetTestCase(BaseTestGenerator): self.server_id, self.db_id) if not db_con['data']["connected"]: raise Exception("Could not connect to database to add a table.") + + # Create schema self.schema_id = schema_info["schema_id"] self.schema_name = schema_info["schema_name"] schema_response = schema_utils.verify_schemas(self.server, @@ -42,20 +50,49 @@ class TableGetTestCase(BaseTestGenerator): self.schema_name) if not schema_response: raise Exception("Could not find the schema to add a table.") + + # Create table self.table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8]) self.table_id = tables_utils.create_table(self.server, self.db_name, self.schema_name, self.table_name) + # Create table + if self.is_list: + self.table_name_1 = \ + "test_table_delete_%s" % (str(uuid.uuid4())[1:8]) + self.table_id_1 = tables_utils.create_table(self.server, + 
self.db_name, + self.schema_name, + self.table_name_1 + ) + def runTest(self): """This function will delete added table under schema node.""" - response = self.tester.get(self.url + str(utils.SERVER_GROUP) + - '/' + str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.table_id), - follow_redirects=True) - self.assertEqual(response.status_code, 200) + if self.is_positive_test: + if self.is_list: + response = tables_utils.api_get(self, "") + else: + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + else: + if self.mocking_required: + with patch(self.mock_data["function_name"], + side_effect=eval(self.mock_data["return_value"])): + if self.is_list: + response = tables_utils.api_get(self, "") + else: + response = tables_utils.api_get(self) + else: + if 'table_id' in self.data: + self.table_id = self.data['table_id'] + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + utils.assert_error_message(self, response) def tearDown(self): # Disconnect the database diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_existing_table_actions.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_existing_table_actions.py new file mode 100644 index 000000000..6f3a66d97 --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_existing_table_actions.py @@ -0,0 +1,98 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2020, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## + +import uuid +from unittest.mock import patch + +from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ + 
utils as schema_utils +from pgadmin.browser.server_groups.servers.databases.tests import utils as \ + database_utils +from pgadmin.utils import server_utils +from pgadmin.utils.route import BaseTestGenerator +from regression import parent_node_dict +from regression.python_test_utils import test_utils as utils +from . import utils as tables_utils + + +class TableGetTestCase(BaseTestGenerator): + """This class will add new collation under schema node.""" + url = '/browser/table/' + + # Generates scenarios + scenarios = utils.generate_scenarios( + "test_table_get_existing_table_actions", tables_utils.test_cases) + + def setUp(self): + # Load test data + self.data = self.test_data + + # Update url + self.url = self.url + self.add_to_url + + # Create db connection + self.db_name = parent_node_dict["database"][-1]["db_name"] + schema_info = parent_node_dict["schema"][-1] + self.server_id = schema_info["server_id"] + self.db_id = schema_info["db_id"] + + # Check Server version + if "server_min_version" in self.inventory_data: + server_con = server_utils.connect_server(self, self.server_id) + if not server_con["info"] == "Server connected.": + raise Exception("Could not connect to server to add " + "partitioned table.") + if server_con["data"]["version"] < \ + self.inventory_data["server_min_version"]: + self.skipTest(self.inventory_data["skip_msg"]) + + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, + self.server_id, self.db_id) + if not db_con['data']["connected"]: + raise Exception("Could not connect to database to add a table.") + + # Create schema + self.schema_id = schema_info["schema_id"] + self.schema_name = schema_info["schema_name"] + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema to add a table.") + + # Create table + self.table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8]) + self.table_id = 
tables_utils.create_table(self.server, self.db_name, + self.schema_name, + self.table_name) + + def runTest(self): + """This function will delete added table under schema node.""" + if self.is_positive_test: + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + else: + if self.mocking_required: + with patch(self.mock_data["function_name"], + side_effect=eval(self.mock_data["return_value"])): + response = tables_utils.api_get(self) + else: + if 'table_id' in self.data: + self.table_id = self.data['table_id'] + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + utils.assert_error_message(self, response) + + def tearDown(self): + # Disconnect the database + database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_nodes.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_nodes.py new file mode 100644 index 000000000..0f6636f9e --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_nodes.py @@ -0,0 +1,98 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2020, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## + +import uuid +from unittest.mock import patch + +from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ + utils as schema_utils +from pgadmin.browser.server_groups.servers.databases.tests import utils as \ + database_utils +from pgadmin.utils.route import BaseTestGenerator +from regression import parent_node_dict +from regression.python_test_utils import test_utils as utils +from . 
import utils as tables_utils + + +class TableGetNodesTestCase(BaseTestGenerator): + """This class will add new collation under schema node.""" + url = '/browser/table/nodes/' + + # Generates scenarios + scenarios = utils.generate_scenarios("table_get_nodes", + tables_utils.test_cases) + + def setUp(self): + # Load test data + self.data = self.test_data + + # Create db connection + self.db_name = parent_node_dict["database"][-1]["db_name"] + schema_info = parent_node_dict["schema"][-1] + self.server_id = schema_info["server_id"] + self.db_id = schema_info["db_id"] + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, + self.server_id, self.db_id) + if not db_con['data']["connected"]: + raise Exception("Could not connect to database to add a table.") + + # Create schema + self.schema_id = schema_info["schema_id"] + self.schema_name = schema_info["schema_name"] + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema to add a table.") + + # Create table + self.table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8]) + self.table_id = tables_utils.create_table(self.server, self.db_name, + self.schema_name, + self.table_name) + + # Create table + if self.is_list: + self.table_name_1 = \ + "test_table_delete_%s" % (str(uuid.uuid4())[1:8]) + self.table_id_1 = tables_utils.create_table(self.server, + self.db_name, + self.schema_name, + self.table_name_1) + + def runTest(self): + """This function will delete added table under schema node.""" + if self.is_positive_test: + if self.is_list: + response = tables_utils.api_get(self, "") + else: + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + else: + if self.mocking_required: + with patch(self.mock_data["function_name"], + side_effect=eval(self.mock_data["return_value"])): + if self.is_list: + response = tables_utils.api_get(self, "") + else: + 
response = tables_utils.api_get(self) + else: + if 'table_id' in self.data: + self.table_id = self.data['table_id'] + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + utils.assert_error_message(self, response) + + def tearDown(self): + # Disconnect the database + database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_pre_table_creation_parameters.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_pre_table_creation_parameters.py new file mode 100644 index 000000000..677b96071 --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_pre_table_creation_parameters.py @@ -0,0 +1,104 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2020, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## + +import uuid +from unittest.mock import patch + +from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ + utils as schema_utils +from pgadmin.browser.server_groups.servers.databases.tests import utils as \ + database_utils +from pgadmin.utils.route import BaseTestGenerator +from regression import parent_node_dict +from regression.python_test_utils import test_utils as utils +from . 
import utils as tables_utils + + +class TableGetPreTablecreationParametersTestCase(BaseTestGenerator): + """This class will add new collation under schema node.""" + url = '/browser/table/' + + # Generates scenarios + scenarios = utils.generate_scenarios( + "table_get_pre_table_creation_parameters", + tables_utils.test_cases) + + def setUp(self): + # Load test data + self.data = self.test_data + + # Update url + self.url = self.url + self.add_to_url + + # Create db connection + self.db_name = parent_node_dict["database"][-1]["db_name"] + schema_info = parent_node_dict["schema"][-1] + self.server_id = schema_info["server_id"] + self.db_id = schema_info["db_id"] + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, + self.server_id, self.db_id) + if not db_con['data']["connected"]: + raise Exception("Could not connect to database to add a table.") + + # Create schema + self.schema_id = schema_info["schema_id"] + self.schema_name = schema_info["schema_name"] + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema to add a table.") + + self.table_name = "test_table_put_%s" % (str(uuid.uuid4())[1:8]) + self.table_id = tables_utils.create_table(self.server, self.db_name, + self.schema_name, + self.table_name) + # self.table_id = tables_utils.create_table_for_partition( + # self.server, + # self.db_name, + # self.schema_name, + # self.table_name, + # 'partitioned', + # 'list') + + def runTest(self): + """This function will delete added table under schema node.""" + url_encode_data = None + if hasattr(self, "url_encoded_data"): + if "tid" in self.data: + self.data["tid"] = self.table_id + elif "tname" in self.data: + self.data["tname"] = self.table_name + url_encode_data = self.data + + if self.is_positive_test: + response = tables_utils.api_get_pre_table_creation_params( + self, url_encode_data) + + # Assert response + 
utils.assert_status_code(self, response) + else: + if self.mocking_required: + with patch(self.mock_data["function_name"], + side_effect=eval(self.mock_data["return_value"])): + response = tables_utils.api_get_pre_table_creation_params( + self, url_encode_data) + else: + if 'table_id' in self.data: + self.table_id = self.data['table_id'] + response = tables_utils.api_get_pre_table_creation_params( + self, url_encode_data) + + # Assert response + utils.assert_status_code(self, response) + utils.assert_error_message(self, response) + + def tearDown(self): + # Disconnect the database + database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_script_sql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_script_sql.py new file mode 100644 index 000000000..0f05e7c44 --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_script_sql.py @@ -0,0 +1,99 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2020, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## + +import uuid +from unittest.mock import patch + +from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ + utils as schema_utils +from pgadmin.browser.server_groups.servers.databases.tests import utils as \ + database_utils +from pgadmin.utils.route import BaseTestGenerator +from regression import parent_node_dict +from regression.python_test_utils import test_utils as utils +from . 
import utils as tables_utils + + +class TableGetScriptSqlTestCase(BaseTestGenerator): + """This class will add new collation under schema node.""" + url = '/browser/table/' + + # Generates scenarios + scenarios = utils.generate_scenarios("table_get_script_sql", + tables_utils.test_cases) + + def setUp(self): + # Load test data + self.data = self.test_data + + # Update url + self.url = self.url + self.add_to_url + + # Create db connection + self.db_name = parent_node_dict["database"][-1]["db_name"] + schema_info = parent_node_dict["schema"][-1] + self.server_id = schema_info["server_id"] + self.db_id = schema_info["db_id"] + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, + self.server_id, self.db_id) + if not db_con['data']["connected"]: + raise Exception("Could not connect to database to add a table.") + + # Create schema + self.schema_id = schema_info["schema_id"] + self.schema_name = schema_info["schema_name"] + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema to add a table.") + + # Create table + self.table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8]) + if "query" in self.inventory_data: + custom_query = self.inventory_data["query"] + self.table_id = tables_utils.create_table(self.server, + self.db_name, + self.schema_name, + self.table_name, + custom_query) + else: + self.table_id = tables_utils.create_table(self.server, + self.db_name, + self.schema_name, + self.table_name) + + def runTest(self): + """This function will delete added table under schema node.""" + if self.is_positive_test: + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + else: + if self.mocking_required: + with patch(self.mock_data["function_name"], + side_effect=eval(self.mock_data["return_value"])): + if self.is_list: + response = tables_utils.api_get(self, "") + else: + response = 
tables_utils.api_get(self) + else: + if 'table_id' in self.data: + self.table_id = self.data['table_id'] + + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + utils.assert_error_message(self, response) + + def tearDown(self): + # Disconnect the database + database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_statistics.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_statistics.py new file mode 100644 index 000000000..254fe530e --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_get_statistics.py @@ -0,0 +1,95 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2020, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## + +import uuid +from unittest.mock import patch + +from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ + utils as schema_utils +from pgadmin.browser.server_groups.servers.databases.tests import utils as \ + database_utils +from pgadmin.utils.route import BaseTestGenerator +from regression import parent_node_dict +from regression.python_test_utils import test_utils as utils +from . 
import utils as tables_utils + + +class TableGetStatisticsTestCase(BaseTestGenerator): + """This class will add new collation under schema node.""" + url = '/browser/table/stats/' + + # Generates scenarios + scenarios = utils.generate_scenarios("table_get_statistics", + tables_utils.test_cases) + + def setUp(self): + # Load test data + self.data = self.test_data + + # Create db connection + self.db_name = parent_node_dict["database"][-1]["db_name"] + schema_info = parent_node_dict["schema"][-1] + self.server_id = schema_info["server_id"] + self.db_id = schema_info["db_id"] + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, + self.server_id, self.db_id) + if not db_con['data']["connected"]: + raise Exception("Could not connect to database to add a table.") + + # Create schema + self.schema_id = schema_info["schema_id"] + self.schema_name = schema_info["schema_name"] + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema to add a table.") + + # Create table + self.table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8]) + self.table_id = tables_utils.create_table(self.server, self.db_name, + self.schema_name, + self.table_name) + + # Create table + if self.is_list: + self.table_name_1 = \ + "test_table_get_%s" % (str(uuid.uuid4())[1:8]) + self.table_id_1 = tables_utils.create_table(self.server, + self.db_name, + self.schema_name, + self.table_name_1 + ) + + def runTest(self): + """This function will delete added table under schema node.""" + if self.is_positive_test: + if self.is_list: + response = tables_utils.api_get(self, "") + else: + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + else: + if self.mocking_required: + with patch(self.mock_data["function_name"], + side_effect=eval(self.mock_data["return_value"])): + if self.is_list: + response = tables_utils.api_get(self, "") + 
else: + response = tables_utils.api_get(self) + + # Assert response + utils.assert_status_code(self, response) + utils.assert_error_message(self, response) + + def tearDown(self): + # Disconnect the database + database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_msql.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_msql.py new file mode 100644 index 000000000..993f48e7e --- /dev/null +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_msql.py @@ -0,0 +1,79 @@ +########################################################################## +# +# pgAdmin 4 - PostgreSQL Tools +# +# Copyright (C) 2013 - 2020, The pgAdmin Development Team +# This software is released under the PostgreSQL Licence +# +########################################################################## + +import uuid +from unittest.mock import patch + +from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ + utils as schema_utils +from pgadmin.browser.server_groups.servers.databases.tests import utils as \ + database_utils +from pgadmin.utils.route import BaseTestGenerator +from regression import parent_node_dict +from regression.python_test_utils import test_utils as utils +from . 
import utils as tables_utils + + +class TableGetMsqlTestCase(BaseTestGenerator): + """This class will add new collation under schema node.""" + url = '/browser/table/msql/' + + # Generates scenarios + scenarios = utils.generate_scenarios("table_msql", + tables_utils.test_cases) + + def setUp(self): + # Load test data + self.data = self.test_data + + # Create db connection + self.db_name = parent_node_dict["database"][-1]["db_name"] + schema_info = parent_node_dict["schema"][-1] + self.server_id = schema_info["server_id"] + self.db_id = schema_info["db_id"] + db_con = database_utils.connect_database(self, utils.SERVER_GROUP, + self.server_id, self.db_id) + if not db_con['data']["connected"]: + raise Exception("Could not connect to database to add a table.") + + # Create schema + self.schema_id = schema_info["schema_id"] + self.schema_name = schema_info["schema_name"] + schema_response = schema_utils.verify_schemas(self.server, + self.db_name, + self.schema_name) + if not schema_response: + raise Exception("Could not find the schema to add a table.") + + # Create table + self.table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8]) + self.table_id = tables_utils.create_table(self.server, self.db_name, + self.schema_name, + self.table_name) + + def runTest(self): + """This function will delete added table under schema node.""" + url_encode_data = self.data + if self.is_positive_test: + response = tables_utils.api_get_msql(self, url_encode_data) + + # Assert response + utils.assert_status_code(self, response) + else: + if 'table_id' in self.data: + self.table_id = self.data['table_id'] + response = tables_utils.api_get_msql(self, url_encode_data) + + # Assert response + utils.assert_status_code(self, response) + utils.assert_error_message(self, response) + + def tearDown(self): + # Disconnect the database + database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git 
a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_parameters.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_parameters.py deleted file mode 100644 index 8c8694934..000000000 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_parameters.py +++ /dev/null @@ -1,175 +0,0 @@ -########################################################################## -# -# pgAdmin 4 - PostgreSQL Tools -# -# Copyright (C) 2013 - 2020, The pgAdmin Development Team -# This software is released under the PostgreSQL Licence -# -########################################################################## - -import json -import uuid - -from pgadmin.browser.server_groups.servers.databases.schemas.tests import \ - utils as schema_utils -from pgadmin.browser.server_groups.servers.databases.tests import utils as \ - database_utils -from pgadmin.utils import server_utils as server_utils -from pgadmin.utils.route import BaseTestGenerator -from regression import parent_node_dict -from regression.python_test_utils import test_utils as utils -from . import utils as tables_utils - - -class TableUpdateParameterTestCase(BaseTestGenerator): - """This class will add new collation under schema node.""" - scenarios = [ - # Fetching default URL for table node. 
- ('Enable custom auto vacuum and set the parameters for table ' - 'without autovacuum_enabled', - dict(url='/browser/table/obj/', - api_data={ - 'autovacuum_custom': True, - 'vacuum_table': { - 'changed': [ - {'name': 'autovacuum_vacuum_cost_delay', - 'value': 20}, - {'name': 'autovacuum_vacuum_threshold', - 'value': 20} - ] - }} - ) - ), - ('Change a parameter to zero value ' - 'without autovacuum_enabled', - dict(url='/browser/table/obj/', - api_data={ - 'vacuum_table': { - 'changed': [ - {'name': 'autovacuum_vacuum_cost_delay', - 'value': 0} - ] - }} - ) - ), - ('Enable autovacuum_enabled', - dict(url='/browser/table/obj/', - api_data={'autovacuum_enabled': 't'} - ) - ), - ('Reset individual parameters for table', - dict(url='/browser/table/obj/', - api_data={ - 'autovacuum_enabled': 'x', - 'vacuum_table': { - 'changed': [ - {'name': 'autovacuum_vacuum_cost_delay', - 'value': None}, - ] - }} - ) - ), - ('Reset custom auto vacuum', - dict(url='/browser/table/obj/', - api_data={'autovacuum_custom': False} - ) - ), - ('Enable toast custom auto vacuum and set the parameters for table ' - 'without autovacuum_enabled', - dict(url='/browser/table/obj/', - api_data={ - 'toast_autovacuum': True, - 'vacuum_toast': { - 'changed': [ - {'name': 'autovacuum_vacuum_cost_delay', - 'value': 20}, - {'name': 'autovacuum_vacuum_threshold', - 'value': 20} - ] - }} - ) - ), - ('Change a toast parameter to zero value ' - 'without autovacuum_enabled', - dict(url='/browser/table/obj/', - api_data={ - 'vacuum_toast': { - 'changed': [ - {'name': 'autovacuum_vacuum_cost_delay', - 'value': 0} - ] - }} - ) - ), - ('Enable toast.autovacuum_enabled', - dict(url='/browser/table/obj/', - api_data={'toast_autovacuum_enabled': 't'} - ) - ), - ('Reset individual toast parameters for table', - dict(url='/browser/table/obj/', - api_data={ - 'toast_autovacuum_enabled': 'x', - 'vacuum_toast': { - 'changed': [ - {'name': 'autovacuum_vacuum_cost_delay', - 'value': None}, - ] - }} - ) - ), - ('Reset auto 
vacuum', - dict(url='/browser/table/obj/', - api_data={'toast_autovacuum': False} - ) - ), - ] - - table_name = "test_table_parameters_%s" % (str(uuid.uuid4())[1:8]) - - def setUp(self): - self.db_name = parent_node_dict["database"][-1]["db_name"] - schema_info = parent_node_dict["schema"][-1] - self.server_id = schema_info["server_id"] - self.db_id = schema_info["db_id"] - db_con = database_utils.connect_database(self, utils.SERVER_GROUP, - self.server_id, self.db_id) - if not db_con['data']["connected"]: - raise Exception("Could not connect to database to add a table.") - self.schema_id = schema_info["schema_id"] - self.schema_name = schema_info["schema_name"] - schema_response = schema_utils.verify_schemas(self.server, - self.db_name, - self.schema_name) - if not schema_response: - raise Exception("Could not find the schema to add a table.") - - self.table_id = tables_utils.get_table_id(self.server, self.db_name, - self.table_name) - if self.table_id is None: - self.table_id = tables_utils.create_table( - self.server, self.db_name, - self.schema_name, - self.table_name) - - def runTest(self): - """This function will fetch added table under schema node.""" - table_response = tables_utils.verify_table(self.server, self.db_name, - self.table_id) - if not table_response: - raise Exception("Could not find the table to update.") - - data = self.api_data - data['oid'] = self.table_id - response = self.tester.put(self.url + str(utils.SERVER_GROUP) + '/' + - str(self.server_id) + '/' + - str(self.db_id) + '/' + - str(self.schema_id) + '/' + - str(self.table_id), - data=json.dumps(data), - follow_redirects=True) - self.assertEqual(response.status_code, 200) - - def tearDown(self): - # Disconnect the database - database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_put.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/test_table_put.py 
# ---------------------------------------------------------------------------
# test_table_put.py -- updated test case (post-patch form)
# ---------------------------------------------------------------------------

class TablePutTestCase(BaseTestGenerator):
    """This class tests the table update (PUT) API under the schema node."""
    url = '/browser/table/obj/'

    # Generates scenarios from the JSON test-data inventory.
    scenarios = utils.generate_scenarios("table_put",
                                         tables_utils.test_cases)

    # Class-level name: all generated scenarios share one table, created on
    # the first run and looked up by name (get_table_id) on later runs.
    table_name = "test_table_parameters_%s" % (str(uuid.uuid4())[1:8])

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Verify schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Get/Create table
        self.table_id = tables_utils.get_table_id(self.server, self.db_name,
                                                  self.table_name)
        if self.table_id is None:
            self.table_id = tables_utils.create_table(
                self.server, self.db_name,
                self.schema_name,
                self.table_name)

        # Verify table creation
        table_response = tables_utils.verify_table(self.server, self.db_name,
                                                   self.table_id)
        if not table_response:
            raise Exception("Could not find the table to update.")

    def runTest(self):
        """This function will update the existing table under schema node."""
        self.data['oid'] = self.table_id

        # Grant scenarios need the test server's own user patched into the
        # ACL entry of the changed column before the PUT is issued.
        if "is_grant_tab" in self.inventory_data:
            grant_data = {"grantee": self.server["username"],
                          "grantor": self.server["username"]}
            self.data["columns"]["changed"][0]["attacl"]["added"][0].update(
                grant_data)

        if self.is_positive_test:
            response = tables_utils.api_put(self)

            # Assert response
            utils.assert_status_code(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)


# ---------------------------------------------------------------------------
# test_table_put_partition.py -- new test module (file header and imports)
# ---------------------------------------------------------------------------
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import json
import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils import server_utils as server_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as tables_utils


class TableUpdateTestCase(BaseTestGenerator):
    """This class tests the update (PUT) API for partitioned tables."""
    url = '/browser/table/obj/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("table_put_partition",
                                         tables_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]

        # Check Server version: declarative partitioning needs a minimum
        # server version, supplied per-scenario by the test inventory.
        if "server_min_version" in self.inventory_data:
            server_con = server_utils.connect_server(self, self.server_id)
            if not server_con["info"] == "Server connected.":
                raise Exception("Could not connect to server to add "
                                "partitioned table.")
            if server_con["data"]["version"] < \
                    self.inventory_data["server_min_version"]:
                self.skipTest(self.inventory_data["skip_msg"])

        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Verify schema
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table (partitioned or plain, per scenario).
        # Explicit False default: the original `.get("is_partition",)`
        # relied on an implicit None via a stray trailing comma; both are
        # falsy, so behavior is unchanged.
        self.table_name = "test_table_put_%s" % (str(uuid.uuid4())[1:8])
        self.is_partition = self.inventory_data.get("is_partition", False)
        if self.is_partition:
            self.table_id = tables_utils.create_table_for_partition(
                self.server,
                self.db_name,
                self.schema_name,
                self.table_name,
                'partitioned',
                self.inventory_data["partition_type"])
        else:
            self.table_id = tables_utils.create_table(
                self.server, self.db_name,
                self.schema_name,
                self.table_name)

        # Verify table creation
        table_response = tables_utils.verify_table(self.server, self.db_name,
                                                   self.table_id)
        if not table_response:
            raise Exception("Could not find the table to update.")

    def runTest(self):
        """This function will update the partitioned table under schema
        node (create/attach/detach partitions per scenario)."""
        self.data["id"] = self.table_id

        if self.is_partition:
            tables_utils.set_partition_data(
                self.server, self.db_name, self.schema_name, self.table_name,
                self.inventory_data["partition_type"], self.data,
                self.inventory_data["mode"])

        if self.is_positive_test:
            response = tables_utils.api_put(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           side_effect=eval(self.mock_data["return_value"])):
                    response = tables_utils.api_put(self)

                # Assert response
                utils.assert_status_code(self, response)
                utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)


# ---------------------------------------------------------------------------
# test_table_reset_statistics.py -- renamed from test_table_delete_multiple.py
# (post-patch imports; json dropped, mock.patch added)
# ---------------------------------------------------------------------------

import uuid
from unittest.mock import patch
from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from regression.python_test_utils import test_utils as utils
from . import utils as tables_utils


class TableDeleteTestCase(BaseTestGenerator):
    """This class tests the reset-statistics API for tables.

    NOTE(review): despite the class name (kept from the renamed module for
    compatibility), this hits /browser/table/reset/, not table deletion.
    """
    url = '/browser/table/reset/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("table_delete_statistics",
                                         tables_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Verify schema (comment fixed: this verifies, it does not create).
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "test_table_delete_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Verify table creation
        table_response = tables_utils.verify_table(self.server, self.db_name,
                                                   self.table_id)
        if not table_response:
            raise Exception("Could not find the table to delete.")

    def runTest(self):
        """This function will reset the statistics of the table."""
        if self.is_positive_test:
            response = tables_utils.api_delete(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            # Negative scenarios may substitute a bogus table id supplied by
            # the test data to provoke an error response.
            if 'table_id' in self.data:
                self.table_id = self.data['table_id']
            response = tables_utils.api_delete(self)

            # Assert response
            utils.assert_status_code(self, response)
            utils.assert_error_message(self, response)

    def tearDown(self):
        # Disconnect the database
        database_utils.disconnect_database(self, self.server_id, self.db_id)


# ---------------------------------------------------------------------------
# test_table_sql.py -- new test module (file header and imports)
# ---------------------------------------------------------------------------
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

import uuid
from unittest.mock import patch

from pgadmin.browser.server_groups.servers.databases.schemas.tests import \
    utils as schema_utils
from pgadmin.browser.server_groups.servers.databases.tests import utils as \
    database_utils
from pgadmin.utils.route import BaseTestGenerator
from regression import parent_node_dict
from regression.python_test_utils import test_utils as utils
from . import utils as tables_utils


class TableGetSqlTestCase(BaseTestGenerator):
    """This class tests the reverse-engineered SQL (GET sql) API for
    tables under the schema node."""
    url = '/browser/table/sql/'

    # Generates scenarios
    scenarios = utils.generate_scenarios("table_sql",
                                         tables_utils.test_cases)

    def setUp(self):
        # Load test data
        self.data = self.test_data

        # Create db connection
        self.db_name = parent_node_dict["database"][-1]["db_name"]
        schema_info = parent_node_dict["schema"][-1]
        self.server_id = schema_info["server_id"]
        self.db_id = schema_info["db_id"]
        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
                                                 self.server_id, self.db_id)
        if not db_con['data']["connected"]:
            raise Exception("Could not connect to database to add a table.")

        # Verify schema (comment fixed: this verifies, it does not create).
        self.schema_id = schema_info["schema_id"]
        self.schema_name = schema_info["schema_name"]
        schema_response = schema_utils.verify_schemas(self.server,
                                                      self.db_name,
                                                      self.schema_name)
        if not schema_response:
            raise Exception("Could not find the schema to add a table.")

        # Create table
        self.table_name = "test_table_get_%s" % (str(uuid.uuid4())[1:8])
        self.table_id = tables_utils.create_table(self.server, self.db_name,
                                                  self.schema_name,
                                                  self.table_name)

        # Create a second table for list-style scenarios
        # (is_list presumably comes from the scenario inventory -- TODO
        # confirm against table_test_data.json).
        if self.is_list:
            self.table_name_1 = \
                "test_table_delete_%s" % (str(uuid.uuid4())[1:8])
            self.table_id_1 = tables_utils.create_table(self.server,
                                                        self.db_name,
                                                        self.schema_name,
                                                        self.table_name_1
                                                        )

    def runTest(self):
        """This function will fetch the SQL of the table under schema
        node."""
        if self.is_positive_test:
            response = tables_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
        else:
            # Negative scenarios may substitute a bogus table id supplied by
            # the test data to provoke an error response.
            if 'table_id' in self.data:
                self.table_id = self.data['table_id']
            response = tables_utils.api_get(self)

            # Assert response
            utils.assert_status_code(self, response)
utils.assert_error_message(self, response) + + def tearDown(self): + # Disconnect the database + database_utils.disconnect_database(self, self.server_id, self.db_id) diff --git a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/utils.py b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/utils.py index e458dc2c5..925338c7d 100644 --- a/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/utils.py +++ b/web/pgadmin/browser/server_groups/servers/databases/schemas/tables/tests/utils.py @@ -10,11 +10,89 @@ import sys import traceback +import os +import json +from urllib.parse import urlencode from regression.python_test_utils import test_utils as utils +# Load test data from json file. +CURRENT_PATH = os.path.dirname(os.path.realpath(__file__)) +with open(CURRENT_PATH + "/table_test_data.json") as data_file: + test_cases = json.load(data_file) -def create_table(server, db_name, schema_name, table_name): + +def api_create(self): + return self.tester.post("{0}{1}/{2}/{3}/{4}/". + format(self.url, utils.SERVER_GROUP, + self.server_id, + self.db_id, self.schema_id), + data=json.dumps(self.data), + content_type='html/json' + ) + + +def api_delete(self, table_id=None): + if table_id is None: + table_id = self.table_id + return self.tester.delete("{0}{1}/{2}/{3}/{4}/{5}". + format(self.url, utils.SERVER_GROUP, + self.server_id, self.db_id, + self.schema_id, table_id), + data=json.dumps(self.data), + follow_redirects=True) + + +def api_get(self, table_id=None): + if table_id is None: + table_id = self.table_id + return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}". + format(self.url, utils.SERVER_GROUP, + self.server_id, self.db_id, + self.schema_id, table_id), + follow_redirects=True + ) + + +def api_get_msql(self, url_encode_data): + return self.tester.get("{0}{1}/{2}/{3}/{4}/{5}?{6}". 
+ format(self.url, utils.SERVER_GROUP, + self.server_id, self.db_id, + self.schema_id, self.table_id, + urlencode(url_encode_data)), + follow_redirects=True + ) + + +def api_put(self): + return self.tester.put("{0}{1}/{2}/{3}/{4}/{5}". + format(self.url, utils.SERVER_GROUP, + self.server_id, self.db_id, + self.schema_id, self.table_id), + data=json.dumps(self.data), + follow_redirects=True + ) + + +def api_get_pre_table_creation_params(self, url_encode_data=None): + if url_encode_data is None: + return self.tester.get("{0}{1}/{2}/{3}/{4}/". + format(self.url, utils.SERVER_GROUP, + self.server_id, self.db_id, + self.schema_id), + follow_redirects=True + ) + else: + return self.tester.get("{0}{1}/{2}/{3}/{4}/?{5}". + format(self.url, utils.SERVER_GROUP, + self.server_id, self.db_id, + self.schema_id, + urlencode(url_encode_data)), + follow_redirects=True + ) + + +def create_table(server, db_name, schema_name, table_name, custom_query=None): """ This function creates a table under provided schema. :param server: server details @@ -28,6 +106,12 @@ def create_table(server, db_name, schema_name, table_name): :return table_id: table id :rtype: int """ + if custom_query is None: + query = "CREATE TABLE %s.%s(id serial UNIQUE NOT NULL, name text," \ + " location text)" % \ + (schema_name, table_name) + else: + query = eval(custom_query) try: connection = utils.get_db_connection(db_name, server['username'], @@ -38,9 +122,6 @@ def create_table(server, db_name, schema_name, table_name): old_isolation_level = connection.isolation_level connection.set_isolation_level(0) pg_cursor = connection.cursor() - query = "CREATE TABLE %s.%s(id serial UNIQUE NOT NULL, name text," \ - " location text)" %\ - (schema_name, table_name) pg_cursor.execute(query) connection.set_isolation_level(old_isolation_level) connection.commit()