mirror of https://github.com/pgadmin-org/pgadmin4.git
synced 2025-02-25 18:55:31 -06:00

Implement Selenium Grid to run multiple tests across different browsers, operating systems, and machines in parallel. Fixes #5255

This commit is contained in:
parent b64896f558
commit 1294c089a8
@@ -13,7 +13,7 @@ New features

 Housekeeping
 ************

+| `Issue #5255 <https://redmine.postgresql.org/issues/5255>`_ - Implement Selenium Grid to run multiple tests across different browsers, operating systems, and machines in parallel.
 | `Issue #5334 <https://redmine.postgresql.org/issues/5334>`_ - Improve code coverage and API test cases for the Rules module.
 | `Issue #5443 <https://redmine.postgresql.org/issues/5443>`_ - Remove support for Python 2.
 | `Issue #5444 <https://redmine.postgresql.org/issues/5444>`_ - Cleanup Python detection in the runtime project file.
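For orientation (this example is not part of the commit): once a Selenoid/Selenium Grid hub is running, feature tests reach the remote browsers through Selenium's Remote WebDriver rather than a local driver. The hub URL, browser version, and capabilities below are illustrative assumptions only, in the Selenium 3 style current at the time of this commit.

    from selenium import webdriver

    # Assumed hub endpoint; Selenoid listens on port 4444 by default.
    driver = webdriver.Remote(
        command_executor='http://localhost:4444/wd/hub',
        desired_capabilities={
            'browserName': 'chrome',
            'version': '80.0',   # must match an entry in browsers.json
            'enableVNC': True,   # Selenoid-specific capability
        })
    try:
        driver.get('http://localhost:5050/browser/')  # hypothetical pgAdmin URL
    finally:
        driver.quit()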
277 tools/update_selenoid_browsers.py Normal file
@@ -0,0 +1,277 @@
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2020, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
# #########################################################################
# Updates browser images (selenoid-docker) depending on arguments passed while
# running this script.
# e.g. --chrome /usr/bin/google-chrome --firefox /usr/bin/firefox
# Access details about switches using help
# e.g. --help

import argparse
import os
import subprocess
import sys
import traceback
import requests
import json


def read_command_line():
    """Read the command line arguments.
    Returns:
        ArgumentParser: The parsed arguments object
    """
    parser = argparse.ArgumentParser(
        description='Get latest browser images (chrome & firefox) for '
                    'selenoid. e.g. - --chrome /usr/bin/google-chrome '
                    '--firefox /usr/bin/firefox')
    parser.add_argument("--chrome", metavar="CHROME",
                        help="the Chrome executable path")
    parser.add_argument("--firefox", metavar="FIREFOX",
                        help="the Firefox executable path")
    args_val = parser.parse_args()
    return args_val


def get_browser_version(browser_name, executable_path):
    """
    Function returns the browser version for the specified browser, using the
    executable path passed in the arguments.
    :param browser_name:
    :param executable_path: e.g. /usr/bin/firefox
    :return: browser version
    """
    # On Linux/Mac we run the browser executable with the --version flag,
    # then parse the output.
    browser_version_val = None
    try:
        result = subprocess.Popen([executable_path, '--version'],
                                  stdout=subprocess.PIPE)
    except FileNotFoundError:
        print('The specified browser executable could not be found.')
        sys.exit(1)

    version_str = result.stdout.read().decode("utf-8")

    if browser_name.lower() == "chrome":
        # Check for 'Chrom' not 'Chrome' in case the user is using Chromium.
        if "Chrom" not in version_str:
            print('The specified Chrome executable output an unexpected '
                  'version string: {}.'.format(version_str))
            sys.exit(1)
        # On some Linux distros 'chrome --version' gives output like
        # 'Google Chrome 80.0.3987.132 unknown\n',
        # so we need to check and remove the 'unknown' string from the version.
        if version_str.endswith("unknown\n"):
            version_str = version_str.strip("unknown\n").strip()

        chrome_version = '.'.join(version_str.split()[-1].split('.')[:-2])

        # Make sure the browser version has only one decimal point.
        if chrome_version.count('.') != 1:
            print('The specified Chrome executable output an unexpected '
                  'version string: {}.'.format(version_str))
            sys.exit(1)
        browser_version_val = chrome_version
    elif browser_name.lower() == "firefox":
        if "Firefox" not in version_str:
            print('The specified Firefox executable output an unexpected '
                  'version string: {}.'.format(version_str))
            sys.exit(1)

        # Sometimes 'firefox --version' gives output like
        # 'Running without a11y support!
        # Mozilla Firefox 68.7.0esr'
        # Other output - [root@localhost local]# /usr/bin/firefox --version
        # Mozilla Firefox 75.0
        if 'esr' in version_str:
            firefox_version = '.'.join(
                version_str.split()[-1].split('.')[:-2]) + '.0'
        else:
            firefox_version = '.'.join(
                version_str.split()[-1].split('.')[:-1]) + '.0'

        # Make sure the browser version has only one decimal point.
        if firefox_version.count('.') != 1:
            print('The specified Firefox executable output an unexpected '
                  'version string: {}.'.format(version_str))
            sys.exit(1)
        browser_version_val = firefox_version
    else:
        print("{0} is not recognised.".format(browser_name))
        sys.exit(1)
    return browser_version_val


def check_and_download_vnc_browser_image(browser_name, browser_version):
    """
    Function checks the presence of vnc images for the passed browser
    at the docker.io/selenoid/ registry.
    :param browser_name:
    :param browser_version:
    :return: true if the browser image is available & downloaded, else false
    """
    res = requests.get(
        'https://registry.hub.docker.com/v2/repositories/selenoid/vnc_' +
        browser_name + '/tags/')
    res = res.json()
    version_tag = []
    if len(res['results']) > 0:
        for result in res['results']:
            if 'name' in result:
                version_tag.append(result['name'])
    vnc_image_available = False
    image_name = 'vnc_' + browser_name + ':' + browser_version

    for idx, tag in enumerate(version_tag):
        if browser_version == tag:
            command = 'docker pull selenoid/vnc_' + browser_name + ':' \
                      + browser_version
            print(' VNC image is available & downloading now... {0}'.format(
                command))
            try:
                subprocess.call([command], shell=True, stdout=subprocess.PIPE)
                vnc_image_available = True
            except Exception:
                traceback.print_exc(file=sys.stderr)
                print(
                    '{0} Image found but could not download.'.format(command))
                sys.exit(1)
            break
        elif idx == len(version_tag) - 1:
            print("{0} Image is not available.".format(image_name))
            vnc_image_available = False
        else:
            pass
    return vnc_image_available


def reload_selenoid_config():
    """
    Function runs the command to refresh the selenoid configuration.
    :return: true if command execution for the selenoid reload is successful,
    else false
    """
    command = 'docker kill -s HUP selenoid'
    reload_successful = False
    try:
        subprocess.call([command], shell=True, stdout=subprocess.PIPE)
        print(" Selenoid Configuration is reloaded.")
        reload_successful = True
    except Exception:
        traceback.print_exc(file=sys.stderr)
        print('Error while reloading selenoid configuration.')
        sys.exit(1)
    return reload_successful


def edit_browsers_json(browser_name, browser_version):
    """
    Function edits browsers.json, which is used by selenoid to
    load the browser configuration.
    The default path for this file is
    "user_home_dir + '/.aerokube/selenoid/browsers.json'".
    Currently this is hardcoded; it might need to be modified
    if we want to pass a customized browsers.json.
    :param browser_name:
    :param browser_version:
    :return:
    """
    file_edited = True
    # Read existing browsers.json
    json_file = open(file_path, 'r')
    existing_data = json.load(json_file)
    updated_data = None

    # Update data for new browser images
    if browser_name.lower() == 'chrome':
        version_data = existing_data['chrome']['versions']
        if browser_version in version_data.keys():
            print(" {0}:{1} is already updated in browsers.json.".format(
                browser_name, browser_version))
            file_edited = True
        else:
            data_to_insert = dict(
                {browser_version: {
                    'image': 'selenoid/vnc_chrome:' + browser_version,
                    'port': '4444', 'path': '/'}})
            (existing_data['chrome']['versions']).update(data_to_insert)
            updated_data = existing_data
            print(updated_data)

    elif browser_name.lower() == 'firefox':
        version_data = existing_data['firefox']['versions']
        if browser_version in version_data.keys():
            print(" {0}:{1} is already updated in browsers.json.".format(
                browser_name, browser_version))
            file_edited = True
        else:
            data_to_insert = dict(
                {browser_version: {
                    'image': 'selenoid/vnc_firefox:' + browser_version,
                    'port': '4444', 'path': '/'}})
            (existing_data['firefox']['versions']).update(data_to_insert)
            updated_data = existing_data
    else:
        print("Browser version not matched")
        file_edited = False

    # Write updated data in browsers.json
    if updated_data is not None:
        json_file = open(file_path, 'w')
        json.dump(updated_data, json_file)
        print(" 'browsers.json' is updated for {0} {1}".format(
            browser_name, browser_version))

        file_edited = True
    return file_edited


# Main Program starts here
# Read command line arguments & get the list of browser_name, executable path.
args = vars(read_command_line())

# Get the path for browsers.json
user_home_dir = os.getenv("HOME")
file_path = user_home_dir + '/.aerokube/selenoid/browsers.json'
print("***** Updating '{0}' for new browser versions.*****".format(file_path))

# Iterate over the arguments passed
for browser, executable_path in args.items():
    if executable_path is not None:
        # Get browser name
        browser_name = browser
        # Get browser version
        browser_version = get_browser_version(browser, executable_path)
        print(
            " Browser version for {0} is {1} in current executable path ".
            format(browser_name, browser_version))

        # Download the vnc browser image.
        download_new_image = check_and_download_vnc_browser_image(
            browser_name, browser_version)

        # If the browser vnc image is available, then edit browsers.json
        if download_new_image:
            if edit_browsers_json(browser_name, browser_version):
                print(
                    " File 'browsers.json' is updated for {0} - {1} \n".format(
                        browser_name, browser_version))
            else:
                print(
                    " File 'browsers.json' can NOT be updated for {0} - {1} \n"
                    .format(browser_name, browser_version))
        else:
            print(" Browser image is not available for {0}, {1}".format(
                browser_name, browser_version))

# Reload the selenoid configuration
if reload_selenoid_config():
    print(
        "***** Updated '{0}' for new browser versions.*****".format(file_path))
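For reference, and purely as an illustration (the version number is assumed), the entry that edit_browsers_json() inserts under a browser's 'versions' key in ~/.aerokube/selenoid/browsers.json has this shape:

    # Illustrative only: shape of one version entry written by the script.
    entry = {
        '80.0': {
            'image': 'selenoid/vnc_chrome:80.0',
            'port': '4444',
            'path': '/'
        }
    }
    # The script only adds version entries like this; any other keys already
    # present in the file are left untouched.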
@@ -562,6 +562,10 @@ try:
 except ImportError:
     pass

+# Override the DEFAULT_SERVER value from an environment variable.
+if 'PGADMIN_CONFIG_DEFAULT_SERVER' in os.environ:
+    DEFAULT_SERVER = os.environ['PGADMIN_CONFIG_DEFAULT_SERVER']
+
 # Disable USER_INACTIVITY_TIMEOUT when SERVER_MODE=False
 if not SERVER_MODE:
     USER_INACTIVITY_TIMEOUT = 0
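As a usage note (not part of the commit), the override above lets a deployment change the bound address without editing config.py; the value shown is only an assumed example.

    import os
    # Must be set before pgAdmin's config module is imported.
    os.environ['PGADMIN_CONFIG_DEFAULT_SERVER'] = '0.0.0.0'  # assumed value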
@@ -44,31 +44,30 @@ class IndexConstraintAddTestCase(BaseTestGenerator):
         dict(url='/browser/unique_constraint/obj/', data=unique_key_data))
     ]

-    @classmethod
-    def setUpClass(cls):
-        cls.db_name = parent_node_dict["database"][-1]["db_name"]
+    def setUp(self):
+        self.db_name = parent_node_dict["database"][-1]["db_name"]
         schema_info = parent_node_dict["schema"][-1]
-        cls.server_id = schema_info["server_id"]
-        cls.db_id = schema_info["db_id"]
-        db_con = database_utils.connect_database(cls, utils.SERVER_GROUP,
-                                                 cls.server_id, cls.db_id)
+        self.server_id = schema_info["server_id"]
+        self.db_id = schema_info["db_id"]
+        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
+                                                 self.server_id, self.db_id)
         if not db_con['data']["connected"]:
             raise Exception("Could not connect to database to add a "
                             "index constraint(primary key or unique key).")
-        cls.schema_id = schema_info["schema_id"]
-        cls.schema_name = schema_info["schema_name"]
-        schema_response = schema_utils.verify_schemas(cls.server,
-                                                      cls.db_name,
-                                                      cls.schema_name)
+        self.schema_id = schema_info["schema_id"]
+        self.schema_name = schema_info["schema_name"]
+        schema_response = schema_utils.verify_schemas(self.server,
+                                                      self.db_name,
+                                                      self.schema_name)
         if not schema_response:
             raise Exception("Could not find the schema to add a index "
                             "constraint(primary key or unique key).")
-        cls.table_name = "table_indexconstraint_%s" % \
-                         (str(uuid.uuid4())[1:8])
-        cls.table_id = tables_utils.create_table(cls.server,
-                                                 cls.db_name,
-                                                 cls.schema_name,
-                                                 cls.table_name)
+        self.table_name = "table_indexconstraint_%s" % \
+                          (str(uuid.uuid4())[1:8])
+        self.table_id = tables_utils.create_table(self.server,
+                                                  self.db_name,
+                                                  self.schema_name,
+                                                  self.table_name)

     def runTest(self):
         """This function will add index constraint(primary key or unique key)
@@ -81,10 +80,9 @@ class IndexConstraintAddTestCase(BaseTestGenerator):
                                    content_type='html/json')
         self.assertEquals(response.status_code, 200)

-    @classmethod
-    def tearDownClass(cls):
+    def tearDown(self):
         # Disconnect the database
-        database_utils.disconnect_database(cls, cls.server_id, cls.db_id)
+        database_utils.disconnect_database(self, self.server_id, self.db_id)


 class ConstraintsUsingIndexAddTestCase(BaseTestGenerator):
@@ -117,30 +115,28 @@ class ConstraintsUsingIndexAddTestCase(BaseTestGenerator):
         dict(url='/browser/unique_constraint/obj/', data=unique_key_data))
     ]

-    @classmethod
-    def setUpClass(cls):
-        cls.db_name = parent_node_dict["database"][-1]["db_name"]
+    def setUp(self):
+        self.db_name = parent_node_dict["database"][-1]["db_name"]
         schema_info = parent_node_dict["schema"][-1]
-        cls.server_id = schema_info["server_id"]
-        cls.db_id = schema_info["db_id"]
-        db_con = database_utils.connect_database(cls, utils.SERVER_GROUP,
-                                                 cls.server_id, cls.db_id)
+        self.server_id = schema_info["server_id"]
+        self.db_id = schema_info["db_id"]
+        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
+                                                 self.server_id, self.db_id)
         if not db_con['data']["connected"]:
             raise Exception("Could not connect to database to add a "
                             "constraint using index.")
-        cls.schema_id = schema_info["schema_id"]
-        cls.schema_name = schema_info["schema_name"]
-        schema_response = schema_utils.verify_schemas(cls.server,
-                                                      cls.db_name,
-                                                      cls.schema_name)
+        self.schema_id = schema_info["schema_id"]
+        self.schema_name = schema_info["schema_name"]
+        schema_response = schema_utils.verify_schemas(self.server,
+                                                      self.db_name,
+                                                      self.schema_name)
         if not schema_response:
             raise Exception("Could not find the schema to add a index "
                             "constraint(primary key or unique key).")
-        cls.table_name = "table_constraint_%s" % (str(uuid.uuid4())[1:8])
-        cls.table_id = tables_utils.create_table(cls.server,
-                                                 cls.db_name,
-                                                 cls.schema_name,
-                                                 cls.table_name)
+        self.table_name = "table_constraint_%s" % (str(uuid.uuid4())[1:8])
+        self.table_id = tables_utils.create_table(self.server, self.db_name,
+                                                  self.schema_name,
+                                                  self.table_name)

     def runTest(self):
         """This function will add index constraint(primary key or unique key)
@@ -158,7 +154,6 @@ class ConstraintsUsingIndexAddTestCase(BaseTestGenerator):
                                    content_type='html/json')
         self.assertEquals(response.status_code, 200)

-    @classmethod
-    def tearDownClass(cls):
+    def tearDown(self):
         # Disconnect the database
-        database_utils.disconnect_database(cls, cls.server_id, cls.db_id)
+        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -38,31 +38,30 @@ class IndexConstraintDeleteTestCase(BaseTestGenerator):
                          type="UNIQUE"))
     ]

-    @classmethod
-    def setUpClass(cls):
-        cls.db_name = parent_node_dict["database"][-1]["db_name"]
+    def setUp(self):
+        self.db_name = parent_node_dict["database"][-1]["db_name"]
         schema_info = parent_node_dict["schema"][-1]
-        cls.server_id = schema_info["server_id"]
-        cls.db_id = schema_info["db_id"]
-        db_con = database_utils.connect_database(cls, utils.SERVER_GROUP,
-                                                 cls.server_id, cls.db_id)
+        self.server_id = schema_info["server_id"]
+        self.db_id = schema_info["db_id"]
+        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
+                                                 self.server_id, self.db_id)
         if not db_con['data']["connected"]:
             raise Exception("Could not connect to database to add a "
                             "index constraint(primary key or unique key).")
-        cls.schema_id = schema_info["schema_id"]
-        cls.schema_name = schema_info["schema_name"]
-        schema_response = schema_utils.verify_schemas(cls.server,
-                                                      cls.db_name,
-                                                      cls.schema_name)
+        self.schema_id = schema_info["schema_id"]
+        self.schema_name = schema_info["schema_name"]
+        schema_response = schema_utils.verify_schemas(self.server,
+                                                      self.db_name,
+                                                      self.schema_name)
         if not schema_response:
             raise Exception("Could not find the schema to add a index "
                             "constraint(primary key or unique key).")
-        cls.table_name = "table_indexconstraint_%s" % \
-                         (str(uuid.uuid4())[1:8])
-        cls.table_id = tables_utils.create_table(cls.server,
-                                                 cls.db_name,
-                                                 cls.schema_name,
-                                                 cls.table_name)
+        self.table_name = "table_indexconstraint_%s" % \
+                          (str(uuid.uuid4())[1:8])
+        self.table_id = tables_utils.create_table(self.server,
+                                                  self.db_name,
+                                                  self.schema_name,
+                                                  self.table_name)

     def runTest(self):
         """This function will delete index constraint(primary key or
@@ -81,7 +80,6 @@ class IndexConstraintDeleteTestCase(BaseTestGenerator):
                                    )
         self.assertEquals(response.status_code, 200)

-    @classmethod
-    def tearDownClass(cls):
+    def tearDown(self):
         # Disconnect the database
-        database_utils.disconnect_database(cls, cls.server_id, cls.db_id)
+        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -38,31 +38,30 @@ class IndexConstraintGetTestCase(BaseTestGenerator):
                          type="UNIQUE"))
     ]

-    @classmethod
-    def setUpClass(cls):
-        cls.db_name = parent_node_dict["database"][-1]["db_name"]
+    def setUp(self):
+        self.db_name = parent_node_dict["database"][-1]["db_name"]
         schema_info = parent_node_dict["schema"][-1]
-        cls.server_id = schema_info["server_id"]
-        cls.db_id = schema_info["db_id"]
-        db_con = database_utils.connect_database(cls, utils.SERVER_GROUP,
-                                                 cls.server_id, cls.db_id)
+        self.server_id = schema_info["server_id"]
+        self.db_id = schema_info["db_id"]
+        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
+                                                 self.server_id, self.db_id)
         if not db_con['data']["connected"]:
             raise Exception("Could not connect to database to add a "
                             "index constraint(primary key or unique key).")
-        cls.schema_id = schema_info["schema_id"]
-        cls.schema_name = schema_info["schema_name"]
-        schema_response = schema_utils.verify_schemas(cls.server,
-                                                      cls.db_name,
-                                                      cls.schema_name)
+        self.schema_id = schema_info["schema_id"]
+        self.schema_name = schema_info["schema_name"]
+        schema_response = schema_utils.verify_schemas(self.server,
+                                                      self.db_name,
+                                                      self.schema_name)
         if not schema_response:
             raise Exception("Could not find the schema to add a index "
                             "constraint(primary key or unique key).")
-        cls.table_name = "table_indexconstraint_%s" % \
-                         (str(uuid.uuid4())[1:8])
-        cls.table_id = tables_utils.create_table(cls.server,
-                                                 cls.db_name,
-                                                 cls.schema_name,
-                                                 cls.table_name)
+        self.table_name = "table_indexconstraint_%s" % \
+                          (str(uuid.uuid4())[1:8])
+        self.table_id = tables_utils.create_table(self.server,
+                                                  self.db_name,
+                                                  self.schema_name,
+                                                  self.table_name)

     def runTest(self):
         """This function will fetch the index constraint(primary key or
@@ -81,7 +80,6 @@ class IndexConstraintGetTestCase(BaseTestGenerator):
                                    )
         self.assertEquals(response.status_code, 200)

-    @classmethod
-    def tearDownClass(cls):
+    def tearDown(self):
         # Disconnect the database
-        database_utils.disconnect_database(cls, cls.server_id, cls.db_id)
+        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -40,31 +40,30 @@ class IndexConstraintUpdateTestCase(BaseTestGenerator):
                          type="UNIQUE", data=data))
     ]

-    @classmethod
-    def setUpClass(cls):
-        cls.db_name = parent_node_dict["database"][-1]["db_name"]
+    def setUp(self):
+        self.db_name = parent_node_dict["database"][-1]["db_name"]
         schema_info = parent_node_dict["schema"][-1]
-        cls.server_id = schema_info["server_id"]
-        cls.db_id = schema_info["db_id"]
-        db_con = database_utils.connect_database(cls, utils.SERVER_GROUP,
-                                                 cls.server_id, cls.db_id)
+        self.server_id = schema_info["server_id"]
+        self.db_id = schema_info["db_id"]
+        db_con = database_utils.connect_database(self, utils.SERVER_GROUP,
+                                                 self.server_id, self.db_id)
         if not db_con['data']["connected"]:
             raise Exception("Could not connect to database to add a "
                             "index constraint(primary key or unique key).")
-        cls.schema_id = schema_info["schema_id"]
-        cls.schema_name = schema_info["schema_name"]
-        schema_response = schema_utils.verify_schemas(cls.server,
-                                                      cls.db_name,
-                                                      cls.schema_name)
+        self.schema_id = schema_info["schema_id"]
+        self.schema_name = schema_info["schema_name"]
+        schema_response = schema_utils.verify_schemas(self.server,
+                                                      self.db_name,
+                                                      self.schema_name)
         if not schema_response:
             raise Exception("Could not find the schema to add a index "
                             "constraint(primary key or unique key).")
-        cls.table_name = "table_indexconstraint_%s" % \
-                         (str(uuid.uuid4())[1:8])
-        cls.table_id = tables_utils.create_table(cls.server,
-                                                 cls.db_name,
-                                                 cls.schema_name,
-                                                 cls.table_name)
+        self.table_name = "table_indexconstraint_%s" % \
+                          (str(uuid.uuid4())[1:8])
+        self.table_id = tables_utils.create_table(self.server,
+                                                  self.db_name,
+                                                  self.schema_name,
+                                                  self.table_name)

     def runTest(self):
         """This function will update index constraint(primary key or
@@ -84,7 +83,6 @@ class IndexConstraintUpdateTestCase(BaseTestGenerator):
                                        follow_redirects=True)
         self.assertEquals(response.status_code, 200)

-    @classmethod
-    def tearDownClass(cls):
+    def tearDown(self):
         # Disconnect the database
-        database_utils.disconnect_database(cls, cls.server_id, cls.db_id)
+        database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -125,8 +125,9 @@ class TableUpdateParameterTestCase(BaseTestGenerator):
         ),
     ]

-    @classmethod
-    def setUpClass(self):
+    table_name = "test_table_parameters_%s" % (str(uuid.uuid4())[1:8])
+
+    def setUp(self):
         self.db_name = parent_node_dict["database"][-1]["db_name"]
         schema_info = parent_node_dict["schema"][-1]
         self.server_id = schema_info["server_id"]
@@ -142,8 +143,10 @@ class TableUpdateParameterTestCase(BaseTestGenerator):
                                                       self.schema_name)
         if not schema_response:
             raise Exception("Could not find the schema to add a table.")
-        self.table_name = "test_table_parameters_%s" % (str(uuid.uuid4())[1:8])
-        self.table_id = tables_utils.create_table(
-            self.server, self.db_name,
-            self.schema_name,
+
+        self.table_id = tables_utils.get_table_id(self.server, self.db_name,
+                                                  self.table_name)
+        if self.table_id is None:
+            self.table_id = tables_utils.create_table(
+                self.server, self.db_name,
+                self.schema_name,
@@ -167,7 +170,6 @@ class TableUpdateParameterTestCase(BaseTestGenerator):
                                        follow_redirects=True)
         self.assertEquals(response.status_code, 200)

-    @classmethod
-    def tearDownClass(self):
+    def tearDown(self):
         # Disconnect the database
         database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -483,3 +483,26 @@ def get_hash_partitions_data(data):
         }]
     data['partition_keys'] = \
         [{'key_type': 'column', 'pt_column': 'empno'}]
+
+
+def get_table_id(server, db_name, table_name):
+    try:
+        connection = utils.get_db_connection(db_name,
+                                             server['username'],
+                                             server['db_password'],
+                                             server['host'],
+                                             server['port'],
+                                             server['sslmode'])
+        pg_cursor = connection.cursor()
+        pg_cursor.execute("select oid from pg_class where relname='%s'" %
+                          table_name)
+        table = pg_cursor.fetchone()
+        if table:
+            table_id = table[0]
+        else:
+            table_id = None
+        connection.close()
+        return table_id
+    except Exception:
+        traceback.print_exc(file=sys.stderr)
+        raise
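In the updated tests this helper makes setUp() repeatable: look the table up first and only create it when it does not exist yet. Condensed from the TableUpdateParameterTestCase hunk above, using the same names:

    self.table_id = tables_utils.get_table_id(self.server, self.db_name,
                                              self.table_name)
    if self.table_id is None:
        self.table_id = tables_utils.create_table(
            self.server, self.db_name, self.schema_name, self.table_name)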
@@ -125,8 +125,9 @@ class MViewsUpdateParameterTestCase(BaseTestGenerator):
         ),
     ]

-    @classmethod
-    def setUpClass(self):
+    m_view_name = "test_mview_put_%s" % (str(uuid.uuid4())[1:8])
+
+    def setUp(self):
         self.db_name = parent_node_dict["database"][-1]["db_name"]
         schema_info = parent_node_dict["schema"][-1]
         self.server_id = schema_info["server_id"]
@@ -150,11 +151,13 @@ class MViewsUpdateParameterTestCase(BaseTestGenerator):
         if not schema_response:
             raise Exception("Could not find the schema to update a mview.")

-        self.m_view_name = "test_mview_put_%s" % (str(uuid.uuid4())[1:8])
-        m_view_sql = "CREATE MATERIALIZED VIEW %s.%s TABLESPACE pg_default " \
-                     "AS SELECT 'test_pgadmin' WITH NO DATA;ALTER TABLE " \
-                     "%s.%s OWNER TO %s"
-        self.m_view_id = views_utils.create_view(self.server,
-                                                 self.db_name,
-                                                 self.schema_name,
+        self.m_view_id = views_utils.get_view_id(self.server, self.db_name,
+                                                 self.m_view_name)
+
+        if self.m_view_id is None:
+            m_view_sql = "CREATE MATERIALIZED VIEW %s.%s TABLESPACE " \
+                         "pg_default AS SELECT 'test_pgadmin' WITH NO " \
+                         "DATA;ALTER TABLE %s.%s OWNER TO %s"
+            self.m_view_id = views_utils.create_view(self.server,
+                                                     self.db_name,
+                                                     self.schema_name,
@@ -180,7 +183,6 @@ class MViewsUpdateParameterTestCase(BaseTestGenerator):
                                        follow_redirects=True)
         self.assertEquals(response.status_code, 200)

-    @classmethod
-    def tearDownClass(self):
+    def tearDown(self):
         # Disconnect the database
         database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -48,8 +48,7 @@ class MViewsUpdateParameterTestCase(BaseTestGenerator):
         ),
     ]

-    @classmethod
-    def setUpClass(self):
+    def setUp(self):
         self.db_name = parent_node_dict["database"][-1]["db_name"]
         schema_info = parent_node_dict["schema"][-1]
         self.server_id = schema_info["server_id"]
@@ -143,7 +142,6 @@ class MViewsUpdateParameterTestCase(BaseTestGenerator):
         # On success we get job_id from server
         self.assertTrue('job_id' in response.json['data'])

-    @classmethod
-    def tearDownClass(self):
+    def tearDown(self):
         # Disconnect the database
         database_utils.disconnect_database(self, self.server_id, self.db_id)
@@ -86,3 +86,28 @@ def verify_view(server, db_name, view_name):
     except Exception:
         traceback.print_exc(file=sys.stderr)
         raise
+
+
+def get_view_id(server, db_name, view_name):
+    try:
+        connection = utils.get_db_connection(db_name,
+                                             server['username'],
+                                             server['db_password'],
+                                             server['host'],
+                                             server['port'],
+                                             server['sslmode'])
+        old_isolation_level = connection.isolation_level
+        connection.set_isolation_level(0)
+        pg_cursor = connection.cursor()
+        # Get 'oid' from newly created view
+        pg_cursor.execute("select oid from pg_class where relname='%s'" %
+                          view_name)
+        view = pg_cursor.fetchone()
+        view_id = None
+        if view:
+            view_id = view[0]
+        connection.close()
+        return view_id
+    except Exception:
+        traceback.print_exc(file=sys.stderr)
+        raise
@@ -8,7 +8,6 @@
 ##########################################################################

 from __future__ import print_function
-import pyperclip
 import random

 from selenium.webdriver import ActionChains
@@ -60,8 +59,18 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
         self._mouseup_outside_grid_still_makes_a_selection()
         self._copies_rows_with_header()

+    def paste_values_to_scratch_pad(self):
+        self.page.driver.switch_to.default_content()
+        self.page.driver.switch_to_frame(
+            self.page.driver.find_element_by_tag_name("iframe"))
+        scratch_pad_ele = self.page.find_by_css_selector(
+            QueryToolLocators.scratch_pad_css)
+        self.page.paste_values(scratch_pad_ele)
+        clipboard_text = scratch_pad_ele.get_attribute("value")
+        scratch_pad_ele.clear()
+        return clipboard_text
+
     def _copies_rows(self):
-        pyperclip.copy("old clipboard contents")
         first_row = self.page.find_by_xpath(
             QueryToolLocators.output_row_xpath.format(1))
         first_row.click()
@@ -70,14 +79,14 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
             QueryToolLocators.copy_button_css)
         copy_button.click()

+        clipboard_text = self.paste_values_to_scratch_pad()
         self.assertEqual('"Some-Name"\t6\t"some info"',
-                         pyperclip.paste())
+                         clipboard_text)

     def _copies_rows_with_header(self):
         self.page.find_by_css_selector('#btn-copy-row-dropdown').click()
         self.page.find_by_css_selector('a#btn-copy-with-header').click()

-        pyperclip.copy("old clipboard contents")
         select_all = self.page.find_by_xpath(
             QueryToolLocators.select_all_column)
         select_all.click()
@@ -86,13 +95,14 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
             QueryToolLocators.copy_button_css)
         copy_button.click()

+        clipboard_text = self.paste_values_to_scratch_pad()
+
         self.assertEqual("""\"some_column"\t"value"\t"details"
 \"Some-Name"\t6\t"some info"
 \"Some-Other-Name"\t22\t"some other info"
-\"Yet-Another-Name"\t14\t"cool info\"""", pyperclip.paste())
+\"Yet-Another-Name"\t14\t"cool info\"""", clipboard_text)

     def _copies_columns(self):
-        pyperclip.copy("old clipboard contents")
         column = self.page.find_by_css_selector(
             QueryToolLocators.output_column_header_css.format('some_column'))
         column.click()
@@ -101,14 +111,15 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
             QueryToolLocators.copy_button_css)
         copy_button.click()

+        clipboard_text = self.paste_values_to_scratch_pad()
+
         self.assertEqual(
             """\"Some-Name"
 "Some-Other-Name"
 "Yet-Another-Name\"""",
-            pyperclip.paste())
+            clipboard_text)

     def _copies_row_using_keyboard_shortcut(self):
-        pyperclip.copy("old clipboard contents")
         first_row = self.page.find_by_xpath(
             QueryToolLocators.output_row_xpath.format(1))
         first_row.click()
@@ -116,11 +127,12 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
         ActionChains(self.page.driver).key_down(
             Keys.CONTROL).send_keys('c').key_up(Keys.CONTROL).perform()

+        clipboard_text = self.paste_values_to_scratch_pad()
+
         self.assertEqual('"Some-Name"\t6\t"some info"',
-                         pyperclip.paste())
+                         clipboard_text)

     def _copies_column_using_keyboard_shortcut(self):
-        pyperclip.copy("old clipboard contents")
         column = self.page.find_by_css_selector(
             QueryToolLocators.output_column_header_css.format('some_column'))
         column.click()
@@ -128,15 +140,15 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
         ActionChains(self.page.driver).key_down(
             Keys.CONTROL).send_keys('c').key_up(Keys.CONTROL).perform()

+        clipboard_text = self.paste_values_to_scratch_pad()
+
         self.assertEqual(
             """\"Some-Name"
 "Some-Other-Name"
 "Yet-Another-Name\"""",
-            pyperclip.paste())
+            clipboard_text)

     def _copies_rectangular_selection(self):
-        pyperclip.copy("old clipboard contents")
-
         top_left_cell = \
             self.page.find_by_xpath(
                 QueryToolLocators.output_column_data_xpath.
@@ -154,12 +166,12 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
             self.page.driver
         ).key_down(Keys.CONTROL).send_keys('c').key_up(Keys.CONTROL).perform()

+        clipboard_text = self.paste_values_to_scratch_pad()
+
         self.assertEqual(
-            '"Some-Other-Name"\t22\n"Yet-Another-Name"\t14', pyperclip.paste())
+            '"Some-Other-Name"\t22\n"Yet-Another-Name"\t14', clipboard_text)

     def _shift_resizes_rectangular_selection(self):
-        pyperclip.copy("old clipboard contents")
-
         top_left_cell = self.page.find_by_xpath(
             QueryToolLocators.output_column_data_xpath.
             format('Some-Other-Name')
@@ -180,12 +192,12 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
             Keys.CONTROL
         ).send_keys('c').key_up(Keys.CONTROL).perform()

+        clipboard_text = self.paste_values_to_scratch_pad()
+
         self.assertEqual("""\"Some-Other-Name"\t22\t"some other info"
-"Yet-Another-Name"\t14\t"cool info\"""", pyperclip.paste())
+"Yet-Another-Name"\t14\t"cool info\"""", clipboard_text)

     def _shift_resizes_column_selection(self):
-        pyperclip.copy("old clipboard contents")
-
         column = self.page.find_by_css_selector(
             QueryToolLocators.output_column_header_css.format('value')
         )
@@ -197,13 +209,13 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
         ActionChains(self.page.driver).key_down(
             Keys.CONTROL).send_keys('c').key_up(Keys.CONTROL).perform()

+        clipboard_text = self.paste_values_to_scratch_pad()
+
         self.assertEqual(
             '"Some-Name"\t6\n"Some-Other-Name"\t22\n"Yet-Another-Name"\t14',
-            pyperclip.paste())
+            clipboard_text)

     def _mouseup_outside_grid_still_makes_a_selection(self):
-        pyperclip.copy("old clipboard contents")
-
         bottom_right_cell = self.page.find_by_xpath(
             QueryToolLocators.output_column_data_xpath.format('cool info')
         )
@@ -218,7 +230,9 @@ class CopySelectedQueryResultsFeatureTest(BaseFeatureTest):
         ActionChains(self.page.driver).key_down(
             Keys.CONTROL).send_keys('c').key_up(Keys.CONTROL).perform()

-        self.assertIn('"cool info"', pyperclip.paste())
+        clipboard_text = self.paste_values_to_scratch_pad()
+
+        self.assertIn('"cool info"', clipboard_text)

     def after(self):
         self.page.close_query_tool()
@@ -38,7 +38,8 @@ class CheckFileManagerFeatureTest(BaseFeatureTest):

         self.page.add_server(self.server)
         self.wait = WebDriverWait(self.page.driver, 10)
-        self.XSS_FILE = '/tmp/<img src=x onmouseover=alert("1")>.sql'
+        self.XSS_FILE = '/tmp/<img src=x ' + self.server['name'][:13] \
+            + '=alert("1")>.sql'
         # Remove any previous file
         if os.path.isfile(self.XSS_FILE):
             os.remove(self.XSS_FILE)
@@ -112,8 +113,8 @@ class CheckFileManagerFeatureTest(BaseFeatureTest):
         self.page.wait_for_query_tool_loading_indicator_to_disappear()
         self._check_escaped_characters(
             contents,
-            '<img src=x onmouseover=alert("1")>.sql',
-            'File manager'
+            '<img src=x ' + self.server['name'][:13] +
+            '=alert("1")>.sql', 'File manager'
         )

     def _check_escaped_characters(self, source_code, string_to_find, source):
|
@ -94,24 +94,33 @@ class KeyboardShortcutFeatureTest(BaseFeatureTest):
|
|||||||
NavMenuLocators.preference_menu_item_css)
|
NavMenuLocators.preference_menu_item_css)
|
||||||
pref_menu_item.click()
|
pref_menu_item.click()
|
||||||
|
|
||||||
# Wait till the preference dialogue box is displayed by checking the
|
|
||||||
# visibility of Show System Object label
|
|
||||||
self.wait.until(EC.presence_of_element_located(
|
|
||||||
(By.XPATH, NavMenuLocators.show_system_objects_pref_label_xpath))
|
|
||||||
)
|
|
||||||
|
|
||||||
maximize_button = self.page.find_by_css_selector(
|
|
||||||
NavMenuLocators.maximize_pref_dialogue_css)
|
|
||||||
maximize_button.click()
|
|
||||||
|
|
||||||
browser_node = self.page.find_by_xpath(
|
browser_node = self.page.find_by_xpath(
|
||||||
NavMenuLocators.specified_preference_tree_node.format('Browser'))
|
NavMenuLocators.specified_preference_tree_node.format('Browser'))
|
||||||
if self.page.find_by_xpath(
|
if self.page.find_by_xpath(
|
||||||
NavMenuLocators.specified_pref_node_exp_status.
|
NavMenuLocators.specified_pref_node_exp_status.
|
||||||
format('Browser')).get_attribute('aria-expanded') == 'false':
|
format('Browser')).get_attribute('aria-expanded') == 'false':
|
||||||
|
|
||||||
ActionChains(self.driver).double_click(browser_node).perform()
|
ActionChains(self.driver).double_click(browser_node).perform()
|
||||||
|
|
||||||
|
display_node = self.page.find_by_xpath(
|
||||||
|
NavMenuLocators.specified_sub_node_of_pref_tree_node.format(
|
||||||
|
'Browser', 'Display'))
|
||||||
|
attempt = 5
|
||||||
|
while attempt > 0:
|
||||||
|
display_node.click()
|
||||||
|
# After clicking the element gets loaded in to the dom but still
|
||||||
|
# not visible, hence sleeping for a sec.
|
||||||
|
time.sleep(1)
|
||||||
|
if self.page.wait_for_element_to_be_visible(
|
||||||
|
self.driver,
|
||||||
|
NavMenuLocators.show_system_objects_pref_label_xpath, 3):
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
attempt -= 1
|
||||||
|
|
||||||
|
maximize_button = self.page.find_by_css_selector(
|
||||||
|
NavMenuLocators.maximize_pref_dialogue_css)
|
||||||
|
maximize_button.click()
|
||||||
|
|
||||||
keyboard_node = self.page.find_by_xpath(
|
keyboard_node = self.page.find_by_xpath(
|
||||||
NavMenuLocators.specified_sub_node_of_pref_tree_node.format(
|
NavMenuLocators.specified_sub_node_of_pref_tree_node.format(
|
||||||
'Browser', 'Keyboard shortcuts'))
|
'Browser', 'Keyboard shortcuts'))
|
||||||
|
@@ -92,6 +92,18 @@ class PGDataypeFeatureTest(BaseFeatureTest):

         wait = WebDriverWait(self.page.driver, 10)

+        browser_node = self.page.find_by_xpath(
+            NavMenuLocators.specified_preference_tree_node.format('Browser'))
+        if self.page.find_by_xpath(
+                NavMenuLocators.specified_pref_node_exp_status.
+                format('Browser')).get_attribute('aria-expanded') == 'false':
+            ActionChains(self.driver).double_click(browser_node).perform()
+
+        self.page.retry_click(
+            (By.XPATH, NavMenuLocators.specified_sub_node_of_pref_tree_node.
+             format('Browser', 'Display')),
+            (By.XPATH, NavMenuLocators.show_system_objects_pref_label_xpath))
+
         # Wait till the preference dialogue box is displayed by checking the
         # visibility of Show System Object label
         wait.until(EC.presence_of_element_located(
@@ -18,6 +18,7 @@ from regression.python_test_utils import test_utils
 from regression.python_test_utils import test_gui_helper
 from regression.feature_utils.locators import NavMenuLocators
 from regression.feature_utils.tree_area_locators import TreeAreaLocators
+from selenium.webdriver import ActionChains


 class PGUtilitiesBackupFeatureTest(BaseFeatureTest):
@@ -56,6 +57,7 @@ class PGUtilitiesBackupFeatureTest(BaseFeatureTest):
             self.server['sslmode']
         )
         test_utils.drop_database(connection, self.database_name)
+        self._update_preferences()
         db_id = test_utils.create_database(self.server, self.database_name)
         if not db_id:
             self.assertTrue(False, "Database {} is not "
@@ -242,3 +244,74 @@ class PGUtilitiesBackupFeatureTest(BaseFeatureTest):
         # For XSS we need to search against element's html code
         assert source_code.find(string_to_find) != - \
             1, "{0} might be vulnerable to XSS ".format(source)
+
+    def _update_preferences(self):
+        """
+        Function updates preferences for binary path.
+        """
+        file_menu = self.page.find_by_css_selector(
+            NavMenuLocators.file_menu_css)
+        file_menu.click()
+
+        pref_menu_item = self.page.find_by_css_selector(
+            NavMenuLocators.preference_menu_item_css)
+        pref_menu_item.click()
+
+        wait = WebDriverWait(self.page.driver, 10)
+
+        # Wait till the preference dialogue box is displayed by checking the
+        # visibility of Show System Object label
+        wait.until(EC.presence_of_element_located(
+            (By.XPATH, NavMenuLocators.show_system_objects_pref_label_xpath))
+        )
+
+        maximize_button = self.page.find_by_css_selector(
+            NavMenuLocators.maximize_pref_dialogue_css)
+        maximize_button.click()
+
+        path = self.page.find_by_xpath(
+            NavMenuLocators.specified_preference_tree_node.format('Paths'))
+        if self.page.find_by_xpath(
+                NavMenuLocators.specified_pref_node_exp_status.format('Paths')). \
+                get_attribute('aria-expanded') == 'false':
+            ActionChains(self.driver).double_click(path).perform()
+
+        binary_path = self.page.find_by_xpath(
+            NavMenuLocators.specified_sub_node_of_pref_tree_node.format(
+                'Paths', 'Binary paths'))
+        binary_path.click()
+
+        default_binary_path = self.server['default_binary_paths']
+        if default_binary_path is not None:
+            server_types = default_binary_path.keys()
+            for serv in server_types:
+                if serv == 'pg':
+                    path_input = self.page.find_by_xpath(
+                        "//label[text()='PostgreSQL Binary "
+                        "Path']/following-sibling::div//input")
+                    path_input.clear()
+                    path_input.click()
+                    path_input.send_keys(default_binary_path['pg'])
+                elif serv == 'gpdb':
+                    path_input = self.page.find_by_xpath(
+                        "//label[text()='Greenplum Database Binary "
+                        "Path']/following-sibling::div//input")
+                    path_input.clear()
+                    path_input.click()
+                    path_input.send_keys(default_binary_path['gpdb'])
+                elif serv == 'ppas':
+                    path_input = self.page.find_by_xpath(
+                        "//label[text()='EDB Advanced Server Binary "
+                        "Path']/following-sibling::div//input")
+                    path_input.clear()
+                    path_input.click()
+                    path_input.send_keys(default_binary_path['ppas'])
+                else:
+                    print('Binary path Key is Incorrect')
+
+        # save and close the preference dialog.
+        self.page.click_modal('Save')
+
+        self.page.wait_for_element_to_disappear(
+            lambda driver: driver.find_element_by_css_selector(".ajs-modal")
+        )
@@ -9,7 +9,6 @@

 from __future__ import print_function
 import sys
-import pyperclip
 import random

 from selenium.webdriver import ActionChains
@@ -90,7 +89,6 @@ class QueryToolJourneyTest(BaseFeatureTest):
         print(" OK.", file=sys.stderr)

     def _test_copies_rows(self):
-        pyperclip.copy("old clipboard contents")
         self.page.driver.switch_to.default_content()
         self.page.driver.switch_to_frame(
             self.page.driver.find_element_by_tag_name("iframe"))
@@ -103,12 +101,21 @@ class QueryToolJourneyTest(BaseFeatureTest):
             QueryToolLocators.copy_button_css)
         copy_row.click()

+        self.page.driver.switch_to.default_content()
+        self.page.driver.switch_to_frame(
+            self.page.driver.find_element_by_tag_name("iframe"))
+
+        scratch_pad_ele = self.page.find_by_css_selector(
+            QueryToolLocators.scratch_pad_css)
+        self.page.paste_values(scratch_pad_ele)
+        clipboard_text = scratch_pad_ele.get_attribute("value")
+
         self.assertEqual('"Some-Name"\t6\t"some info"',
-                         pyperclip.paste())
+                         clipboard_text)
+
+        scratch_pad_ele.clear()

     def _test_copies_columns(self):
-        pyperclip.copy("old clipboard contents")
-
         self.page.driver.switch_to.default_content()
         self.page.driver.switch_to_frame(
             self.page.driver.find_element_by_tag_name("iframe"))
@@ -121,9 +128,20 @@ class QueryToolJourneyTest(BaseFeatureTest):
             QueryToolLocators.copy_button_css)
         copy_btn.click()

-        self.assertTrue('"Some-Name"' in pyperclip.paste())
-        self.assertTrue('"Some-Other-Name"' in pyperclip.paste())
-        self.assertTrue('"Yet-Another-Name"' in pyperclip.paste())
+        self.page.driver.switch_to.default_content()
+        self.page.driver.switch_to_frame(
+            self.page.driver.find_element_by_tag_name("iframe"))
+
+        scratch_pad_ele = self.page.find_by_css_selector(
+            QueryToolLocators.scratch_pad_css)
+        self.page.paste_values(scratch_pad_ele)
+
+        clipboard_text = scratch_pad_ele.get_attribute("value")
+
+        self.assertTrue('"Some-Name"' in clipboard_text)
+        self.assertTrue('"Some-Other-Name"' in clipboard_text)
+        self.assertTrue('"Yet-Another-Name"' in clipboard_text)
+        scratch_pad_ele.clear()

     def _test_history_tab(self):
         self.page.clear_query_tool()
@ -131,13 +131,15 @@ CREATE TABLE public.nonintpkey
             self.test_db, 'public')

         self._load_config_data('table_insert_update_cases')
+        data_local = config_data
         # iterate on both tables
         for cnt in (1, 2):
-            self._perform_test_for_table('defaults_{0}'.format(str(cnt)))
+            self._perform_test_for_table('defaults_{0}'.format(str(cnt)),
+                                         data_local)
         # test nonint pkey table
         self._load_config_data('table_insert_update_nonint')
-        self._perform_test_for_table('nonintpkey')
+        data_local = config_data
+        self._perform_test_for_table('nonintpkey', data_local)

     def after(self):
         self.page.remove_server(self.server)
@ -167,7 +169,7 @@ CREATE TABLE public.nonintpkey
         global config_data
         config_data = config_data_json[config_key]

-    def _perform_test_for_table(self, table_name):
+    def _perform_test_for_table(self, table_name, config_data_local):
         self.page.click_a_tree_node(
             table_name,
             TreeAreaLocators.sub_nodes_of_tables_node)
@ -176,20 +178,21 @@ CREATE TABLE public.nonintpkey

         self.page.wait_for_query_tool_loading_indicator_to_disappear()
         # Run test to insert a new row in table with default values
-        self._add_row()
+        self._add_row(config_data_local)
         self._verify_row_data(row_height=0,
-                              config_check_data=config_data['add'])
+                              config_check_data=config_data_local['add'])

         # Run test to copy/paste a row
-        self._copy_paste_row()
+        self._copy_paste_row(config_data_local)

-        self._update_row()
+        self._update_row(config_data_local)
         self.page.click_tab("Messages")
         self._verify_messsages("")
         self.page.click_tab("Data Output")
         updated_row_data = {
-            i: config_data['update'][i] if i in config_data['update'] else val
-            for i, val in config_data['add'].items()
+            i: config_data_local['update'][i] if i in config_data_local[
+                'update'] else val
+            for i, val in config_data_local['add'].items()
         }
         self._verify_row_data(row_height=0,
                               config_check_data=updated_row_data)
@ -221,7 +224,6 @@ CREATE TABLE public.nonintpkey
         Returns: None

         """
-
         self.wait.until(EC.visibility_of_element_located(
             (By.XPATH, xpath)), CheckForViewDataTest.TIMEOUT_STRING
         )
@ -290,7 +292,7 @@ CREATE TABLE public.nonintpkey
             self.page.driver.find_element_by_tag_name('iframe')
         )

-    def _copy_paste_row(self):
+    def _copy_paste_row(self, config_data_l):
         row0_cell0_xpath = CheckForViewDataTest._get_cell_xpath("r0", 1)

         self.page.find_by_xpath(row0_cell0_xpath).click()
@ -300,12 +302,12 @@ CREATE TABLE public.nonintpkey
             QueryToolLocators.paste_button_css).click()

         # Update primary key of copied cell
-        self._add_update_save_row(config_data['copy'], row=2)
+        self._add_update_save_row(config_data_l['copy'], row=2)

         # Verify row 1 and row 2 data
         updated_row_data = {
-            i: config_data['copy'][i] if i in config_data['copy'] else val
-            for i, val in config_data['add'].items()
+            i: config_data_l['copy'][i] if i in config_data_l['copy'] else val
+            for i, val in config_data_l['add'].items()
         }
         self._verify_row_data(row_height=25,
                               config_check_data=updated_row_data)
@ -329,11 +331,11 @@ CREATE TABLE public.nonintpkey
         # save ajax is completed.
         time.sleep(2)

-    def _add_row(self):
-        self._add_update_save_row(config_data['add'], 1)
+    def _add_row(self, config_data_l):
+        self._add_update_save_row(config_data_l['add'], 1)

-    def _update_row(self):
-        self._add_update_save_row(config_data['update'], 1)
+    def _update_row(self, config_data_l):
+        self._add_update_save_row(config_data_l['update'], 1)

     def _verify_messsages(self, text):
         messages_ele = self.page.find_by_css_selector(
@ -211,17 +211,23 @@ class CheckForXssFeatureTest(BaseFeatureTest):
             "Query tool (History Entry)"
         )

-        # Check for history details message
-        history_ele = self.driver \
-            .find_element_by_css_selector(".query-detail .content-value")
-        source_code = history_ele.get_attribute('innerHTML')
+        retry = 2
+        while retry > 0:
+            try:
+                history_ele = self.driver \
+                    .find_element_by_css_selector(
+                        ".query-detail .content-value")
+                source_code = history_ele.get_attribute('innerHTML')
+                break
+            except StaleElementReferenceException:
+                retry -= 1

         self._check_escaped_characters(
             source_code,
             '<script>alert(1)</script>',
             "Query tool (History Details-Message)"
         )

         retry = 2
         while retry > 0:
             try:
@ -120,9 +120,8 @@ class BaseTestGenerator(unittest.TestCase):
             self.skipTest('cannot run in: %s' %
                           server_con['data']['type'])

-    @classmethod
-    def setTestServer(cls, server):
-        cls.server = server
+    def setTestServer(self, server):
+        self.server = server

     @abstractmethod
     def runTest(self):
@ -137,17 +136,14 @@ class BaseTestGenerator(unittest.TestCase):
     def setTestClient(cls, test_client):
         cls.tester = test_client

-    @classmethod
-    def setDriver(cls, driver):
-        cls.driver = driver
+    def setDriver(self, driver):
+        self.driver = driver

-    @classmethod
-    def setServerInformation(cls, server_information):
-        cls.server_information = server_information
+    def setServerInformation(self, server_information):
+        self.server_information = server_information

-    @classmethod
-    def setTestDatabaseName(cls, database_name):
-        cls.test_db = database_name
+    def setTestDatabaseName(self, database_name):
+        self.test_db = database_name

     @classmethod
     def setReSQLModuleList(cls, module_list):
@ -141,6 +141,61 @@ Python Tests:
  and registered automatically by its module name in
  'pgadmin4/web/pgadmin/utils/test.py' file.

- To run Feature Tests in parallel using Selenoid (Selenium grid + docker),
  Selenoid needs to be installed. Steps to install Selenoid:

  - Install & start docker
      $ yum -y install docker docker-registry
      $ vi /etc/sysconfig/docker   # in OPTIONS add '--selinux-enabled=false'
      $ systemctl enable docker.service
      $ systemctl start docker.service
      $ systemctl status docker.service

  - Install & start Selenoid
      $ curl -s https://aerokube.com/cm/bash | bash
      $ ./cm selenoid start --vnc --args "-limit 3 -cpu 1.5 -mem 1.5g"
      $ ./cm selenoid-ui start

    Check the Selenoid status (a small Python sketch of this check follows
    this hunk):
      http://<IP address of Selenoid installed machine>:4444/status
        - Should show JSON with the browser details
      http://<IP address of Selenoid installed machine>:8080/#/
        - The Capabilities tab shows the available browsers

    Note: In --args "-limit 3 -cpu 1.5 -mem 1.5g"
      -limit 3 : limits the maximum parallel sessions (docker containers) in
                 Selenoid,
      -cpu     : limits CPU per session,
      -mem     : limits memory per session.
    Generally the maximum number of parallel sessions is the number of
    cores * 1.5 to 2.
    You can list the available flags by running './cm selenoid args'.
    Additional information about the tool:
      - https://aerokube.com/selenoid/latest/

- Update 'test_config.json' with the Selenoid configuration information:
    pgAdmin_default_server -
      The IP address of the machine where the pgAdmin source code is present.
      The value should NOT be '127.0.0.1' even though everything runs on the
      same machine.
      You can get it on Linux by running 'ifconfig | grep inet',
      e.g. - 192.168.143.121
    max_parallel_sessions -
      Another way to control the number of tests to be run in parallel.
      This should be equal to or less than the limit specified while setting
      up Selenoid.
    selenoid_url -
      The URL should be formed as below:
      http://<IP address of Selenoid installed machine>:4444/wd/hub/
      e.g. - "selenoid_url": "http://192.168.143.121:4444/wd/hub"
      If the source code & Selenoid servers are on the same machine, the
      Selenoid URL can be "http://localhost:4444/wd/hub".
    browsers_list -
      List of browser name & version pairs, each enclosed in {}, on which the
      tests are to be executed.
      Make sure the list contains only those browsers & versions that are
      shown in the Capabilities tab of the Selenoid status web page.
      If the version is given as null, the latest version available on the
      Selenoid server will be used for execution.
      e.g. - [ {"name": "Chrome", "version": "80.0"},
               {"name": "Firefox", "version": "74.0"} ]

- Change to the regression test directory:
  run 'cd web/regression'

@ -193,6 +248,10 @@ Python Tests:
  Example 3) Exclude reverse engineered SQL test framework for all modules
    run 'python runtests.py --exclude resql'

- Execute UI selenium tests in parallel using Selenoid (Selenium grid + docker)
  Example: run 'python runtests.py --pkg feature_tests --parallel'


Code Coverage:
---------------
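A quick way to sanity-check the Selenoid installation described above is to
fetch the /status endpoint and print the browsers it advertises. This is only
a minimal, hypothetical sketch using the Python standard library; the host and
port below are placeholders and should be taken from your own 'selenoid_url'
value in test_config.json, and the check itself is not part of the test
framework:

    import json
    import urllib.request

    # Placeholder URL - replace with the machine where Selenoid is running.
    SELENOID_STATUS_URL = "http://192.168.143.121:4444/status"

    with urllib.request.urlopen(SELENOID_STATUS_URL, timeout=10) as response:
        status = json.load(response)

    # The status document lists every browser image Selenoid can launch,
    # keyed by browser name with the available versions nested underneath.
    for name, versions in status.get("browsers", {}).items():
        print(name, "->", ", ".join(sorted(versions)))

If this prints the browsers you configured in 'browsers_list', the grid is
ready for the '--parallel' run shown above.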
@ -61,10 +61,15 @@ class AppStarter:
                 raise Exception('Unable to start python server even after '
                                 'retrying 60 times.')

-        launch_browser(0)
+        if self.driver is not None:
+            launch_browser(0)
+        else:
+            return "http://" + self.app_config.DEFAULT_SERVER + ":" \
+                   + random_server_port

     def stop_app(self):
         """ This function stop the started app by killing process """
-        self.driver.quit()
+        if self.driver is not None:
+            self.driver.quit()
         # os.killpg supported in Mac and Unix as this function not supported in
         # Windows
@ -172,6 +172,8 @@ class QueryToolLocators:

     new_row_xpath = "//div[contains(@class, 'new-row')]"

+    scratch_pad_css = ".sql-scratch > textarea"
+
     copy_button_css = "#btn-copy-row"

     paste_button_css = "#btn-paste-row"
@ -217,9 +219,9 @@ class QueryToolLocators:
     btn_commit = "#btn-commit"

     show_query_internally_btn = \
-        "//div[label[normalize-space(" \
-        "text())='Show queries generated internally by pgAdmin?']]" \
-        "//div[contains(@class,'toggle btn')]"
+        "//div[label[contains(normalize-space(text())," \
+        "'Show queries generated internally by')]]//" \
+        "div[contains(@class,'toggle btn')]"

     editable_column_icon_xpath = "//div[contains(@class," \
                                  " 'editable-column-header-icon')]" \
@ -88,7 +88,13 @@ class PgadminPage:
             (By.CSS_SELECTOR, "button[type='save'].btn.btn-primary")))
         self.find_by_css_selector("button[type='save'].btn.btn-primary").\
             click()
+        try:
+            WebDriverWait(self.driver, 10).until(
+                EC.visibility_of_element_located(
+                    (By.XPATH,
+                     "//*[@id='tree']//*[.='" + server_config['name'] + "']")))
+        except TimeoutException:
+            self.toggle_open_servers_group()
         WebDriverWait(self.driver, 10).until(
             EC.visibility_of_element_located(
                 (By.XPATH,
@ -910,7 +916,11 @@ class PgadminPage:
                 return element
             except (NoSuchElementException, WebDriverException):
                 return False
+        time.sleep(1)
+        self.driver.switch_to.default_content()
+        self.driver.switch_to_frame(
+            self.driver.find_element_by_tag_name("iframe"))
+        self.find_by_xpath("//a[text()='Query Editor']").click()
         codemirror_ele = WebDriverWait(
             self.driver, timeout=self.timeout, poll_frequency=0.01)\
             .until(find_codemirror,
@ -1161,3 +1171,34 @@ class PgadminPage:
             except Exception:
                 attempt += 1
         return click_status

    def paste_values(self, el=None):
        """
        Function paste values in scratch pad
        :param el:
        """
        actions = ActionChains(self.driver)
        if el:
            # Must step
            el.click()
            if self.driver.capabilities["platformName"] == 'mac':
                # FF step
                el.send_keys(Keys.COMMAND + "v")
                # Chrome Step
                actions.key_down(Keys.SHIFT)
                actions.send_keys(Keys.INSERT)
                actions.key_up(Keys.SHIFT)
                actions.perform()
            else:
                el.send_keys(Keys.CONTROL + "v")

    def wait_for_element_to_be_visible(self, driver, xpath, time_value=20):
        """This will wait until an element is visible on page"""
        element_located_status = False
        try:
            if WebDriverWait(driver, time_value).until(
                    EC.visibility_of_element_located((By.XPATH, xpath))):
                element_located_status = True
        except TimeoutException:
            element_located_status = False
        return element_located_status
@ -8,6 +8,8 @@
 ##########################################################################

 from __future__ import print_function
+
+import fileinput
 import traceback
 import os
 import sys
@ -16,7 +18,17 @@ import psycopg2
 import sqlite3
 import shutil
 from functools import partial
+
+from selenium.webdriver.support.wait import WebDriverWait
 from testtools.testcase import clone_test_with_new_id
+import re
+import time
+from selenium.common.exceptions import WebDriverException
+import urllib.request as urllib
+import json
+from selenium import webdriver
+from selenium.webdriver.chrome.options import Options
+from selenium.webdriver.support import expected_conditions as ec

 import config
 import regression
|
|||||||
actual_data.remove(value)
|
actual_data.remove(value)
|
||||||
break
|
break
|
||||||
return expected_output
|
return expected_output
|
||||||
|
|
||||||
|
|
||||||
|
def is_parallel_ui_tests(args):
|
||||||
|
"""
|
||||||
|
This function checks for coverage args exists in command line args
|
||||||
|
:return: boolean
|
||||||
|
"""
|
||||||
|
if "parallel" in args and args["parallel"]:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def get_selenium_grid_status_and_browser_list(selenoid_url):
|
||||||
|
"""
|
||||||
|
This function checks selenoid status for given url
|
||||||
|
:param selrnoid_url:
|
||||||
|
:return: status of selenoid & list of browsers available with selenoid if
|
||||||
|
status is up
|
||||||
|
"""
|
||||||
|
selenoid_status = False
|
||||||
|
browser_list = []
|
||||||
|
try:
|
||||||
|
selenoid_status = get_selenium_grid_status_json(selenoid_url)
|
||||||
|
if selenoid_status:
|
||||||
|
available_browsers = selenoid_status["browsers"]
|
||||||
|
list_of_browsers = test_setup.config_data['selenoid_config'][
|
||||||
|
'browsers_list']
|
||||||
|
|
||||||
|
for browser in list_of_browsers:
|
||||||
|
if browser["name"].lower() in available_browsers.keys():
|
||||||
|
versions = available_browsers[(browser["name"].lower())]
|
||||||
|
if browser["version"] is None:
|
||||||
|
print("Specified version of browser is None. Hence "
|
||||||
|
"latest version of {0} available with selenoid "
|
||||||
|
"server will be used.\n".format(browser["name"]))
|
||||||
|
browser_list.append(browser)
|
||||||
|
elif browser["version"] in versions.keys():
|
||||||
|
browser_list.append(browser)
|
||||||
|
else:
|
||||||
|
print(
|
||||||
|
"Available {0} versions {1}".format(
|
||||||
|
browser["name"], versions.keys()))
|
||||||
|
print("Specified Version = {0}".format(
|
||||||
|
browser["version"]))
|
||||||
|
else:
|
||||||
|
print("{0} is NOT available".format(browser["name"]))
|
||||||
|
except Exception as e:
|
||||||
|
(str(e))
|
||||||
|
print("Unable to find Selenoid Status")
|
||||||
|
|
||||||
|
return selenoid_status, browser_list
|
||||||
|
|
||||||
|
|
||||||
|
def is_feature_test_included(arguments):
|
||||||
|
"""
|
||||||
|
:param arguments: his is command line arguments for module name to
|
||||||
|
which test suite will run
|
||||||
|
:return: boolean value whether to execute feature tests or NOT &
|
||||||
|
browser name if feature_test_tobe_included = True
|
||||||
|
"""
|
||||||
|
exclude_pkgs = []
|
||||||
|
if arguments['exclude'] is not None:
|
||||||
|
exclude_pkgs += arguments['exclude'].split(',')
|
||||||
|
|
||||||
|
feature_test_tobe_included = 'feature_tests' not in exclude_pkgs and \
|
||||||
|
(arguments['pkg'] is None or arguments[
|
||||||
|
'pkg'] == "all" or
|
||||||
|
arguments['pkg'] == "feature_tests")
|
||||||
|
return feature_test_tobe_included
|
||||||
|
|
||||||
|
|
||||||
|
def launch_url_in_browser(driver_instance, url, title='pgAdmin 4', timeout=40):
|
||||||
|
"""
|
||||||
|
Function launches urls in specified driver instance
|
||||||
|
:param driver_instance:browser instance
|
||||||
|
:param url:url to be launched
|
||||||
|
:param title:web-page tile on successful launch default is 'pgAdmin 4'
|
||||||
|
:param timeout:in seconds for getting specified title default is 20sec
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
count = timeout / 5
|
||||||
|
while count > 0:
|
||||||
|
try:
|
||||||
|
driver_instance.get(url)
|
||||||
|
wait = WebDriverWait(driver_instance, 10)
|
||||||
|
wait.until(ec.title_is(title))
|
||||||
|
break
|
||||||
|
except WebDriverException as e:
|
||||||
|
time.sleep(6)
|
||||||
|
count -= 1
|
||||||
|
if count == 0:
|
||||||
|
exception_msg = 'Web-page title did not match to {0}. ' \
|
||||||
|
'Please check url {1} accessible on ' \
|
||||||
|
'internet.'.format(title, url)
|
||||||
|
raise Exception(exception_msg)
|
||||||
|
|
||||||
|
|
||||||
|
def get_remote_webdriver(hub_url, browser, browser_ver, test_name):
|
||||||
|
"""
|
||||||
|
This functions returns remote web-driver instance created in selenoid
|
||||||
|
machine.
|
||||||
|
:param hub_url
|
||||||
|
:param browser: browser name
|
||||||
|
:param browser_ver: version for browser
|
||||||
|
:param test_name: test name
|
||||||
|
:return: remote web-driver instance for specified browser
|
||||||
|
"""
|
||||||
|
test_name = browser + browser_ver + "_" + test_name + "-" + time.strftime(
|
||||||
|
"%m_%d_%y_%H_%M_%S", time.localtime())
|
||||||
|
driver_local = None
|
||||||
|
|
||||||
|
desired_capabilities = {
|
||||||
|
"version": browser_ver,
|
||||||
|
"enableVNC": True,
|
||||||
|
"enableVideo": True,
|
||||||
|
"enableLog": True,
|
||||||
|
"videoName": test_name + ".mp4",
|
||||||
|
"logName": test_name + ".log",
|
||||||
|
"name": test_name,
|
||||||
|
"timeZone": "Asia/Kolkata"
|
||||||
|
}
|
||||||
|
|
||||||
|
if browser == 'firefox':
|
||||||
|
profile = webdriver.FirefoxProfile()
|
||||||
|
profile.set_preference("dom.disable_beforeunload", True)
|
||||||
|
desired_capabilities["browserName"] = "firefox"
|
||||||
|
desired_capabilities["requireWindowFocus"] = True
|
||||||
|
desired_capabilities["enablePersistentHover"] = False
|
||||||
|
driver_local = webdriver.Remote(
|
||||||
|
command_executor=hub_url,
|
||||||
|
desired_capabilities=desired_capabilities, browser_profile=profile)
|
||||||
|
elif browser == 'chrome':
|
||||||
|
options = Options()
|
||||||
|
options.add_argument("--window-size=1280,1024")
|
||||||
|
desired_capabilities["browserName"] = "chrome"
|
||||||
|
driver_local = webdriver.Remote(
|
||||||
|
command_executor=hub_url,
|
||||||
|
desired_capabilities=desired_capabilities, options=options)
|
||||||
|
else:
|
||||||
|
print("Specified browser does not exist.")
|
||||||
|
|
||||||
|
# maximize browser window
|
||||||
|
driver_local.maximize_window()
|
||||||
|
|
||||||
|
# driver_local.implicitly_wait(2)
|
||||||
|
return driver_local
|
||||||
|
|
||||||
|
|
||||||
|
def get_parallel_sequential_module_list(module_list):
|
||||||
|
"""
|
||||||
|
Functions segregate parallel & sequential modules
|
||||||
|
:param module_list: Complete list of modules
|
||||||
|
:return: parallel & sequential module lists
|
||||||
|
"""
|
||||||
|
# list of files consisting tests that needs to be
|
||||||
|
# executed sequentially
|
||||||
|
sequential_tests_file = [
|
||||||
|
'pgadmin.feature_tests.pg_utilities_backup_restore_test',
|
||||||
|
'pgadmin.feature_tests.pg_utilities_maintenance_test',
|
||||||
|
'pgadmin.feature_tests.keyboard_shortcut_test']
|
||||||
|
|
||||||
|
# list of tests can be executed in parallel
|
||||||
|
parallel_tests = list(module_list)
|
||||||
|
for module in module_list:
|
||||||
|
if str(module[0]) in sequential_tests_file:
|
||||||
|
parallel_tests.remove(module)
|
||||||
|
|
||||||
|
# list of tests can be executed in sequentially
|
||||||
|
sequential_tests = list(
|
||||||
|
filter(lambda i: i not in parallel_tests,
|
||||||
|
module_list))
|
||||||
|
|
||||||
|
# return parallel & sequential lists
|
||||||
|
return parallel_tests, sequential_tests
|
||||||
|
|
||||||
|
|
||||||
|
def get_browser_details(browser_info_dict, url):
|
||||||
|
"""
|
||||||
|
Function extracts browser name & version from browser info dict
|
||||||
|
in test_config.json
|
||||||
|
:param browser_info_dict:
|
||||||
|
:return: browser name & version
|
||||||
|
"""
|
||||||
|
browser_name = browser_info_dict["name"].lower()
|
||||||
|
browser_version = browser_info_dict["version"]
|
||||||
|
if browser_version is None:
|
||||||
|
selenoid_status = get_selenium_grid_status_json(url)
|
||||||
|
versions = selenoid_status["browsers"][browser_name]
|
||||||
|
browser_version = max(versions)
|
||||||
|
return browser_name, browser_version
|
||||||
|
|
||||||
|
|
||||||
|
def print_test_summary(complete_module_list, parallel_testlist,
|
||||||
|
sequential_tests_list, browser_name, browser_version):
|
||||||
|
"""
|
||||||
|
Prints test summary about total, parallel, sequential, browser name,
|
||||||
|
browser version information
|
||||||
|
:param complete_module_list:
|
||||||
|
:param parallel_testlist:
|
||||||
|
:param sequential_tests_list:
|
||||||
|
:param browser_name:
|
||||||
|
:param browser_version:
|
||||||
|
"""
|
||||||
|
print(
|
||||||
|
"=================================================================",
|
||||||
|
file=sys.stderr
|
||||||
|
)
|
||||||
|
print(
|
||||||
|
"Total Tests # {0}\nParallel Tests # {1}, "
|
||||||
|
"Sequential Tests # {2}".format(
|
||||||
|
len(complete_module_list), len(parallel_testlist),
|
||||||
|
len(sequential_tests_list)),
|
||||||
|
file=sys.stderr)
|
||||||
|
print("Browser: [Name:{0}, Version: {1}]".format(
|
||||||
|
browser_name.capitalize(), browser_version),
|
||||||
|
file=sys.stderr)
|
||||||
|
print(
|
||||||
|
"=================================================================\n",
|
||||||
|
file=sys.stderr
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def get_selenium_grid_status_json(selenoid_url):
|
||||||
|
"""
|
||||||
|
Functions returns json response received from selenoid server
|
||||||
|
:param selenoid_url:
|
||||||
|
:return:
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
selenoid_status = urllib.urlopen(
|
||||||
|
"http://" + re.split('/', (re.split('//', selenoid_url, 1)[1]))[
|
||||||
|
0] + "/status", timeout=10)
|
||||||
|
selenoid_status = json.load(selenoid_status)
|
||||||
|
if isinstance(selenoid_status, dict):
|
||||||
|
return selenoid_status
|
||||||
|
except Exception as e:
|
||||||
|
print("Unable to find Selenoid Status.Kindly check url passed -'{0}'".
|
||||||
|
format(selenoid_url))
|
||||||
|
return None
|
||||||
|
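For orientation, the helpers above are meant to be combined roughly as in the
sketch below. This is only illustrative: the imports and the pgAdmin client
URL are assumptions based on how runtests.py (later in this change) uses these
functions, and the authoritative wiring lives there, not here:

    from regression import test_setup
    from regression.python_test_utils import test_utils

    # Hub URL comes from the selenoid_config block of test_config.json.
    hub_url = test_setup.config_data['selenoid_config']['selenoid_url']

    grid_up, browsers = \
        test_utils.get_selenium_grid_status_and_browser_list(hub_url)
    if grid_up and browsers:
        name, version = test_utils.get_browser_details(browsers[0], hub_url)
        # One remote session per test thread, named after the tests it runs.
        driver = test_utils.get_remote_webdriver(hub_url, name, version,
                                                 'smoke_check')
        # Placeholder client URL; in the real runner this value is returned
        # by AppStarter.start_app().
        test_utils.launch_url_in_browser(driver,
                                         'http://192.168.143.121:5050')
        driver.quit()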
@ -21,7 +21,6 @@ fixtures==3.0.0
 linecache2==1.0.0
 pbr==3.1.1
 pycodestyle>=2.5.0
-pyperclip~=1.6.0
 python-mimeparse==1.6.0
 testscenarios==0.5.0
 testtools==2.3.0
@ -21,7 +21,8 @@ import traceback
 import json
 import random
 import coverage
+import threading
+import time
 import unittest

 if sys.version_info < (3, 4):
@ -136,7 +137,7 @@ scenarios.apply_scenario = test_utils.apply_scenario


 def get_suite(module_list, test_server, test_app_client, server_information,
-              test_db_name):
+              test_db_name, driver_passed):
     """
     This function add the tests to test suite and return modified test suite
     variable.
@ -166,7 +167,7 @@ def get_suite(module_list, test_server, test_app_client, server_information,
         obj.setApp(app)
         obj.setTestClient(test_app_client)
         obj.setTestServer(test_server)
-        obj.setDriver(driver)
+        obj.setDriver(driver_passed)
         obj.setServerInformation(server_information)
         obj.setTestDatabaseName(test_db_name)
         scenario = scenarios.generate_scenarios(obj)
@ -210,6 +211,10 @@ def get_test_modules(arguments):
            (arguments['pkg'] is None or arguments['pkg'] == "all" or
             arguments['pkg'] == "feature_tests"):

+        if arguments['pkg'] == "feature_tests":
+            exclude_pkgs.extend(['resql'])
+
+    if not test_utils.is_parallel_ui_tests(args):
         from selenium import webdriver
         from selenium.webdriver.chrome.options import Options
         from selenium.webdriver.common.desired_capabilities import \
@ -218,8 +223,8 @@ def get_test_modules(arguments):
         default_browser = 'chrome'

         # Check default browser provided through command line. If provided
-        # then use that browser as default browser else check for the setting
-        # provided in test_config.json file.
+        # then use that browser as default browser else check for the
+        # setting provided in test_config.json file.
         if (
             'default_browser' in arguments and
             arguments['default_browser'] is not None
@ -229,7 +234,8 @@ def get_test_modules(arguments):
             test_setup.config_data and
             "default_browser" in test_setup.config_data
         ):
-            default_browser = test_setup.config_data['default_browser'].lower()
+            default_browser = test_setup.config_data[
+                'default_browser'].lower()

         if default_browser == 'firefox':
             cap = DesiredCapabilities.FIREFOX
@ -319,6 +325,9 @@ def add_arguments():
         '--modules',
         help='Executes the feature test for specific modules in pkg'
     )
+    parser.add_argument('--parallel', nargs='?', const=True,
+                        type=bool, default=False,
+                        help='Enable parallel Feature Tests')
     arg = parser.parse_args()

     return arg
@ -404,59 +413,20 @@ class StreamToLogger(object):
         pass


-if __name__ == '__main__':
-    # Failure detected?
-    failure = False
-    test_result = dict()
-    cov = None
-
-    # Set signal handler for cleanup
-    signal_list = dir(signal)
-    required_signal_list = ['SIGTERM', 'SIGABRT', 'SIGQUIT', 'SIGINT']
-    # Get the OS wise supported signals
-    supported_signal_list = [sig for sig in required_signal_list if
-                             sig in signal_list]
-    for sig in supported_signal_list:
-        signal.signal(getattr(signal, sig), sig_handler)
-
-    # Set basic logging configuration for log file
-    fh = logging.FileHandler(CURRENT_PATH + '/' +
-                             'regression.log', 'w', 'utf-8')
-    fh.setLevel(logging.DEBUG)
-    fh.setFormatter(logging.Formatter(config.FILE_LOG_FORMAT))
-
-    logger = logging.getLogger()
-    logger.addHandler(fh)
-
-    # Create logger to write log in the logger file as well as on console
-    stderr_logger = logging.getLogger('STDERR')
-    sys.stderr = StreamToLogger(stderr_logger, logging.ERROR)
-    args = vars(add_arguments())
-    # Get test module list
-    try:
-        test_module_list = get_test_modules(args)
-    except Exception as e:
-        print(str(e))
-        sys.exit(1)
-    # Login the test client
-    test_utils.login_tester_account(test_client)
-
-    servers_info = test_utils.get_config_data()
-    node_name = "all"
-    if args['pkg'] is not None:
-        node_name = args['pkg'].split('.')[-1]
-
-    # Start coverage
-    if test_utils.is_coverage_enabled(args):
-        cov = coverage.Coverage(config_file=COVERAGE_CONFIG_FILE)
-        cov.start()
-
-    try:
-        for server in servers_info:
-            print("\n=============Running the test cases for '%s' ============="
-                  % server['name'], file=sys.stderr)
+def execute_test(test_module_list_passed, server_passed, driver_passed):
+    """
+    Function that actually executes the test suite for one server.
+    :param test_module_list_passed:
+    :param server_passed:
+    :param driver_passed:
+    :return:
+    """
+    try:
+        print("\n=============Running the test cases for '%s' ============="
+              % server_passed['name'], file=sys.stderr)
         # Create test server
-        server_information = test_utils.create_parent_server_node(server)
+        server_information = \
+            test_utils.create_parent_server_node(server_passed)

         # Create test database with random number to avoid conflict in
         # parallel execution on different platforms. This database will be
@ -464,45 +434,66 @@ if __name__ == '__main__':
         test_db_name = "acceptance_test_db" + \
                        str(random.randint(10000, 65535))
         connection = test_utils.get_db_connection(
-            server['db'],
-            server['username'],
-            server['db_password'],
-            server['host'],
-            server['port'],
-            server['sslmode']
+            server_passed['db'],
+            server_passed['username'],
+            server_passed['db_password'],
+            server_passed['host'],
+            server_passed['port'],
+            server_passed['sslmode']
         )

         # Add the server version in server information
         server_information['server_version'] = connection.server_version
-        server_information['type'] = server['type']
+        server_information['type'] = server_passed['type']

         # Drop the database if already exists.
         test_utils.drop_database(connection, test_db_name)

         # Create database
-        test_utils.create_database(server, test_db_name)
+        test_utils.create_database(server_passed, test_db_name)

         # Configure preferences for the test cases
         test_utils.configure_preferences(
-            default_binary_path=server['default_binary_paths'])
+            default_binary_path=server_passed['default_binary_paths'])

-        suite = get_suite(test_module_list,
-                          server,
+        # Get unit test suite
+        suite = get_suite(test_module_list_passed,
+                          server_passed,
                           test_client,
-                          server_information, test_db_name)
+                          server_information, test_db_name, driver_passed)

+        # Run unit test suite created above
         tests = unittest.TextTestRunner(stream=sys.stderr,
                                         descriptions=True,
                                         verbosity=2).run(suite)

+        # processing results
         ran_tests, failed_cases, skipped_cases, passed_cases = \
             get_tests_result(tests)
-        test_result[server['name']] = [ran_tests, failed_cases,
-                                       skipped_cases, passed_cases]

+        # This is required when some tests are running parallel
+        # & some sequential in case of parallel ui tests
+        if threading.current_thread().getName() == "sequential_tests":
+            try:
+                if test_result[server_passed['name']][0] is not None:
+                    ran_tests = test_result[server_passed['name']][0] + \
+                        ran_tests
+                    failed_cases.update(test_result[server_passed['name']][1])
+                    skipped_cases.update(test_result[server_passed['name']][2])
+                    passed_cases.update(test_result[server_passed['name']][3])
+                    test_result[server_passed['name']] = [ran_tests,
+                                                          failed_cases,
+                                                          skipped_cases,
+                                                          passed_cases]
+            except KeyError:
+                pass
+
+        # Add final results server wise in test_result dict
+        test_result[server_passed['name']] = [ran_tests, failed_cases,
+                                              skipped_cases, passed_cases]

         # Set empty list for 'passed' parameter for each testRun.
         # So that it will not append same test case name
-        unittest.result.TestResult.passed = []
-
-        if len(failed_cases) > 0:
-            failure = True
+        # unittest.result.TestResult.passed = []

         # Drop the testing database created initially
         if connection:
@ -511,10 +502,124 @@ if __name__ == '__main__':

         # Delete test server
         test_utils.delete_test_server(test_client)
-    except SystemExit:
-        if handle_cleanup:
-            handle_cleanup()
+    except Exception as exc:
+        traceback.print_exc(file=sys.stderr)
+        print(str(exc))
+        print("Exception in {0}".format(threading.current_thread().ident))
+    finally:
+        # Delete web-driver instance
+        thread_name = "parallel_tests" + server_passed['name']
+        if threading.currentThread().getName() == thread_name:
+            driver_passed.quit()
+            time.sleep(20)
+
+        # Print info about completed tests
+        print(
+            "\n=============Completed the test cases for '%s'============="
+            % server_passed['name'], file=sys.stderr)
+
+
+def run_parallel_tests(url_client, servers_details, parallel_tests_lists,
+                       name_of_browser, version_of_browser, max_thread_count):
+    """
+    Function used to run tests in parallel
+    :param url_client:
+    :param servers_details:
+    :param parallel_tests_lists:
+    :param name_of_browser:
+    :param version_of_browser:
+    :param max_thread_count:
+    """
+    driver_object = None
+    try:
+        # Thread list
+        threads_list = []
+        # Create thread for each server
+        for ser in servers_details:
+            # Logic to add new threads
+            while True:
+                # If active thread count <= max_thread_count, add new thread
+                if threading.activeCount() <= max_thread_count:
+                    # Get remote web-driver instance at server level
+                    driver_object = \
+                        test_utils.get_remote_webdriver(hub_url,
+                                                        name_of_browser,
+                                                        version_of_browser,
+                                                        ser['name'])
+                    # Launch client url in browser
+                    test_utils.launch_url_in_browser(driver_object, url_client)

+                    # Add name for thread
+                    thread_name = "parallel_tests" + ser['name']
+
+                    # Start thread
+                    t = threading.Thread(target=execute_test, name=thread_name,
+                                         args=(parallel_tests_lists, ser,
+                                               driver_object))
+                    threads_list.append(t)
+                    t.start()
+                    time.sleep(3)
+                    break
+                # else sleep for 10 seconds
+                else:
+                    time.sleep(10)
+
+        # Start threads in parallel
+        for t in threads_list:
+            t.join()
+    except Exception as exc:
+        # Print exception stack trace
+        traceback.print_exc(file=sys.stderr)
+        print(str(exc))
+        # Clean driver object created
+        if driver_object is not None:
+            driver_object.quit()
+
+
+def run_sequential_tests(url_client, servers_details, sequential_tests_lists,
+                         name_of_browser, version_of_browser):
+    """
+    Function is used to execute tests that need to be run in a sequential
+    manner.
+    :param url_client:
+    :param servers_details:
+    :param sequential_tests_lists:
+    :param name_of_browser:
+    :param version_of_browser:
+    :return:
+    """
+    driver_object = None
+    try:
+        # Get remote web-driver instance
+        driver_object = test_utils.get_remote_webdriver(hub_url,
+                                                        name_of_browser,
+                                                        version_of_browser,
+                                                        "Sequential_Tests")
+
+        # Launch client url in browser
+        test_utils.launch_url_in_browser(driver_object, url_client)
+
+        # Add name for thread
+        thread_name = "sequential_tests"
+
+        # Start thread
+        for ser in servers_details:
+            t = threading.Thread(target=execute_test,
+                                 name=thread_name,
+                                 args=(sequential_tests_lists, ser,
+                                       driver_object))
+            t.start()
+            t.join()
+    except Exception as exc:
+        # Print exception stack trace
+        traceback.print_exc(file=sys.stderr)
+        print(str(exc))
+    finally:
+        # Clean driver object created
+        driver_object.quit()
+
+
+def print_test_results():
     print(
         "\n==============================================================="
         "=======",
@ -543,6 +648,10 @@
             total_passed_cases = int(
                 test_result[server_res][0]) - total_failed - total_skipped

+            if len(failed_cases) > 0:
+                global failure
+                failure = True
+
             print(
                 "%s:\n\n\t%s test%s passed\n\t%s test%s failed%s%s"
                 "\n\t%s test%s skipped%s%s\n" %
@ -578,12 +687,162 @@
        file=sys.stderr
    )


if __name__ == '__main__':
    # Failure detected?
    failure = False
    test_result = dict()
    cov = None

    # Set signal handler for cleanup
    signal_list = dir(signal)
    required_signal_list = ['SIGTERM', 'SIGABRT', 'SIGQUIT', 'SIGINT']
    # Get the OS wise supported signals
    supported_signal_list = [sig for sig in required_signal_list if
                             sig in signal_list]
    for sig in supported_signal_list:
        signal.signal(getattr(signal, sig), sig_handler)

    # Set basic logging configuration for log file
    fh = logging.FileHandler(CURRENT_PATH + '/' +
                             'regression.log', 'w', 'utf-8')
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(logging.Formatter('[%(thread)d] ' +
                                      config.FILE_LOG_FORMAT))

    logger = logging.getLogger()
    logger.addHandler(fh)

    # Create logger to write log in the logger file as well as on console
    stderr_logger = logging.getLogger('STDERR')
    sys.stderr = StreamToLogger(stderr_logger, logging.ERROR)
    args = vars(add_arguments())
    # Get test module list
    try:
        test_module_list = get_test_modules(args)
    except Exception as e:
        print(str(e))
        sys.exit(1)
    # Login the test client
    test_utils.login_tester_account(test_client)

    servers_info = test_utils.get_config_data()
    node_name = "all"
    if args['pkg'] is not None:
        node_name = args['pkg'].split('.')[-1]

    # Start coverage
    if test_utils.is_coverage_enabled(args):
        cov = coverage.Coverage(config_file=COVERAGE_CONFIG_FILE)
        cov.start()

    # Check if feature tests included & parallel tests switch passed
    if test_utils.is_feature_test_included(args) and \
            test_utils.is_parallel_ui_tests(args):

        # Get selenium config dict
        selenoid_config = test_setup.config_data['selenoid_config']

        # Set DEFAULT_SERVER value
        default_server = selenoid_config['pgAdmin_default_server']
        os.environ["PGADMIN_CONFIG_DEFAULT_SERVER"] = str(default_server)
        config.DEFAULT_SERVER = str(default_server)

        # Get hub url
        hub_url = selenoid_config['selenoid_url']

        # Get selenium grid status & list of available browser out passed
        selenium_grid_status, list_of_browsers \
            = test_utils.get_selenium_grid_status_and_browser_list(hub_url)

        # Execute tests if selenium-grid is up
        if selenium_grid_status and len(list_of_browsers) > 0:
            app_starter_local = None
            # run across browsers
            for browser_info in list_of_browsers:
                try:
                    # browser info
                    browser_name, browser_version = \
                        test_utils.get_browser_details(browser_info, hub_url)

                    # tests lists can be executed in parallel & sequentially
                    parallel_tests, sequential_tests = \
                        test_utils.get_parallel_sequential_module_list(
                            test_module_list)

                    # Print test summary
                    test_utils.print_test_summary(test_module_list,
                                                  parallel_tests,
                                                  sequential_tests,
                                                  browser_name,
                                                  browser_version)

                    # Create app from source code
                    app_starter_local = AppStarter(None, config)
                    client_url = app_starter_local.start_app()

                    # Running Parallel tests
                    if len(parallel_tests) > 0:
                        parallel_sessions = int(selenoid_config[
                                                'max_parallel_sessions'])

                        run_parallel_tests(client_url, servers_info,
                                           parallel_tests, browser_name,
                                           browser_version, parallel_sessions)

                    # Wait till all threads started in parallel are finished
                    while True:
                        try:
                            if threading.activeCount() <= 1:
                                break
                            else:
                                time.sleep(10)
                        except Exception as e:
                            traceback.print_exc(file=sys.stderr)
                            print(str(e))

                    # Sequential Tests
                    if len(sequential_tests) > 0:
                        run_sequential_tests(client_url, servers_info,
                                             sequential_tests, browser_name,
                                             browser_version)

                    # Clean up environment
                    if app_starter_local:
                        app_starter_local.stop_app()

                except SystemExit:
                    if app_starter_local:
                        app_starter_local.stop_app()
                    if handle_cleanup:
                        handle_cleanup()
                # Pause before printing result in order not to mix output
                time.sleep(5)
                # Print note for completion of execution in a browser.
                print(
                    "\n============= Test execution with {0} is "
                    "completed.=============".format(browser_name),
                    file=sys.stderr)
                print_test_results()
            del os.environ["PGADMIN_CONFIG_DEFAULT_SERVER"]
    else:
        try:
            for server in servers_info:
                thread = threading.Thread(target=execute_test, args=(
                    test_module_list, server, driver))
                thread.start()
                thread.join()
        except SystemExit:
            if handle_cleanup:
                handle_cleanup()
        print_test_results()

    # Stop code coverage
    if test_utils.is_coverage_enabled(args):
        cov.stop()
        cov.save()

    # Print coverage only if coverage args given in command line
    if test_utils.is_coverage_enabled(args):
        test_utils.print_and_store_coverage_report(cov)
@ -54,6 +54,15 @@
       "key_file": ""
     }
   }],
+  "selenoid_config": {
+    "pgAdmin_default_server": "IP address of machine where source code is going to be executed",
+    "max_parallel_sessions": "3",
+    "selenoid_url": "http://<IP address of Selenoid Installed machine>:4444/wd/hub",
+    "browsers_list": [
+      {"name": "Chrome", "version": null},
+      {"name": "Firefox", "version": null}
+    ]
+  },
   "server_group": 1,
   "server_credentials": [
     {
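For reference, the runner reads this block through test_setup.config_data.
A stand-alone equivalent looks roughly like the sketch below; the file path
is an assumption based on the 'web/regression' directory mentioned earlier in
this change, and the defaults are only illustrative:

    import json

    # Assumed location of the filled-in template; adjust to your checkout.
    with open('web/regression/test_config.json') as cfg_file:
        config_data = json.load(cfg_file)

    selenoid_cfg = config_data.get('selenoid_config', {})
    hub_url = selenoid_cfg.get('selenoid_url')
    max_sessions = int(selenoid_cfg.get('max_parallel_sessions', 1))
    browsers = selenoid_cfg.get('browsers_list', [])

    print(hub_url, max_sessions, [b['name'] for b in browsers])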