2014-12-18 11:49:09 -06:00
|
|
|
##########################################################################
|
|
|
|
#
|
|
|
|
# pgAdmin 4 - PostgreSQL Tools
|
|
|
|
#
|
2024-01-01 02:43:48 -06:00
|
|
|
# Copyright (C) 2013 - 2024, The pgAdmin Development Team
|
2014-12-18 11:49:09 -06:00
|
|
|
# This software is released under the PostgreSQL Licence
|
|
|
|
#
|
|
|
|
##########################################################################
|
|
|
|
|
2015-01-21 06:00:13 -06:00
|
|
|
"""The main pgAdmin module. This handles the application initialisation tasks,
|
|
|
|
such as setup of logging, dynamic loading of modules etc."""
|
2016-06-21 08:12:14 -05:00
|
|
|
import logging
|
2018-01-26 10:54:21 -06:00
|
|
|
import os
|
|
|
|
import sys
|
2020-10-20 06:44:45 -05:00
|
|
|
import re
|
2020-11-09 01:05:19 -06:00
|
|
|
import ipaddress
|
2021-06-08 09:11:47 -05:00
|
|
|
import traceback
|
2023-02-01 06:22:22 -06:00
|
|
|
import shutil
|
2021-06-08 09:11:47 -05:00
|
|
|
|
2019-02-06 07:17:52 -06:00
|
|
|
from types import MethodType
|
2015-06-29 02:54:05 -05:00
|
|
|
from collections import defaultdict
|
2016-06-21 08:12:14 -05:00
|
|
|
from importlib import import_module
|
|
|
|
|
2017-07-25 04:15:18 -05:00
|
|
|
from flask import Flask, abort, request, current_app, session, url_for
|
2021-05-25 09:42:57 -05:00
|
|
|
from flask_socketio import SocketIO
|
2019-02-04 04:31:47 -06:00
|
|
|
from werkzeug.exceptions import HTTPException
|
2021-11-24 05:52:57 -06:00
|
|
|
from flask_babel import Babel, gettext
|
|
|
|
from flask_babel import gettext as _
|
2017-09-18 08:39:43 -05:00
|
|
|
from flask_login import user_logged_in, user_logged_out
|
2015-01-22 09:56:23 -06:00
|
|
|
from flask_mail import Mail
|
2018-03-08 03:33:43 -06:00
|
|
|
from flask_paranoid import Paranoid
|
|
|
|
from flask_security import Security, SQLAlchemyUserDatastore, current_user
|
2019-05-28 01:30:18 -05:00
|
|
|
from flask_security.utils import login_user, logout_user
|
2023-11-03 07:49:01 -05:00
|
|
|
from flask_migrate import Migrate
|
2017-01-30 05:25:02 -06:00
|
|
|
from werkzeug.datastructures import ImmutableDict
|
2016-06-21 08:12:14 -05:00
|
|
|
from werkzeug.local import LocalProxy
|
|
|
|
from werkzeug.utils import find_modules
|
2022-03-25 01:22:11 -05:00
|
|
|
from jinja2 import select_autoescape
|
2016-03-22 10:05:43 -05:00
|
|
|
|
2020-09-03 02:29:28 -05:00
|
|
|
from pgadmin.model import db, Role, Server, SharedServer, ServerGroup, \
|
2017-08-29 09:03:02 -05:00
|
|
|
User, Keys, Version, SCHEMA_VERSION as CURRENT_SCHEMA_VERSION
|
2023-01-19 04:27:02 -06:00
|
|
|
from pgadmin.utils import PgAdminModule, driver, KeyManager, heartbeat
|
2018-03-08 03:33:43 -06:00
|
|
|
from pgadmin.utils.preferences import Preferences
|
|
|
|
from pgadmin.utils.session import create_session_interface, pga_unauthorised
|
|
|
|
from pgadmin.utils.versioned_template_loader import VersionedTemplateLoader
|
2021-06-08 09:11:47 -05:00
|
|
|
from datetime import timedelta, datetime
|
|
|
|
from pgadmin.setup import get_version, set_version, check_db_tables
|
2020-10-20 06:44:45 -05:00
|
|
|
from pgadmin.utils.ajax import internal_server_error, make_json_response
|
2019-05-28 00:29:51 -05:00
|
|
|
from pgadmin.utils.csrf import pgCSRFProtect
|
2020-04-06 05:27:05 -05:00
|
|
|
from pgadmin import authenticate
|
2020-10-20 06:44:45 -05:00
|
|
|
from pgadmin.utils.security_headers import SecurityHeaders
|
2021-10-12 04:22:30 -05:00
|
|
|
from pgadmin.utils.constants import KERBEROS, OAUTH2, INTERNAL, LDAP, WEBSERVER
|
2024-02-22 05:09:25 -06:00
|
|
|
from jsonformatter import JsonFormatter
|
2023-01-19 04:27:02 -06:00
|
|
|
|
2020-11-23 00:16:09 -06:00
|
|
|
# Explicitly set the mime-types so that a corrupted windows registry will not
|
|
|
|
# affect pgAdmin 4 to be load properly. This will avoid the issues that may
|
|
|
|
# occur due to security fix of X_CONTENT_TYPE_OPTIONS = "nosniff".
|
|
|
|
import mimetypes
|
2021-07-06 02:52:58 -05:00
|
|
|
|
2020-12-14 01:04:19 -06:00
|
|
|
# Register the mime types explicitly so pgAdmin loads correctly even when
# a corrupted Windows registry would otherwise report the wrong type
# (see the X_CONTENT_TYPE_OPTIONS = "nosniff" note above).
for _mime_type, _extension in (('application/javascript', '.js'),
                               ('text/css', '.css')):
    mimetypes.add_type(_mime_type, _extension)
|
|
|
|
|
2021-05-25 09:42:57 -05:00
|
|
|
|
2016-07-09 02:51:47 -05:00
|
|
|
# winreg is only importable on Windows.  Keep a module-level reference that
# stays None on other platforms, so later code can test "winreg is not None"
# before using it to discover locally installed PostgreSQL servers.
winreg = None
if os.name == 'nt':
    import winreg
|
2015-06-29 01:58:41 -05:00
|
|
|
|
2021-06-14 01:15:54 -05:00
|
|
|
# Module-wide Socket.IO server instance.  Session management is left to
# Flask (manage_session=False), the async mode is plain threading, and
# Socket.IO/Engine.IO's own logging is disabled in favour of pgAdmin's.
socketio = SocketIO(manage_session=False, async_mode='threading',
                    logger=False, engineio_logger=False, debug=False,
                    ping_interval=25, ping_timeout=120)

# Flask endpoint name of the main browser index page; also used elsewhere
# to work out the WSGI alias the application is mounted under.
_INDEX_PATH = 'browser.index'
|
|
|
|
|
2017-01-30 05:25:02 -06:00
|
|
|
|
2015-06-29 01:58:41 -05:00
|
|
|
class PgAdmin(Flask):
    """Flask subclass that discovers and manages the PgAdminModule
    blueprints making up the application, and exposes aggregate views
    (messages, menu items, endpoint URLs) over them."""

    def __init__(self, *args, **kwargs):
        # Set the template loader to a postgres-version-aware loader
        self.jinja_options = ImmutableDict(
            autoescape=select_autoescape(enabled_extensions=('html', 'xml')),
            loader=VersionedTemplateLoader(self)
        )
        # Modules registered to be notified when a user logs out.
        self.logout_hooks = []
        # Callbacks to run once, just before the app starts (or is exported).
        self.before_app_start = []

        super().__init__(*args, **kwargs)

    def find_submodules(self, basemodule):
        """Yield every PgAdminModule instance found in the modules below
        *basemodule*, skipping any module listed in MODULE_BLACKLIST.

        Any exception during the scan is logged and stops the generator
        rather than propagating, so one broken module cannot abort startup.
        """
        try:
            for module_name in find_modules(basemodule, True):
                if module_name in self.config['MODULE_BLACKLIST']:
                    # Lazy %s args: the message is only formatted when the
                    # INFO level is actually enabled.
                    self.logger.info(
                        'Skipping blacklisted module: %s', module_name)
                    continue
                self.logger.info(
                    'Examining potential module: %s', module_name)
                module = import_module(module_name)
                for key in list(module.__dict__.keys()):
                    if isinstance(module.__dict__[key], PgAdminModule):
                        yield module.__dict__[key]
        except Exception:
            # Don't swallow the problem silently -- record the traceback so
            # a broken module can be diagnosed, then stop the generator.
            # (A bare ``return`` is correct in a generator; the previous
            # ``return []`` value was silently discarded anyway.)
            self.logger.exception(
                'Failed while scanning submodules of %s', basemodule)
            return

    @property
    def submodules(self):
        """Yield the registered blueprints that are PgAdminModules."""
        for blueprint in self.blueprints.values():
            if isinstance(blueprint, PgAdminModule):
                yield blueprint

    @property
    def messages(self):
        """Return the merged ``messages`` dicts of all submodules."""
        messages = dict()
        for module in self.submodules:
            messages.update(getattr(module, "messages", dict()))
        return messages

    @property
    def exposed_endpoint_url_map(self):
        """Yield ``(endpoint, url)`` pairs for the static endpoint, every
        endpoint exposed by a submodule, and the application root.

        If the application is mounted under a WSGI alias (e.g.
        ``localhost/pgadmin4``) the alias is prepended to each URL.
        """
        # Work out the WSGI root path by comparing the index URL with its
        # canonical, un-aliased form.
        wsgi_root_path = ''
        if url_for(_INDEX_PATH) != '/browser/':
            wsgi_root_path = url_for(_INDEX_PATH).replace(
                '/browser/', ''
            )

        def get_full_url_path(url):
            """Prefix *url* with the WSGI alias (if any)."""
            return wsgi_root_path + url

        # Fetch all endpoints and their respective url
        for rule in current_app.url_map.iter_rules('static'):
            yield rule.endpoint, get_full_url_path(rule.rule)

        for module in self.submodules:
            for endpoint in module.exposed_endpoints:
                for rule in current_app.url_map.iter_rules(endpoint):
                    yield rule.endpoint, get_full_url_path(rule.rule)

        yield 'pgadmin.root', wsgi_root_path

    @property
    def menu_items(self):
        """Return the menu items of all submodules, grouped by menu key and
        sorted by priority within each group."""
        from operator import attrgetter

        menu_items = defaultdict(list)
        for module in self.submodules:
            for key, value in module.menu_items.items():
                menu_items[key].extend(value)
        # Sort each menu's entries by their declared priority.
        return {
            key: sorted(value, key=attrgetter('priority'))
            for key, value in menu_items.items()
        }

    def register_logout_hook(self, module):
        """Register *module* for logout notification, provided it exposes a
        bound ``on_logout`` method."""
        if hasattr(module, 'on_logout') and \
                isinstance(getattr(module, 'on_logout'), MethodType):
            self.logout_hooks.append(module)

    def register_before_app_start(self, callback):
        """Schedule *callback* to run just before the app starts serving."""
        self.before_app_start.append(callback)

    def run_before_app_start(self):
        """Run all registered before-start callbacks inside an application
        context and a test request context (also used when the app is
        exported rather than served)."""
        with self.app_context(), self.test_request_context():
            for callback in self.before_app_start:
                callback()
|
|
|
|
|
2015-10-20 02:03:18 -05:00
|
|
|
|
2015-06-29 01:58:41 -05:00
|
|
|
def _find_blueprint():
    """Return the blueprint handling the current request, or ``None`` when
    the request is not bound to any blueprint."""
    name = request.blueprint
    if not name:
        return None
    return current_app.blueprints[name]
|
|
|
|
|
2016-06-21 08:21:06 -05:00
|
|
|
|
2015-06-29 01:58:41 -05:00
|
|
|
# Proxy that resolves, per request, to the blueprint handling that request.
current_blueprint = LocalProxy(_find_blueprint)
|
|
|
|
|
2015-01-27 08:18:27 -06:00
|
|
|
|
Resolved quite a few file-system encoding/decoding related cases.
In order to resolve the non-ascii characters in path (in user directory,
storage path, etc) on windows, we have converted the path into the
short-path, so that we don't need to deal with the encoding issues
(especially with Python 2).
We've resolved the majority of the issues with this patch.
We still need to resolve a couple of issues in this area after this.
TODO
* Add better support for non-ascii characters in the database name on
windows with Python 3
* Improve the messages created after the background processes by
different modules (such as Backup, Restore, Import/Export, etc.),
which does not show short-paths, and xml representable characters for
non-ascii characters, when found in the database objects, and the file
PATH.
Fixes #2174, #1797, #2166, #1940
Initial patch by: Surinder Kumar
Reviewed by: Murtuza Zabuawala
2017-03-07 04:00:57 -06:00
|
|
|
def create_app(app_name=None):
|
|
|
|
# Configuration settings
|
|
|
|
import config
|
|
|
|
if not app_name:
|
|
|
|
app_name = config.APP_NAME
|
|
|
|
|
2020-06-01 00:52:38 -05:00
|
|
|
# Check if app is created for CLI operations or Web
|
|
|
|
cli_mode = False
|
|
|
|
if app_name.endswith('-cli'):
|
|
|
|
cli_mode = True
|
|
|
|
|
2017-11-30 05:16:38 -06:00
|
|
|
# Only enable password related functionality in server mode.
|
|
|
|
if config.SERVER_MODE is True:
|
|
|
|
# Some times we need to access these config params where application
|
|
|
|
# context is not available (we can't use current_app.config in those
|
|
|
|
# cases even with current_app.app_context())
|
|
|
|
# So update these params in config itself.
|
|
|
|
# And also these updated config values will picked up by application
|
|
|
|
# since we are updating config before the application instance is
|
|
|
|
# created.
|
|
|
|
|
|
|
|
config.SECURITY_RECOVERABLE = True
|
|
|
|
config.SECURITY_CHANGEABLE = True
|
2022-09-08 04:46:48 -05:00
|
|
|
# Now we'll open change password page in dialog
|
2017-11-30 05:16:38 -06:00
|
|
|
# we don't want it to redirect to main page after password
|
|
|
|
# change operation so we will open the same password change page again.
|
|
|
|
config.SECURITY_POST_CHANGE_VIEW = 'browser.change_password'
|
|
|
|
|
2015-01-21 06:00:13 -06:00
|
|
|
"""Create the Flask application, startup logging and dynamically load
|
|
|
|
additional modules (blueprints) that are found in this directory."""
|
2015-06-29 01:58:41 -05:00
|
|
|
app = PgAdmin(__name__, static_url_path='/static')
|
2015-12-21 23:13:24 -06:00
|
|
|
# Removes unwanted whitespace from render_template function
|
|
|
|
app.jinja_env.trim_blocks = True
|
2014-12-18 11:49:09 -06:00
|
|
|
app.config.from_object(config)
|
2016-08-29 01:22:50 -05:00
|
|
|
app.config.update(dict(PROPAGATE_EXCEPTIONS=True))
|
2014-12-18 11:49:09 -06:00
|
|
|
|
2023-12-21 00:37:26 -06:00
|
|
|
config.SETTINGS_SCHEMA_VERSION = CURRENT_SCHEMA_VERSION
|
2014-12-18 11:49:09 -06:00
|
|
|
##########################################################################
|
|
|
|
# Setup logging and log the application startup
|
|
|
|
##########################################################################
|
|
|
|
|
2019-02-28 07:22:12 -06:00
|
|
|
# We won't care about errors in the logging system, we are more
|
|
|
|
# interested in application errors.
|
2019-03-25 01:06:18 -05:00
|
|
|
logging.raiseExceptions = False
|
2019-02-28 07:22:12 -06:00
|
|
|
|
2014-12-18 11:49:09 -06:00
|
|
|
# Add SQL level logging, and set the base logging level
|
|
|
|
logging.addLevelName(25, 'SQL')
|
|
|
|
app.logger.setLevel(logging.DEBUG)
|
2014-12-18 11:56:17 -06:00
|
|
|
app.logger.handlers = []
|
2014-12-18 11:49:09 -06:00
|
|
|
|
2015-10-20 02:03:18 -05:00
|
|
|
# We also need to update the handler on the webserver in order to see
|
|
|
|
# request. Setting the level prevents werkzeug from setting up it's own
|
|
|
|
# stream handler thus ensuring all the logging goes through the pgAdmin
|
|
|
|
# logger.
|
2014-12-18 11:49:09 -06:00
|
|
|
logger = logging.getLogger('werkzeug')
|
2019-08-08 09:36:22 -05:00
|
|
|
logger.setLevel(config.CONSOLE_LOG_LEVEL)
|
2014-12-18 11:49:09 -06:00
|
|
|
|
2017-06-16 04:17:38 -05:00
|
|
|
# Set SQLITE_PATH to TEST_SQLITE_PATH while running test cases
|
2018-03-08 03:33:43 -06:00
|
|
|
if (
|
|
|
|
'PGADMIN_TESTING_MODE' in os.environ and
|
|
|
|
os.environ['PGADMIN_TESTING_MODE'] == '1'
|
|
|
|
):
|
2017-06-16 04:17:38 -05:00
|
|
|
config.SQLITE_PATH = config.TEST_SQLITE_PATH
|
2019-05-31 09:19:04 -05:00
|
|
|
config.MASTER_PASSWORD_REQUIRED = False
|
2019-06-03 10:35:59 -05:00
|
|
|
config.UPGRADE_CHECK_ENABLED = False
|
2017-06-16 04:17:38 -05:00
|
|
|
|
2020-06-01 00:52:38 -05:00
|
|
|
if not cli_mode:
|
|
|
|
# Ensure the various working directories exist
|
|
|
|
from pgadmin.setup import create_app_data_directory
|
|
|
|
create_app_data_directory(config)
|
2017-05-15 09:59:47 -05:00
|
|
|
|
2020-06-01 00:52:38 -05:00
|
|
|
# File logging
|
2021-05-25 09:48:46 -05:00
|
|
|
from pgadmin.utils.enhanced_log_rotation import \
|
|
|
|
EnhancedRotatingFileHandler
|
|
|
|
fh = EnhancedRotatingFileHandler(config.LOG_FILE,
|
|
|
|
config.LOG_ROTATION_SIZE,
|
|
|
|
config.LOG_ROTATION_AGE,
|
|
|
|
config.LOG_ROTATION_MAX_LOG_FILES)
|
|
|
|
|
2020-06-01 00:52:38 -05:00
|
|
|
fh.setLevel(config.FILE_LOG_LEVEL)
|
2024-02-22 05:09:25 -06:00
|
|
|
|
|
|
|
if config.JSON_LOGGER:
|
|
|
|
json_formatter = JsonFormatter(config.FILE_LOG_FORMAT_JSON)
|
|
|
|
fh.setFormatter(json_formatter)
|
|
|
|
else:
|
|
|
|
fh.setFormatter(logging.Formatter(config.FILE_LOG_FORMAT))
|
|
|
|
|
2020-06-01 00:52:38 -05:00
|
|
|
app.logger.addHandler(fh)
|
|
|
|
logger.addHandler(fh)
|
2014-12-18 11:49:09 -06:00
|
|
|
|
|
|
|
# Console logging
|
2019-03-19 10:55:34 -05:00
|
|
|
ch = logging.StreamHandler()
|
2014-12-18 11:49:09 -06:00
|
|
|
ch.setLevel(config.CONSOLE_LOG_LEVEL)
|
2024-02-22 05:09:25 -06:00
|
|
|
|
|
|
|
if config.JSON_LOGGER:
|
|
|
|
json_formatter = JsonFormatter(config.CONSOLE_LOG_FORMAT_JSON)
|
|
|
|
ch.setFormatter(json_formatter)
|
|
|
|
else:
|
|
|
|
ch.setFormatter(logging.Formatter(config.CONSOLE_LOG_FORMAT))
|
|
|
|
|
2014-12-18 11:49:09 -06:00
|
|
|
app.logger.addHandler(ch)
|
|
|
|
logger.addHandler(ch)
|
|
|
|
|
|
|
|
# Log the startup
|
2015-10-20 02:03:18 -05:00
|
|
|
app.logger.info('########################################################')
|
2014-12-18 11:49:09 -06:00
|
|
|
app.logger.info('Starting %s v%s...', config.APP_NAME, config.APP_VERSION)
|
2015-10-20 02:03:18 -05:00
|
|
|
app.logger.info('########################################################')
|
2015-03-10 08:09:11 -05:00
|
|
|
app.logger.debug("Python syspath: %s", sys.path)
|
2015-06-29 01:58:41 -05:00
|
|
|
|
2015-02-25 11:06:00 -06:00
|
|
|
##########################################################################
|
|
|
|
# Setup i18n
|
|
|
|
##########################################################################
|
2015-06-29 01:58:41 -05:00
|
|
|
|
2015-02-25 11:06:00 -06:00
|
|
|
# Initialise i18n
|
|
|
|
babel = Babel(app)
|
2015-06-29 01:58:41 -05:00
|
|
|
|
2015-02-25 11:06:00 -06:00
|
|
|
def get_locale():
|
2017-03-24 09:20:10 -05:00
|
|
|
"""Get the language for the user."""
|
|
|
|
language = 'en'
|
|
|
|
if config.SERVER_MODE is False:
|
|
|
|
# Get the user language preference from the miscellaneous module
|
2021-08-19 04:36:16 -05:00
|
|
|
user_id = None
|
2021-12-03 01:01:59 -06:00
|
|
|
if current_user and current_user.is_authenticated:
|
2017-10-30 07:50:25 -05:00
|
|
|
user_id = current_user.id
|
|
|
|
else:
|
2021-06-06 03:28:06 -05:00
|
|
|
user = user_datastore.find_user(email=config.DESKTOP_USER)
|
2017-10-30 07:50:25 -05:00
|
|
|
if user is not None:
|
|
|
|
user_id = user.id
|
|
|
|
user_language = Preferences.raw_value(
|
2019-12-05 03:04:44 -06:00
|
|
|
'misc', 'user_language', 'user_language', user_id
|
2017-10-30 07:50:25 -05:00
|
|
|
)
|
|
|
|
if user_language is not None:
|
|
|
|
language = user_language
|
2017-03-24 09:20:10 -05:00
|
|
|
else:
|
|
|
|
# If language is available in get request then return the same
|
|
|
|
# otherwise check the session or cookie
|
|
|
|
data = request.form
|
|
|
|
if 'language' in data:
|
|
|
|
language = data['language'] or language
|
|
|
|
setattr(session, 'PGADMIN_LANGUAGE', language)
|
|
|
|
elif hasattr(session, 'PGADMIN_LANGUAGE'):
|
|
|
|
language = getattr(session, 'PGADMIN_LANGUAGE', language)
|
|
|
|
elif hasattr(request.cookies, 'PGADMIN_LANGUAGE'):
|
2018-01-26 10:54:21 -06:00
|
|
|
language = getattr(
|
|
|
|
request.cookies, 'PGADMIN_LANGUAGE', language
|
|
|
|
)
|
2017-03-24 09:20:10 -05:00
|
|
|
|
2015-06-29 01:58:41 -05:00
|
|
|
return language
|
2015-02-25 11:06:00 -06:00
|
|
|
|
Update SQLAlchemy, Flask, Flask-SQLAlchemy, and other packages to current versions. #5901
- Update Flask, Flask-SQLAlchemy, Flask-Babel, Flask-Security-Too, Flask-SocketIO, pytz, psutil, SQLAlchemy, bcrypt, cryptography, eventlet, Authlib, requests python packages
- Remove pinned dnspython, Werkzeug packages from requirements.txt
2023-03-15 01:27:16 -05:00
|
|
|
babel.init_app(app, locale_selector=get_locale)
|
2015-01-22 09:56:23 -06:00
|
|
|
##########################################################################
|
|
|
|
# Setup authentication
|
|
|
|
##########################################################################
|
2022-10-20 05:48:41 -05:00
|
|
|
if config.CONFIG_DATABASE_URI is not None and \
|
|
|
|
len(config.CONFIG_DATABASE_URI) > 0:
|
|
|
|
app.config['SQLALCHEMY_DATABASE_URI'] = config.CONFIG_DATABASE_URI
|
|
|
|
else:
|
|
|
|
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{0}?timeout={1}' \
|
|
|
|
.format(config.SQLITE_PATH.replace('\\', '/'),
|
|
|
|
getattr(config, 'SQLITE_TIMEOUT', 500)
|
|
|
|
)
|
2015-01-26 09:20:28 -06:00
|
|
|
|
2020-09-11 00:56:47 -05:00
|
|
|
# Override USER_DOES_NOT_EXIST and INVALID_PASSWORD messages from flask.
|
|
|
|
app.config['SECURITY_MSG_USER_DOES_NOT_EXIST'] = \
|
|
|
|
app.config['SECURITY_MSG_INVALID_PASSWORD'] = \
|
|
|
|
(gettext("Incorrect username or password."), "error")
|
2023-02-01 02:54:49 -06:00
|
|
|
app.config['SECURITY_PASSWORD_LENGTH_MIN'] = config.PASSWORD_LENGTH_MIN
|
2020-09-11 00:56:47 -05:00
|
|
|
|
2015-01-22 09:56:23 -06:00
|
|
|
# Create database connection object and mailer
|
|
|
|
db.init_app(app)
|
2023-11-03 07:49:01 -05:00
|
|
|
Migrate(app, db)
|
2017-04-23 22:06:55 -05:00
|
|
|
|
|
|
|
##########################################################################
|
|
|
|
# Upgrade the schema (if required)
|
|
|
|
##########################################################################
|
2022-10-20 05:48:41 -05:00
|
|
|
from config import SQLITE_PATH
|
|
|
|
from pgadmin.setup import db_upgrade
|
|
|
|
|
2021-06-08 09:11:47 -05:00
|
|
|
def backup_db_file():
|
|
|
|
"""
|
|
|
|
Create a backup of the current database file
|
|
|
|
and create new database file with default settings.
|
|
|
|
"""
|
|
|
|
backup_file_name = "{0}.{1}".format(
|
|
|
|
SQLITE_PATH, datetime.now().strftime('%Y%m%d%H%M%S'))
|
|
|
|
os.rename(SQLITE_PATH, backup_file_name)
|
|
|
|
app.logger.error('Exception in database migration.')
|
|
|
|
app.logger.info('Creating new database file.')
|
|
|
|
try:
|
|
|
|
db_upgrade(app)
|
|
|
|
os.environ[
|
|
|
|
'CORRUPTED_DB_BACKUP_FILE'] = backup_file_name
|
|
|
|
app.logger.info('Database migration completed.')
|
2022-01-12 03:23:19 -06:00
|
|
|
except Exception:
|
2021-06-08 09:11:47 -05:00
|
|
|
app.logger.error('Database migration failed')
|
|
|
|
app.logger.error(traceback.format_exc())
|
|
|
|
raise RuntimeError('Migration failed')
|
|
|
|
|
|
|
|
def upgrade_db():
|
|
|
|
"""
|
|
|
|
Execute the migrations.
|
|
|
|
"""
|
|
|
|
try:
|
|
|
|
db_upgrade(app)
|
|
|
|
os.environ['CORRUPTED_DB_BACKUP_FILE'] = ''
|
2022-01-12 03:23:19 -06:00
|
|
|
except Exception:
|
2023-01-30 04:08:36 -06:00
|
|
|
app.logger.error('Database migration failed')
|
|
|
|
app.logger.error(traceback.format_exc())
|
2021-06-08 09:11:47 -05:00
|
|
|
backup_db_file()
|
|
|
|
|
|
|
|
# check all tables are present in the db.
|
|
|
|
is_db_error, invalid_tb_names = check_db_tables()
|
|
|
|
if is_db_error:
|
|
|
|
app.logger.error(
|
|
|
|
'Table(s) {0} are missing in the'
|
|
|
|
' database'.format(invalid_tb_names))
|
|
|
|
backup_db_file()
|
|
|
|
|
2022-10-20 05:48:41 -05:00
|
|
|
def run_migration_for_sqlite():
|
|
|
|
with app.app_context():
|
|
|
|
# Run migration for the first time i.e. create database
|
|
|
|
# If version not available, user must have aborted. Tables are not
|
|
|
|
# created and so its an empty db
|
|
|
|
if not os.path.exists(SQLITE_PATH) or get_version() == -1:
|
|
|
|
# If running in cli mode then don't try to upgrade, just raise
|
|
|
|
# the exception
|
|
|
|
if not cli_mode:
|
|
|
|
upgrade_db()
|
|
|
|
else:
|
|
|
|
if not os.path.exists(SQLITE_PATH):
|
|
|
|
raise FileNotFoundError(
|
|
|
|
'SQLite database file "' + SQLITE_PATH +
|
|
|
|
'" does not exists.')
|
|
|
|
raise RuntimeError(
|
|
|
|
'The configuration database file is not valid.')
|
2021-06-08 09:11:47 -05:00
|
|
|
else:
|
2022-10-20 05:48:41 -05:00
|
|
|
schema_version = get_version()
|
|
|
|
|
|
|
|
# Run migration if current schema version is greater than the
|
|
|
|
# schema version stored in version table
|
2023-02-01 06:22:22 -06:00
|
|
|
if CURRENT_SCHEMA_VERSION > schema_version:
|
|
|
|
# Take a backup of the old database file.
|
|
|
|
try:
|
|
|
|
prev_database_file_name = \
|
|
|
|
"{0}.prev.bak".format(SQLITE_PATH)
|
|
|
|
shutil.copyfile(SQLITE_PATH, prev_database_file_name)
|
|
|
|
except Exception as e:
|
|
|
|
app.logger.error(e)
|
|
|
|
|
2022-10-20 05:48:41 -05:00
|
|
|
upgrade_db()
|
|
|
|
else:
|
|
|
|
# check all tables are present in the db.
|
|
|
|
is_db_error, invalid_tb_names = check_db_tables()
|
|
|
|
if is_db_error:
|
|
|
|
app.logger.error(
|
|
|
|
'Table(s) {0} are missing in the'
|
|
|
|
' database'.format(invalid_tb_names))
|
|
|
|
backup_db_file()
|
|
|
|
|
|
|
|
# Update schema version to the latest
|
|
|
|
if CURRENT_SCHEMA_VERSION > schema_version:
|
|
|
|
set_version(CURRENT_SCHEMA_VERSION)
|
|
|
|
db.session.commit()
|
|
|
|
|
|
|
|
if os.name != 'nt':
|
|
|
|
os.chmod(config.SQLITE_PATH, 0o600)
|
|
|
|
|
|
|
|
def run_migration_for_others():
|
|
|
|
with app.app_context():
|
|
|
|
# Run migration for the first time i.e. create database
|
|
|
|
# If version not available, user must have aborted. Tables are not
|
|
|
|
# created and so its an empty db
|
Update SQLAlchemy, Flask, Flask-SQLAlchemy, and other packages to current versions. #5901
- Update Flask, Flask-SQLAlchemy, Flask-Babel, Flask-Security-Too, Flask-SocketIO, pytz, psutil, SQLAlchemy, bcrypt, cryptography, eventlet, Authlib, requests python packages
- Remove pinned dnspython, Werkzeug packages from requirements.txt
2023-03-15 01:27:16 -05:00
|
|
|
if get_version() == -1:
|
|
|
|
db_upgrade(app)
|
|
|
|
else:
|
|
|
|
schema_version = get_version()
|
|
|
|
|
|
|
|
# Run migration if current schema version is greater than
|
|
|
|
# the schema version stored in version table.
|
|
|
|
if CURRENT_SCHEMA_VERSION > schema_version:
|
2022-10-20 05:48:41 -05:00
|
|
|
db_upgrade(app)
|
Update SQLAlchemy, Flask, Flask-SQLAlchemy, and other packages to current versions. #5901
- Update Flask, Flask-SQLAlchemy, Flask-Babel, Flask-Security-Too, Flask-SocketIO, pytz, psutil, SQLAlchemy, bcrypt, cryptography, eventlet, Authlib, requests python packages
- Remove pinned dnspython, Werkzeug packages from requirements.txt
2023-03-15 01:27:16 -05:00
|
|
|
# Update schema version to the latest
|
|
|
|
set_version(CURRENT_SCHEMA_VERSION)
|
|
|
|
db.session.commit()
|
2022-10-20 05:48:41 -05:00
|
|
|
|
|
|
|
# Run the migration as per specified by the user.
|
|
|
|
if config.CONFIG_DATABASE_URI is not None and \
|
|
|
|
len(config.CONFIG_DATABASE_URI) > 0:
|
|
|
|
run_migration_for_others()
|
|
|
|
else:
|
|
|
|
run_migration_for_sqlite()
|
2019-04-17 10:57:34 -05:00
|
|
|
|
2015-06-29 01:58:41 -05:00
|
|
|
Mail(app)
|
2015-01-22 09:56:23 -06:00
|
|
|
|
2020-06-01 00:52:38 -05:00
|
|
|
# Don't bother paths when running in cli mode
|
|
|
|
if not cli_mode:
|
2022-11-18 22:43:41 -06:00
|
|
|
from pgadmin.utils import paths
|
2022-09-09 08:06:51 -05:00
|
|
|
paths.init_app()
|
2016-05-12 13:34:28 -05:00
|
|
|
|
2015-01-22 09:56:23 -06:00
|
|
|
# Setup Flask-Security
|
|
|
|
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
|
2016-10-19 03:22:38 -05:00
|
|
|
security = Security(None, user_datastore)
|
2015-07-22 11:42:39 -05:00
|
|
|
|
2016-10-19 03:22:38 -05:00
|
|
|
##########################################################################
|
|
|
|
# Setup security
|
|
|
|
##########################################################################
|
|
|
|
with app.app_context():
|
2018-01-26 10:54:21 -06:00
|
|
|
config.CSRF_SESSION_KEY = Keys.query.filter_by(
|
|
|
|
name='CSRF_SESSION_KEY').first().value
|
|
|
|
config.SECRET_KEY = Keys.query.filter_by(
|
|
|
|
name='SECRET_KEY').first().value
|
|
|
|
config.SECURITY_PASSWORD_SALT = Keys.query.filter_by(
|
|
|
|
name='SECURITY_PASSWORD_SALT').first().value
|
2016-10-19 03:22:38 -05:00
|
|
|
|
|
|
|
    # Update the app.config with proper security keys for signing CSRF data,
|
|
|
|
# signing cookies, and the SALT for hashing the passwords.
|
2018-01-26 10:54:21 -06:00
|
|
|
app.config.update(dict({
|
|
|
|
'CSRF_SESSION_KEY': config.CSRF_SESSION_KEY,
|
|
|
|
'SECRET_KEY': config.SECRET_KEY,
|
2018-03-19 12:09:19 -05:00
|
|
|
'SECURITY_PASSWORD_SALT': config.SECURITY_PASSWORD_SALT,
|
2019-05-28 00:29:51 -05:00
|
|
|
'SESSION_COOKIE_DOMAIN': config.SESSION_COOKIE_DOMAIN,
|
|
|
|
# CSRF Token expiration till session expires
|
|
|
|
'WTF_CSRF_TIME_LIMIT': getattr(config, 'CSRF_TIME_LIMIT', None),
|
|
|
|
'WTF_CSRF_METHODS': ['GET', 'POST', 'PUT', 'DELETE'],
|
2023-11-08 06:37:32 -06:00
|
|
|
        # Disable deliverable check for email addresses
|
2024-04-16 01:53:03 -05:00
|
|
|
'SECURITY_EMAIL_VALIDATOR_ARGS': config.SECURITY_EMAIL_VALIDATOR_ARGS,
|
|
|
|
# Disable CSRF for unauthenticated endpoints
|
|
|
|
'SECURITY_CSRF_IGNORE_UNAUTH_ENDPOINTS': True
|
2018-01-26 10:54:21 -06:00
|
|
|
}))
|
2015-01-22 09:56:23 -06:00
|
|
|
|
2021-07-06 02:52:58 -05:00
|
|
|
app.config.update(dict({
|
|
|
|
'INTERNAL': INTERNAL,
|
|
|
|
'LDAP': LDAP,
|
|
|
|
'KERBEROS': KERBEROS,
|
2021-10-12 04:22:30 -05:00
|
|
|
'OAUTH2': OAUTH2,
|
|
|
|
'WEBSERVER': WEBSERVER
|
2021-07-06 02:52:58 -05:00
|
|
|
}))
|
|
|
|
|
2017-09-18 05:40:09 -05:00
|
|
|
security.init_app(app, user_datastore)
|
2016-10-19 03:22:38 -05:00
|
|
|
|
2024-07-01 02:09:12 -05:00
|
|
|
    # Flask-Security-Too > 5.4.* requires a custom unauthenticated handler
    # to be registered with it.
|
|
|
|
security.unauthn_handler(pga_unauthorised)
|
2018-02-01 07:29:18 -06:00
|
|
|
|
2018-10-09 05:34:13 -05:00
|
|
|
# Set the permanent session lifetime to the specified value in config file.
|
|
|
|
app.permanent_session_lifetime = timedelta(
|
|
|
|
days=config.SESSION_EXPIRATION_TIME)
|
|
|
|
|
2020-06-01 00:52:38 -05:00
|
|
|
if not cli_mode:
|
|
|
|
app.session_interface = create_session_interface(
|
|
|
|
app, config.SESSION_SKIP_PATHS
|
|
|
|
)
|
2016-10-19 03:22:38 -05:00
|
|
|
|
2017-07-20 12:04:33 -05:00
|
|
|
# Make the Session more secure against XSS & CSRF when running in web mode
|
2019-08-06 03:21:31 -05:00
|
|
|
if config.SERVER_MODE and config.ENHANCED_COOKIE_PROTECTION:
|
2017-07-20 12:04:33 -05:00
|
|
|
paranoid = Paranoid(app)
|
2023-07-10 00:06:15 -05:00
|
|
|
paranoid.redirect_view = _INDEX_PATH
|
2017-07-20 12:04:33 -05:00
|
|
|
|
2016-10-19 03:22:38 -05:00
|
|
|
##########################################################################
|
2016-06-23 10:27:06 -05:00
|
|
|
# Load all available server drivers
|
2016-10-19 03:22:38 -05:00
|
|
|
##########################################################################
|
2016-04-25 05:03:48 -05:00
|
|
|
driver.init_app(app)
|
2020-04-06 05:27:05 -05:00
|
|
|
authenticate.init_app(app)
|
2023-01-19 04:27:02 -06:00
|
|
|
heartbeat.init_app(app)
|
2017-03-24 09:20:10 -05:00
|
|
|
|
|
|
|
##########################################################################
|
|
|
|
# Register language to the preferences after login
|
|
|
|
##########################################################################
|
|
|
|
@user_logged_in.connect_via(app)
|
|
|
|
def register_language(sender, user):
|
|
|
|
# After logged in, set the language in the preferences if we get from
|
|
|
|
# the login page
|
|
|
|
data = request.form
|
|
|
|
if 'language' in data:
|
|
|
|
language = data['language']
|
|
|
|
|
|
|
|
# Set the user language preference
|
2019-12-05 03:04:44 -06:00
|
|
|
misc_preference = Preferences.module('misc')
|
2017-03-24 09:20:10 -05:00
|
|
|
user_languages = misc_preference.preference(
|
|
|
|
'user_language'
|
|
|
|
)
|
|
|
|
|
|
|
|
if user_languages and language:
|
|
|
|
language = user_languages.set(language)
|
|
|
|
|
2016-06-23 10:27:06 -05:00
|
|
|
    ##########################################################################
    # Register any local servers we can discover
    ##########################################################################
    @user_logged_in.connect_via(app)
    def on_user_logged_in(sender, user):
        """On login, auto-discover locally installed PostgreSQL/EnterpriseDB
        servers (Windows registry or /etc/postgres-reg.ini) and register any
        new ones for the user, unless AUTO_DISCOVER_SERVERS is disabled."""
        # If Auto Discover servers is turned off then return from the
        # function.
        if not config.AUTO_DISCOVER_SERVERS:
            return

        # Keep hold of the user ID
        user_id = user.id

        # Get the first server group for the user
        servergroup_id = 1
        servergroups = ServerGroup.query.filter_by(
            user_id=user_id
        ).order_by("id")

        if int(servergroups.count()) > 0:
            servergroup = servergroups.first()
            servergroup_id = servergroup.id

        '''Add a server to the config database'''
        def add_server(user_id, servergroup_id, name, superuser, port,
                       discovery_id, comment):
            # Create a server object if needed, and store it.
            # NOTE(review): this filter reads the enclosing-scope variable
            # svr_discovery_id rather than the discovery_id parameter. Both
            # call sites pass svr_discovery_id as discovery_id so the values
            # coincide at call time, but the parameter is presumably what was
            # intended — confirm.
            servers = Server.query.filter_by(
                user_id=user_id,
                discovery_id=svr_discovery_id
            ).order_by("id")

            # A server with this discovery id already exists; nothing to do.
            if int(servers.count()) > 0:
                return

            svr = Server(user_id=user_id,
                         servergroup_id=servergroup_id,
                         name=name,
                         host='localhost',
                         port=port,
                         maintenance_db='postgres',
                         username=superuser,
                         connection_params={'sslmode': 'prefer',
                                            'connect_timeout': 10},
                         comment=comment,
                         discovery_id=discovery_id)

            db.session.add(svr)
            db.session.commit()

        # Figure out what servers are present
        if winreg is not None:
            # Windows: scan the installer 'Services' registry keys.
            arch_keys = set()
            proc_arch = os.environ['PROCESSOR_ARCHITECTURE'].lower()

            try:
                proc_arch64 = os.environ['PROCESSOR_ARCHITEW6432'].lower()
            except Exception:
                # PROCESSOR_ARCHITEW6432 is only set for 32-bit processes on
                # 64-bit Windows; absence means no WOW64 view to consider.
                proc_arch64 = None

            # Choose which registry view(s) to search based on architecture.
            if proc_arch == 'x86' and not proc_arch64:
                arch_keys.add(0)
            elif proc_arch == 'x86' or proc_arch == 'amd64':
                arch_keys.add(winreg.KEY_WOW64_32KEY)
                arch_keys.add(winreg.KEY_WOW64_64KEY)

            for arch_key in arch_keys:
                for server_type in ('PostgreSQL', 'EnterpriseDB'):
                    try:
                        root_key = winreg.OpenKey(
                            winreg.HKEY_LOCAL_MACHINE,
                            "SOFTWARE\\" + server_type + "\\Services", 0,
                            winreg.KEY_READ | arch_key
                        )
                        # One subkey per installed server instance.
                        for i in range(0, winreg.QueryInfoKey(root_key)[0]):
                            inst_id = winreg.EnumKey(root_key, i)
                            inst_key = winreg.OpenKey(root_key, inst_id)

                            svr_name = winreg.QueryValueEx(
                                inst_key, 'Display Name'
                            )[0]
                            svr_superuser = winreg.QueryValueEx(
                                inst_key, 'Database Superuser'
                            )[0]
                            svr_port = winreg.QueryValueEx(inst_key, 'Port')[0]
                            svr_discovery_id = inst_id
                            svr_comment = gettext(
                                "Auto-detected {0} installation with the data "
                                "directory at {1}").format(
                                    winreg.QueryValueEx(
                                        inst_key, 'Display Name'
                                    )[0],
                                    winreg.QueryValueEx(
                                        inst_key, 'Data Directory'
                                    )[0])

                            add_server(
                                user_id, servergroup_id, svr_name,
                                svr_superuser, svr_port,
                                svr_discovery_id, svr_comment
                            )

                            inst_key.Close()
                    except Exception:
                        # Missing keys/values are expected when a given
                        # installer is absent; skip to the next view/type.
                        pass
        else:
            # We use the postgres-winreg.ini file on non-Windows
            from configparser import ConfigParser

            registry = ConfigParser()

            try:
                registry.read('/etc/postgres-reg.ini')
                sections = registry.sections()

                # Loop the sections, and get the data from any that are PG or PPAS
                for section in sections:
                    if (
                        section.startswith('PostgreSQL/') or
                        section.startswith('EnterpriseDB/')
                    ):
                        svr_name = registry.get(section, 'Description')
                        svr_superuser = registry.get(section, 'Superuser')

                        # getint function throws exception if value is blank.
                        # Ex: Port=
                        # In such case we should handle the exception and continue
                        # to read the next section of the config file.
                        try:
                            svr_port = registry.getint(section, 'Port')
                        except ValueError:
                            continue

                        svr_discovery_id = section
                        description = registry.get(section, 'Description')
                        data_directory = registry.get(section, 'DataDirectory')
                        svr_comment = gettext("Auto-detected {0} installation "
                                              "with the data directory at {1}"
                                              ).format(description, data_directory)
                        add_server(user_id, servergroup_id, svr_name,
                                   svr_superuser, svr_port, svr_discovery_id,
                                   svr_comment)

            except Exception as e:
                # Best-effort discovery: log to stdout and undo any partial
                # database changes rather than failing the login.
                print(str(e))
                db.session.rollback()
|
2016-06-23 10:27:06 -05:00
|
|
|
|
2017-09-18 08:39:43 -05:00
|
|
|
@user_logged_in.connect_via(app)
|
|
|
|
@user_logged_out.connect_via(app)
|
|
|
|
def force_session_write(app, user):
|
|
|
|
session.force_write = True
|
2016-06-23 10:27:06 -05:00
|
|
|
|
2019-05-28 01:30:18 -05:00
|
|
|
@user_logged_in.connect_via(app)
|
|
|
|
def store_crypt_key(app, user):
|
|
|
|
# in desktop mode, master password is used to encrypt/decrypt
|
|
|
|
# and is stored in the keyManager memory
|
2020-06-12 04:54:17 -05:00
|
|
|
if config.SERVER_MODE and 'password' in request.form:
|
|
|
|
current_app.keyManager.set(request.form['password'])
|
2019-05-28 01:30:18 -05:00
|
|
|
|
2018-10-17 06:42:41 -05:00
|
|
|
@user_logged_out.connect_via(app)
|
2019-02-06 07:17:52 -06:00
|
|
|
def current_user_cleanup(app, user):
|
2018-10-17 06:42:41 -05:00
|
|
|
from config import PG_DEFAULT_DRIVER
|
|
|
|
from pgadmin.utils.driver import get_driver
|
2019-02-06 07:17:52 -06:00
|
|
|
from flask import current_app
|
|
|
|
|
|
|
|
for mdl in current_app.logout_hooks:
|
|
|
|
try:
|
2022-09-09 08:06:51 -05:00
|
|
|
mdl.on_logout()
|
2019-02-06 07:17:52 -06:00
|
|
|
except Exception as e:
|
|
|
|
current_app.logger.exception(e)
|
|
|
|
|
2018-10-17 06:42:41 -05:00
|
|
|
_driver = get_driver(PG_DEFAULT_DRIVER)
|
|
|
|
_driver.gc_own()
|
|
|
|
|
2022-08-16 04:59:57 -05:00
|
|
|
# remove key
|
|
|
|
current_app.keyManager.reset()
|
|
|
|
|
2015-01-22 09:56:23 -06:00
|
|
|
##########################################################################
|
|
|
|
# Load plugin modules
|
|
|
|
##########################################################################
|
2022-06-30 00:36:50 -05:00
|
|
|
from .submodules import get_submodules
|
|
|
|
for module in get_submodules():
|
2015-06-29 01:58:41 -05:00
|
|
|
app.logger.info('Registering blueprint module: %s' % module)
|
2021-11-24 05:52:57 -06:00
|
|
|
if app.blueprints.get(module.name) is None:
|
|
|
|
app.register_blueprint(module)
|
|
|
|
app.register_logout_hook(module)
|
2015-01-19 10:38:47 -06:00
|
|
|
|
2020-11-09 01:05:19 -06:00
|
|
|
@app.before_request
|
|
|
|
def limit_host_addr():
|
|
|
|
"""
|
|
|
|
This function validate the hosts from ALLOWED_HOSTS before allowing
|
|
|
|
HTTP request to avoid Host Header Injection attack
|
|
|
|
:return: None/JSON response with 403 HTTP status code
|
|
|
|
"""
|
2022-11-18 22:43:41 -06:00
|
|
|
client_host = str(request.host).split(':', maxsplit=1)[0]
|
2020-11-09 01:05:19 -06:00
|
|
|
valid = True
|
|
|
|
allowed_hosts = config.ALLOWED_HOSTS
|
|
|
|
|
|
|
|
if len(allowed_hosts) != 0:
|
|
|
|
regex = re.compile(
|
|
|
|
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}(?:/\d{1,2}|)')
|
|
|
|
# Create separate list for ip addresses and host names
|
|
|
|
ip_set = list(filter(lambda ip: regex.match(ip), allowed_hosts))
|
|
|
|
host_set = list(filter(lambda ip: not regex.match(ip),
|
|
|
|
allowed_hosts))
|
|
|
|
is_ip = regex.match(client_host)
|
|
|
|
if is_ip:
|
|
|
|
ip_address = []
|
|
|
|
for ip in ip_set:
|
|
|
|
ip_address.extend(list(ipaddress.ip_network(ip)))
|
|
|
|
valid = ip_address.__contains__(
|
|
|
|
ipaddress.ip_address(client_host)
|
|
|
|
)
|
|
|
|
else:
|
|
|
|
valid = host_set.__contains__(client_host)
|
|
|
|
|
|
|
|
if not valid:
|
|
|
|
return make_json_response(
|
|
|
|
status=403, success=0,
|
|
|
|
errormsg=_("403 FORBIDDEN")
|
|
|
|
)
|
|
|
|
|
2015-01-26 09:20:28 -06:00
|
|
|
    ##########################################################################
    # Handle the desktop login
    ##########################################################################

    @app.before_request
    def before_request():
        """Login the default user if running in desktop mode"""

        # Check the auth key is valid, if it's set, and we're not in server
        # mode, and it's not a help file request.

        # Desktop mode: every request must present the internal key, either
        # as a ?key= query argument or as the PGADMIN_INT_KEY cookie; help
        # static files are exempt.
        if not config.SERVER_MODE and app.PGADMIN_INT_KEY != '' and ((
            'key' not in request.args or
            request.args['key'] != app.PGADMIN_INT_KEY) and
            request.cookies.get('PGADMIN_INT_KEY') != app.PGADMIN_INT_KEY and
            request.endpoint != 'help.static'
        ):
            abort(401)

        if not config.SERVER_MODE and not current_user.is_authenticated:
            user = user_datastore.find_user(email=config.DESKTOP_USER)
            # Throw an error if we failed to find the desktop user, to give
            # the sysadmin a hint. We'll continue to try to login anyway as
            # that'll throw a nice 500 error for us.
            if user is None:
                app.logger.error(
                    'The desktop user %s was not found in the configuration '
                    'database.'
                    % config.DESKTOP_USER
                )
                abort(401)
            login_user(user)
        elif config.SERVER_MODE and not current_user.is_authenticated and \
                request.endpoint in ('redirects.index', 'security.login') and \
                app.PGADMIN_EXTERNAL_AUTH_SOURCE in [KERBEROS, WEBSERVER]:
            # Kerberos/webserver auth: start the external login flow on the
            # index/login endpoints instead of showing the login page.
            return authenticate.login()
        # if the server is restarted the in memory key will be lost
        # but the user session may still be active. Logout the user
        # to get the key again when login
        if config.SERVER_MODE and current_user.is_authenticated and \
                app.PGADMIN_EXTERNAL_AUTH_SOURCE not in [
                    KERBEROS, OAUTH2, WEBSERVER] and \
                current_app.keyManager.get() is None and \
                request.endpoint not in ('security.login', 'security.logout'):
            logout_user()
|
2019-05-28 01:30:18 -05:00
|
|
|
|
2017-03-06 08:53:49 -06:00
|
|
|
@app.after_request
|
|
|
|
def after_request(response):
|
|
|
|
if 'key' in request.args:
|
2018-03-19 12:09:19 -05:00
|
|
|
domain = dict()
|
2018-03-23 05:14:02 -05:00
|
|
|
if config.COOKIE_DEFAULT_DOMAIN and \
|
|
|
|
config.COOKIE_DEFAULT_DOMAIN != 'localhost':
|
2018-03-19 12:09:19 -05:00
|
|
|
domain['domain'] = config.COOKIE_DEFAULT_DOMAIN
|
2019-12-12 23:12:43 -06:00
|
|
|
response.set_cookie('PGADMIN_INT_KEY', value=request.args['key'],
|
2024-01-22 00:55:20 -06:00
|
|
|
path=config.SESSION_COOKIE_PATH,
|
2020-10-20 06:44:45 -05:00
|
|
|
secure=config.SESSION_COOKIE_SECURE,
|
|
|
|
httponly=config.SESSION_COOKIE_HTTPONLY,
|
|
|
|
samesite=config.SESSION_COOKIE_SAMESITE,
|
2018-03-19 12:09:19 -05:00
|
|
|
**domain)
|
2017-03-06 08:53:49 -06:00
|
|
|
|
2020-10-20 06:44:45 -05:00
|
|
|
SecurityHeaders.set_response_headers(response)
|
2017-03-06 08:53:49 -06:00
|
|
|
return response
|
|
|
|
|
2018-08-06 04:51:10 -05:00
|
|
|
##########################################################################
|
|
|
|
# Cache busting
|
|
|
|
##########################################################################
|
|
|
|
|
|
|
|
# Version number to be added to all static file url requests
|
|
|
|
# This is used by url_for function when generating urls
|
|
|
|
# This will solve caching issues when application is upgrading
|
|
|
|
# This is called - Cache Busting
|
|
|
|
@app.url_defaults
|
|
|
|
def add_internal_version(endpoint, values):
|
|
|
|
extensions = config.APP_VERSION_EXTN
|
|
|
|
|
|
|
|
# Add the internal version only if it is set
|
|
|
|
if config.APP_VERSION_PARAM is not None and \
|
|
|
|
config.APP_VERSION_PARAM != '':
|
|
|
|
# If there is a filename, add the version
|
|
|
|
if 'filename' in values \
|
|
|
|
and values['filename'].endswith(extensions):
|
|
|
|
values[config.APP_VERSION_PARAM] = config.APP_VERSION_INT
|
|
|
|
else:
|
|
|
|
# Sometimes there may be direct endpoint for some files
|
|
|
|
# There will be only one rule for such endpoints
|
|
|
|
urls = [url for url in app.url_map.iter_rules(endpoint)]
|
|
|
|
if len(urls) == 1 and urls[0].rule.endswith(extensions):
|
|
|
|
values[config.APP_VERSION_PARAM] = \
|
|
|
|
config.APP_VERSION_INT
|
|
|
|
|
|
|
|
# Strip away internal version param before sending further to app as it was
|
|
|
|
# required for cache busting only
|
|
|
|
@app.url_value_preprocessor
|
|
|
|
def strip_version_number(endpoint, values):
|
2018-08-16 03:23:58 -05:00
|
|
|
if values and config.APP_VERSION_PARAM in values:
|
2018-08-06 04:51:10 -05:00
|
|
|
values.pop(config.APP_VERSION_PARAM)
|
|
|
|
|
2015-02-12 04:28:15 -06:00
|
|
|
##########################################################################
|
2019-10-10 01:35:28 -05:00
|
|
|
# Minify output. Not required in desktop mode
|
2015-06-29 01:58:41 -05:00
|
|
|
##########################################################################
|
2019-10-10 01:35:28 -05:00
|
|
|
if not config.DEBUG and config.SERVER_MODE:
|
|
|
|
from flask_compress import Compress
|
|
|
|
Compress(app)
|
2015-02-12 04:28:15 -06:00
|
|
|
|
2015-06-29 01:58:41 -05:00
|
|
|
@app.context_processor
|
|
|
|
def inject_blueprint():
|
2019-11-07 07:21:03 -06:00
|
|
|
"""
|
|
|
|
Inject a reference to the current blueprint, if any.
|
|
|
|
"""
|
|
|
|
|
2015-06-29 01:58:41 -05:00
|
|
|
return {
|
2015-06-29 02:54:05 -05:00
|
|
|
'current_app': current_app,
|
2019-11-07 07:21:03 -06:00
|
|
|
'current_blueprint': current_blueprint,
|
2016-06-21 08:21:06 -05:00
|
|
|
}
|
2015-10-20 02:03:18 -05:00
|
|
|
|
2019-01-23 01:19:05 -06:00
|
|
|
@app.errorhandler(Exception)
|
|
|
|
def all_exception_handler(e):
|
|
|
|
current_app.logger.error(e, exc_info=True)
|
|
|
|
return internal_server_error(errormsg=str(e))
|
|
|
|
|
2019-02-04 04:31:47 -06:00
|
|
|
# Exclude HTTPexception from above handler (all_exception_handler)
|
|
|
|
# HTTPException are user defined exceptions and those should be returned
|
|
|
|
# as is
|
|
|
|
@app.errorhandler(HTTPException)
|
|
|
|
def http_exception_handler(e):
|
|
|
|
current_app.logger.error(e, exc_info=True)
|
|
|
|
return e
|
|
|
|
|
2019-05-28 01:30:18 -05:00
|
|
|
    # Initialize the in-memory key manager (holds the master/crypt key).
    app.keyManager = KeyManager()

    ##########################################################################
    # Protection against CSRF attacks
    ##########################################################################
    with app.app_context():
        pgCSRFProtect.init_app(app)

    ##########################################################################
    # All done!
    ##########################################################################
    # NOTE(review): cors_allowed_origins="*" allows websocket connections
    # from any origin — confirm this is intended for server-mode deployments.
    socketio.init_app(app, cors_allowed_origins="*")
    return app
|