##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2019, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################

"""The main pgAdmin module. This handles the application initialisation tasks,
such as setting up logging and dynamically loading modules (blueprints)."""
import logging
import os
import sys
from types import MethodType
from collections import defaultdict
from importlib import import_module

from flask import Flask, abort, request, current_app, session, url_for
from werkzeug.exceptions import HTTPException
from flask_babelex import Babel, gettext
from flask_login import user_logged_in, user_logged_out
from flask_mail import Mail
from flask_paranoid import Paranoid
from flask_security import Security, SQLAlchemyUserDatastore, current_user
from flask_security.utils import login_user
from werkzeug.datastructures import ImmutableDict
from werkzeug.local import LocalProxy
from werkzeug.utils import find_modules

from pgadmin.model import db, Role, Server, ServerGroup, \
    User, Keys, Version, SCHEMA_VERSION as CURRENT_SCHEMA_VERSION
from pgadmin.utils import PgAdminModule, driver
from pgadmin.utils.preferences import Preferences
from pgadmin.utils.session import create_session_interface, pga_unauthorised
from pgadmin.utils.versioned_template_loader import VersionedTemplateLoader
from datetime import timedelta
from pgadmin.setup import get_version, set_version
from pgadmin.utils.ajax import internal_server_error


# If the script is running under Python 3, it will not have the xrange
# function defined
winreg = None
if sys.version_info[0] >= 3:
    xrange = range
    if os.name == 'nt':
        import winreg
elif os.name == 'nt':
    import _winreg as winreg


class PgAdmin(Flask):
    def __init__(self, *args, **kwargs):
        # Set the template loader to a postgres-version-aware loader
        self.jinja_options = ImmutableDict(
            extensions=['jinja2.ext.autoescape', 'jinja2.ext.with_'],
            loader=VersionedTemplateLoader(self)
        )
        self.logout_hooks = []

        super(PgAdmin, self).__init__(*args, **kwargs)

    def find_submodules(self, basemodule):
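        """Scan the given base package and yield every PgAdminModule instance
        defined by its submodules, skipping any module name listed in the
        MODULE_BLACKLIST configuration option."""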
        for module_name in find_modules(basemodule, True):
            if module_name in self.config['MODULE_BLACKLIST']:
                self.logger.info(
                    'Skipping blacklisted module: %s' % module_name
                )
                continue
            self.logger.info('Examining potential module: %s' % module_name)
            module = import_module(module_name)
            for key in list(module.__dict__.keys()):
                if isinstance(module.__dict__[key], PgAdminModule):
                    yield module.__dict__[key]

    @property
    def submodules(self):
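        """Yield the registered blueprints that are PgAdminModule instances."""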
        for blueprint in self.blueprints.values():
            if isinstance(blueprint, PgAdminModule):
                yield blueprint

    @property
    def stylesheets(self):
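        """Return the set of stylesheets declared by all submodules."""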
        stylesheets = []
        for module in self.submodules:
            stylesheets.extend(getattr(module, "stylesheets", []))
        return set(stylesheets)

    @property
    def messages(self):
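        """Return a dict of the messages exposed by all submodules."""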
        messages = dict()
        for module in self.submodules:
            messages.update(getattr(module, "messages", dict()))
        return messages

    @property
    def exposed_endpoint_url_map(self):
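        """Yield (endpoint, url) pairs for the static endpoint and every
        endpoint exposed by the submodules, rewriting the URLs when the
        application is mounted under a WSGI alias."""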
|
2017-07-25 04:15:18 -05:00
|
|
|
#############################################################
|
|
|
|
# To handle WSGI paths
|
|
|
|
# If user has setup application under WSGI alias
|
|
|
|
# like 'localhost/pgadmin4' then we have to append '/pgadmin4'
|
|
|
|
# into endpoints
|
|
|
|
#############################################################
|
|
|
|
import config
|
|
|
|
is_wsgi_root_present = False
|
|
|
|
if config.SERVER_MODE:
|
|
|
|
pgadmin_root_path = url_for('browser.index')
|
|
|
|
if pgadmin_root_path != '/browser/':
|
|
|
|
is_wsgi_root_present = True
|
|
|
|
wsgi_root_path = pgadmin_root_path.replace(
|
|
|
|
'/browser/', ''
|
|
|
|
)
|
|
|
|
|
|
|
|
def get_full_url_path(url):
|
|
|
|
"""
|
|
|
|
Generate endpoint URL at per WSGI alias
|
|
|
|
"""
|
|
|
|
if is_wsgi_root_present and url:
|
|
|
|
return wsgi_root_path + url
|
|
|
|
else:
|
|
|
|
return url
|
|
|
|
|
|
|
|
# Fetch all endpoints and their respective url
|
2017-06-12 01:31:22 -05:00
|
|
|
for rule in current_app.url_map.iter_rules('static'):
|
2017-07-25 04:15:18 -05:00
|
|
|
yield rule.endpoint, get_full_url_path(rule.rule)
|
2017-06-12 01:31:22 -05:00
|
|
|
|
|
|
|
for module in self.submodules:
|
|
|
|
for endpoint in module.exposed_endpoints:
|
|
|
|
for rule in current_app.url_map.iter_rules(endpoint):
|
2017-07-25 04:15:18 -05:00
|
|
|
yield rule.endpoint, get_full_url_path(rule.rule)
|
2017-06-12 01:31:22 -05:00
|
|
|
|
2015-06-29 02:54:05 -05:00
|
|
|
@property
|
|
|
|
def javascripts(self):
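        """Return the combined javascript list from all submodules, with
        duplicates (by name) removed."""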
        scripts = []
        scripts_names = []

        # Remove duplicate javascripts from the list
        for module in self.submodules:
            module_scripts = getattr(module, "javascripts", [])
            for s in module_scripts:
                if s['name'] not in scripts_names:
                    scripts.append(s)
                    scripts_names.append(s['name'])

        return scripts

    @property
    def panels(self):
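        """Return the panels contributed by all submodules."""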
        panels = []
        for module in self.submodules:
            panels.extend(module.get_panels())
        return panels

    @property
    def menu_items(self):
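        """Return the menu items declared by all submodules, grouped by menu
        and sorted by priority."""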
        from operator import attrgetter

        menu_items = defaultdict(list)
        for module in self.submodules:
            for key, value in module.menu_items.items():
                menu_items[key].extend(value)
        menu_items = dict((key, sorted(value, key=attrgetter('priority')))
                          for key, value in menu_items.items())
        return menu_items

    def register_logout_hook(self, module):
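        """Register a module's on_logout method so it is called when the user
        logs out."""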
        if hasattr(module, 'on_logout') and \
                type(getattr(module, 'on_logout')) == MethodType:
            self.logout_hooks.append(module)


def _find_blueprint():
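    """Return the blueprint handling the current request, if any."""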
    if request.blueprint:
        return current_app.blueprints[request.blueprint]


current_blueprint = LocalProxy(_find_blueprint)


def create_app(app_name=None):
    """Create the Flask application, start up logging and dynamically load
    additional modules (blueprints) that are found in this directory."""
    # Configuration settings
    import config
    if not app_name:
        app_name = config.APP_NAME

    # Only enable password related functionality in server mode.
    if config.SERVER_MODE is True:
        # Sometimes we need to access these config params where the
        # application context is not available (we can't use
        # current_app.config in those cases, even with
        # current_app.app_context()), so update these params in config
        # itself. The updated config values will also be picked up by the
        # application, since we are updating config before the application
        # instance is created.
        config.SECURITY_RECOVERABLE = True
        config.SECURITY_CHANGEABLE = True
        # The change password page is opened in an alertify dialog, and we
        # don't want it to redirect to the main page after the password
        # change operation, so we open the same password change page again.
        config.SECURITY_POST_CHANGE_VIEW = 'browser.change_password'

    app = PgAdmin(__name__, static_url_path='/static')
    # Removes unwanted whitespace from render_template function
    app.jinja_env.trim_blocks = True
    app.config.from_object(config)
    app.config.update(dict(PROPAGATE_EXCEPTIONS=True))

    ##########################################################################
    # Setup logging and log the application startup
    ##########################################################################

    # We don't care about errors in the logging system; we are more
    # interested in application errors.
    logging.raiseExceptions = False

    # Add SQL level logging, and set the base logging level
    logging.addLevelName(25, 'SQL')
    app.logger.setLevel(logging.DEBUG)
    app.logger.handlers = []

    # We also need to update the handler on the webserver in order to see
    # requests. Setting the level prevents werkzeug from setting up its own
    # stream handler, thus ensuring all the logging goes through the pgAdmin
    # logger.
    logger = logging.getLogger('werkzeug')
    logger.setLevel(logging.INFO)

    # Set SQLITE_PATH to TEST_SQLITE_PATH while running test cases
    if (
        'PGADMIN_TESTING_MODE' in os.environ and
        os.environ['PGADMIN_TESTING_MODE'] == '1'
    ):
        config.SQLITE_PATH = config.TEST_SQLITE_PATH

    # Ensure the various working directories exist
    from pgadmin.setup import create_app_data_directory, db_upgrade
    create_app_data_directory(config)

    # File logging
    fh = logging.FileHandler(config.LOG_FILE, encoding='utf-8')
    fh.setLevel(config.FILE_LOG_LEVEL)
    fh.setFormatter(logging.Formatter(config.FILE_LOG_FORMAT))
    app.logger.addHandler(fh)
    logger.addHandler(fh)

    # Console logging
    ch = logging.StreamHandler()
    ch.setLevel(config.CONSOLE_LOG_LEVEL)
    ch.setFormatter(logging.Formatter(config.CONSOLE_LOG_FORMAT))
    app.logger.addHandler(ch)
    logger.addHandler(ch)

    # Log the startup
    app.logger.info('########################################################')
    app.logger.info('Starting %s v%s...', config.APP_NAME, config.APP_VERSION)
    app.logger.info('########################################################')
    app.logger.debug("Python syspath: %s", sys.path)

    ##########################################################################
    # Setup i18n
    ##########################################################################

    # Initialise i18n
    babel = Babel(app)

    app.logger.debug('Available translations: %s' % babel.list_translations())

    @babel.localeselector
    def get_locale():
        """Get the language for the user."""
        language = 'en'
        if config.SERVER_MODE is False:
            # Get the user language preference from the miscellaneous module
            if current_user.is_authenticated:
                user_id = current_user.id
            else:
                user = user_datastore.get_user(config.DESKTOP_USER)
                if user is not None:
                    user_id = user.id
            user_language = Preferences.raw_value(
                'miscellaneous', 'user_language', None, user_id
            )
            if user_language is not None:
                language = user_language
        else:
            # If the language is available in the request then use it,
            # otherwise check the session or cookie
            data = request.form
            if 'language' in data:
                language = data['language'] or language
                setattr(session, 'PGADMIN_LANGUAGE', language)
            elif hasattr(session, 'PGADMIN_LANGUAGE'):
                language = getattr(session, 'PGADMIN_LANGUAGE', language)
            elif hasattr(request.cookies, 'PGADMIN_LANGUAGE'):
                language = getattr(
                    request.cookies, 'PGADMIN_LANGUAGE', language
                )

        return language

    ##########################################################################
    # Setup authentication
    ##########################################################################

    app.config['SQLALCHEMY_DATABASE_URI'] = u'sqlite:///{0}?timeout={1}' \
        .format(config.SQLITE_PATH.replace(u'\\', u'/'),
                getattr(config, 'SQLITE_TIMEOUT', 500)
                )

    # Create database connection object and mailer
    db.init_app(app)

    ##########################################################################
    # Upgrade the schema (if required)
    ##########################################################################
    with app.app_context():
        # Run the migration for the first time, i.e. create the database
        from config import SQLITE_PATH

        # If the version is not available, the user must have aborted; the
        # tables are not created and so it's an empty db
        if not os.path.exists(SQLITE_PATH) or get_version() == -1:
            db_upgrade(app)
        else:
            schema_version = get_version()

            # Run the migration if the current schema version is greater than
            # or equal to the schema version stored in the version table
            if CURRENT_SCHEMA_VERSION >= schema_version:
                db_upgrade(app)

            # Update the schema version to the latest
            if CURRENT_SCHEMA_VERSION > schema_version:
                set_version(CURRENT_SCHEMA_VERSION)
                db.session.commit()

    os.chmod(config.SQLITE_PATH, 0o600)

    Mail(app)

    import pgadmin.utils.paths as paths
    paths.init_app(app)

    # Setup Flask-Security
    user_datastore = SQLAlchemyUserDatastore(db, User, Role)
    security = Security(None, user_datastore)

    ##########################################################################
    # Setup security
    ##########################################################################
    with app.app_context():
        config.CSRF_SESSION_KEY = Keys.query.filter_by(
            name='CSRF_SESSION_KEY').first().value
        config.SECRET_KEY = Keys.query.filter_by(
            name='SECRET_KEY').first().value
        config.SECURITY_PASSWORD_SALT = Keys.query.filter_by(
            name='SECURITY_PASSWORD_SALT').first().value

    # Update the app.config with the proper security keys for signing CSRF
    # data, signing cookies, and the SALT for hashing the passwords.
    app.config.update(dict({
        'CSRF_SESSION_KEY': config.CSRF_SESSION_KEY,
        'SECRET_KEY': config.SECRET_KEY,
        'SECURITY_PASSWORD_SALT': config.SECURITY_PASSWORD_SALT,
        'SESSION_COOKIE_DOMAIN': config.SESSION_COOKIE_DOMAIN
    }))

    security.init_app(app, user_datastore)

    # Register the custom unauthorised handler.
    app.login_manager.unauthorized_handler(pga_unauthorised)

    # Set the permanent session lifetime to the value specified in the config
    # file.
    app.permanent_session_lifetime = timedelta(
        days=config.SESSION_EXPIRATION_TIME)

    app.session_interface = create_session_interface(
        app, config.SESSION_SKIP_PATHS
    )

    # Make the session more secure against XSS & CSRF when running in web mode
    if config.SERVER_MODE:
        paranoid = Paranoid(app)
        paranoid.redirect_view = 'browser.index'

    ##########################################################################
    # Load all available server drivers
    ##########################################################################
    driver.init_app(app)

    ##########################################################################
    # Register language to the preferences after login
    ##########################################################################
    @user_logged_in.connect_via(app)
    def register_language(sender, user):
        # After login, store the language in the preferences if it was sent
        # from the login page
        data = request.form
        if 'language' in data:
            language = data['language']

            # Set the user language preference
            misc_preference = Preferences.module('miscellaneous')
            user_languages = misc_preference.preference(
                'user_language'
            )

            if user_languages and language:
                language = user_languages.set(language)

    ##########################################################################
    # Register any local servers we can discover
    ##########################################################################
    @user_logged_in.connect_via(app)
    def on_user_logged_in(sender, user):
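        """Auto-discover local PostgreSQL/EnterpriseDB installations (via the
        Windows registry or /etc/postgres-reg.ini) and register them as
        servers for the user who has just logged in."""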
        # Keep hold of the user ID
        user_id = user.id

        # Get the first server group for the user
        servergroup_id = 1
        servergroups = ServerGroup.query.filter_by(
            user_id=user_id
        ).order_by("id")

        if servergroups.count() > 0:
            servergroup = servergroups.first()
            servergroup_id = servergroup.id

        def add_server(user_id, servergroup_id, name, superuser, port,
                       discovery_id, comment):
            """Add a server to the config database."""
            # Create a server object if needed, and store it.
            servers = Server.query.filter_by(
                user_id=user_id,
                discovery_id=discovery_id
            ).order_by("id")

            if servers.count() > 0:
                return

            svr = Server(user_id=user_id,
                         servergroup_id=servergroup_id,
                         name=name,
                         host='localhost',
                         port=port,
                         maintenance_db='postgres',
                         username=superuser,
                         ssl_mode='prefer',
                         comment=comment,
                         discovery_id=discovery_id)

            db.session.add(svr)
            db.session.commit()

        # Figure out what servers are present
        if winreg is not None:
            arch_keys = set()
            proc_arch = os.environ['PROCESSOR_ARCHITECTURE'].lower()

            try:
                proc_arch64 = os.environ['PROCESSOR_ARCHITEW6432'].lower()
            except Exception as e:
                proc_arch64 = None

            if proc_arch == 'x86' and not proc_arch64:
                arch_keys.add(0)
            elif proc_arch == 'x86' or proc_arch == 'amd64':
                arch_keys.add(winreg.KEY_WOW64_32KEY)
                arch_keys.add(winreg.KEY_WOW64_64KEY)

            for arch_key in arch_keys:
                for server_type in ('PostgreSQL', 'EnterpriseDB'):
                    try:
                        root_key = winreg.OpenKey(
                            winreg.HKEY_LOCAL_MACHINE,
                            "SOFTWARE\\" + server_type + "\\Services", 0,
                            winreg.KEY_READ | arch_key
                        )
                        for i in xrange(0, winreg.QueryInfoKey(root_key)[0]):
                            inst_id = winreg.EnumKey(root_key, i)
                            inst_key = winreg.OpenKey(root_key, inst_id)

                            svr_name = winreg.QueryValueEx(
                                inst_key, 'Display Name'
                            )[0]
                            svr_superuser = winreg.QueryValueEx(
                                inst_key, 'Database Superuser'
                            )[0]
                            svr_port = winreg.QueryValueEx(inst_key, 'Port')[0]
                            svr_discovery_id = inst_id
                            svr_comment = gettext(
                                "Auto-detected %s installation with the data "
                                "directory at %s" % (
                                    winreg.QueryValueEx(
                                        inst_key, 'Display Name'
                                    )[0],
                                    winreg.QueryValueEx(
                                        inst_key, 'Data Directory'
                                    )[0]
                                )
                            )

                            add_server(
                                user_id, servergroup_id, svr_name,
                                svr_superuser, svr_port,
                                svr_discovery_id, svr_comment
                            )

                            inst_key.Close()
                    except Exception as e:
                        pass
        else:
            # We use the postgres-reg.ini file on non-Windows
            try:
                from configparser import ConfigParser
            except ImportError:
                from ConfigParser import ConfigParser  # Python 2

            registry = ConfigParser()

            try:
                registry.read('/etc/postgres-reg.ini')
                sections = registry.sections()

                # Loop over the sections, and get the data from any that are
                # PG or PPAS
                for section in sections:
                    if (
                        section.startswith('PostgreSQL/') or
                        section.startswith('EnterpriseDB/')
                    ):
                        svr_name = registry.get(section, 'Description')
                        svr_superuser = registry.get(section, 'Superuser')

                        # getint throws an exception if the value is blank
                        # (e.g. Port=). In that case, handle the exception and
                        # continue to read the next section of the config file.
                        try:
                            svr_port = registry.getint(section, 'Port')
                        except ValueError:
                            continue

                        svr_discovery_id = section
                        description = registry.get(section, 'Description')
                        data_directory = registry.get(section, 'DataDirectory')
                        if hasattr(str, 'decode'):
                            description = description.decode('utf-8')
                            data_directory = data_directory.decode('utf-8')
                        svr_comment = gettext(u"Auto-detected %s installation "
                                              u"with the data directory at %s" % (
                                                  description,
                                                  data_directory
                                              )
                                              )
                        add_server(user_id, servergroup_id, svr_name,
                                   svr_superuser, svr_port, svr_discovery_id,
                                   svr_comment)

            except Exception as e:
                pass

    @user_logged_in.connect_via(app)
    @user_logged_out.connect_via(app)
    def force_session_write(app, user):
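        """Force the session to be written out on login and logout."""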
        session.force_write = True

    @user_logged_out.connect_via(app)
    def current_user_cleanup(app, user):
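        """Run the registered module logout hooks and ask the driver to clean
        up the connections of the user who has just logged out."""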
        from config import PG_DEFAULT_DRIVER
        from pgadmin.utils.driver import get_driver
        from flask import current_app

        for mdl in current_app.logout_hooks:
            try:
                mdl.on_logout(user)
            except Exception as e:
                current_app.logger.exception(e)

        _driver = get_driver(PG_DEFAULT_DRIVER)
        _driver.gc_own()

    ##########################################################################
    # Load plugin modules
    ##########################################################################
    for module in app.find_submodules('pgadmin'):
        app.logger.info('Registering blueprint module: %s' % module)
        app.register_blueprint(module)
        app.register_logout_hook(module)

    ##########################################################################
    # Handle the desktop login
    ##########################################################################

    @app.before_request
    def before_request():
        """Log in the default user if running in desktop mode"""

        # Check the auth key is valid, if it's set, and we're not in server
        # mode, and it's not a help file request.
        if not config.SERVER_MODE and app.PGADMIN_KEY != '':
            if (
                ('key' not in request.args or
                 request.args['key'] != app.PGADMIN_KEY) and
                request.cookies.get('PGADMIN_KEY') != app.PGADMIN_KEY and
                request.endpoint != 'help.static'
            ):
                abort(401)

        if not config.SERVER_MODE and not current_user.is_authenticated:
            user = user_datastore.get_user(config.DESKTOP_USER)
            # Throw an error if we failed to find the desktop user, to give
            # the sysadmin a hint. We'll continue to try to login anyway as
            # that'll throw a nice 500 error for us.
            if user is None:
                app.logger.error(
                    'The desktop user %s was not found in the configuration '
                    'database.'
                    % config.DESKTOP_USER
                )
                abort(401)
            login_user(user)

    @app.after_request
    def after_request(response):
        if 'key' in request.args:
            domain = dict()
            if config.COOKIE_DEFAULT_DOMAIN and \
                    config.COOKIE_DEFAULT_DOMAIN != 'localhost':
                domain['domain'] = config.COOKIE_DEFAULT_DOMAIN
            response.set_cookie('PGADMIN_KEY', value=request.args['key'],
                                path=config.COOKIE_DEFAULT_PATH,
                                **domain)

        # X-Frame-Options for security
        if config.X_FRAME_OPTIONS != "" and \
                config.X_FRAME_OPTIONS.lower() != "deny":
            response.headers["X-Frame-Options"] = config.X_FRAME_OPTIONS

        return response

    ##########################################################################
    # Cache busting
    ##########################################################################

    # A version number is added to all static file URLs generated by url_for,
    # so that browsers do not serve stale cached assets after the application
    # is upgraded. This is known as cache busting.
    @app.url_defaults
    def add_internal_version(endpoint, values):
        extensions = config.APP_VERSION_EXTN

        # Add the internal version only if it is set
        if config.APP_VERSION_PARAM is not None and \
                config.APP_VERSION_PARAM != '':
            # If there is a filename, add the version
            if 'filename' in values \
                    and values['filename'].endswith(extensions):
                values[config.APP_VERSION_PARAM] = config.APP_VERSION_INT
            else:
                # Sometimes there may be a direct endpoint for some files;
                # there will be only one rule for such endpoints
                urls = [url for url in app.url_map.iter_rules(endpoint)]
                if len(urls) == 1 and urls[0].rule.endswith(extensions):
                    values[config.APP_VERSION_PARAM] = \
                        config.APP_VERSION_INT

    # Strip away the internal version param before passing the request on to
    # the app, as it is required for cache busting only
    @app.url_value_preprocessor
    def strip_version_number(endpoint, values):
        if values and config.APP_VERSION_PARAM in values:
            values.pop(config.APP_VERSION_PARAM)

    ##########################################################################
    # Minify output
    ##########################################################################
    if not config.DEBUG:
        from flask_htmlmin import HTMLMIN
        HTMLMIN(app)

    @app.context_processor
    def inject_blueprint():
        """Inject a reference to the current blueprint, if any."""
        return {
            'current_app': current_app,
            'current_blueprint': current_blueprint
        }

    @app.errorhandler(Exception)
    def all_exception_handler(e):
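        """Log any unhandled exception and return a JSON 500 response."""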
        current_app.logger.error(e, exc_info=True)
        return internal_server_error(errormsg=str(e))

    # Exclude HTTPException from the handler above (all_exception_handler).
    # HTTPExceptions are raised deliberately by the application and should be
    # returned as-is.
    @app.errorhandler(HTTPException)
    def http_exception_handler(e):
        current_app.logger.error(e, exc_info=True)
        return e

    ##########################################################################
    # All done!
    ##########################################################################

    return app
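

# A minimal usage sketch (for illustration only; it is not one of pgAdmin's
# own entry points, which normally drive this factory from the runtime/WSGI
# wrappers). Assuming a valid `config` module is importable, the factory
# could be exercised directly like this:
#
#     from pgadmin import create_app
#
#     app = create_app()            # build and configure the Flask app
#     app.run(host='127.0.0.1')     # serve it with Flask's development server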