Added support for storing pgAdmin configurations in an external database. #1832

Akshay Joshi
2022-10-20 16:18:41 +05:30
committed by GitHub
parent d50e9f7fc2
commit e17c50d304
52 changed files with 963 additions and 1062 deletions

View File

@@ -341,11 +341,14 @@ def create_app(app_name=None):
##########################################################################
# Setup authentication
##########################################################################
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{0}?timeout={1}' \
.format(config.SQLITE_PATH.replace('\\', '/'),
getattr(config, 'SQLITE_TIMEOUT', 500)
)
if config.CONFIG_DATABASE_URI is not None and \
len(config.CONFIG_DATABASE_URI) > 0:
app.config['SQLALCHEMY_DATABASE_URI'] = config.CONFIG_DATABASE_URI
else:
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///{0}?timeout={1}' \
.format(config.SQLITE_PATH.replace('\\', '/'),
getattr(config, 'SQLITE_TIMEOUT', 500)
)
# Override USER_DOES_NOT_EXIST and INVALID_PASSWORD messages from flask.
app.config['SECURITY_MSG_USER_DOES_NOT_EXIST'] = \
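The net effect of this hunk: when CONFIG_DATABASE_URI is set to a non-empty string it is used verbatim as the SQLAlchemy connection URI for the configuration store; otherwise the historical SQLite file is used. A minimal sketch of a config_local.py override, assuming a reachable PostgreSQL instance (host, credentials and database name below are placeholders, not values from the commit):

```python
# config_local.py -- hypothetical override; any non-empty value here takes
# precedence over the bundled SQLite file.
CONFIG_DATABASE_URI = 'postgresql://pgadmin:secret@db.example.com:5432/pgadmin_config'

# Leaving it unset or empty (the default) keeps the SQLite behaviour.
# CONFIG_DATABASE_URI = ''
```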
@@ -358,6 +361,9 @@ def create_app(app_name=None):
##########################################################################
# Upgrade the schema (if required)
##########################################################################
from config import SQLITE_PATH
from pgadmin.setup import db_upgrade
def backup_db_file():
"""
Create a backup of the current database file
@@ -396,48 +402,76 @@ def create_app(app_name=None):
' database'.format(invalid_tb_names))
backup_db_file()
with app.app_context():
# Run migration for the first time i.e. create database
from config import SQLITE_PATH
from pgadmin.setup import db_upgrade
# If version not available, user must have aborted. Tables are not
# created and so it's an empty db
if not os.path.exists(SQLITE_PATH) or get_version() == -1:
# If running in cli mode then don't try to upgrade, just raise
# the exception
if not cli_mode:
upgrade_db()
def run_migration_for_sqlite():
with app.app_context():
# Run migration for the first time i.e. create database
# If version not available, user must have aborted. Tables are not
# created and so it's an empty db
if not os.path.exists(SQLITE_PATH) or get_version() == -1:
# If running in cli mode then don't try to upgrade, just raise
# the exception
if not cli_mode:
upgrade_db()
else:
if not os.path.exists(SQLITE_PATH):
raise FileNotFoundError(
'SQLite database file "' + SQLITE_PATH +
'" does not exist.')
raise RuntimeError(
'The configuration database file is not valid.')
else:
if not os.path.exists(SQLITE_PATH):
raise FileNotFoundError(
'SQLite database file "' + SQLITE_PATH +
'" does not exist.')
raise RuntimeError(
'The configuration database file is not valid.')
else:
schema_version = get_version()
schema_version = get_version()
# Run migration if the current schema version is greater than or
# equal to the schema version stored in the version table
if CURRENT_SCHEMA_VERSION >= schema_version:
upgrade_db()
else:
# check all tables are present in the db.
is_db_error, invalid_tb_names = check_db_tables()
if is_db_error:
app.logger.error(
'Table(s) {0} are missing in the'
' database'.format(invalid_tb_names))
backup_db_file()
# Run migration if the current schema version is greater than or
# equal to the schema version stored in the version table
if CURRENT_SCHEMA_VERSION >= schema_version:
upgrade_db()
else:
# check all tables are present in the db.
is_db_error, invalid_tb_names = check_db_tables()
if is_db_error:
app.logger.error(
'Table(s) {0} are missing in the'
' database'.format(invalid_tb_names))
backup_db_file()
# Update schema version to the latest
if CURRENT_SCHEMA_VERSION > schema_version:
set_version(CURRENT_SCHEMA_VERSION)
db.session.commit()
# Update schema version to the latest
if CURRENT_SCHEMA_VERSION > schema_version:
set_version(CURRENT_SCHEMA_VERSION)
db.session.commit()
if os.name != 'nt':
os.chmod(config.SQLITE_PATH, 0o600)
if os.name != 'nt':
os.chmod(config.SQLITE_PATH, 0o600)
def run_migration_for_others():
with app.app_context():
# Run migration for the first time i.e. create database
# If version not available, user must have aborted. Tables are not
# created and so it's an empty db
try:
if get_version() == -1:
db_upgrade(app)
else:
schema_version = get_version()
# Run migration if the current schema version is greater than or
# equal to the schema version stored in the version table
if CURRENT_SCHEMA_VERSION >= schema_version:
db_upgrade(app)
# Update schema version to the latest
if CURRENT_SCHEMA_VERSION > schema_version:
set_version(CURRENT_SCHEMA_VERSION)
db.session.commit()
except Exception as e:
app.logger.error(e)
# Run the migration as specified by the user's configuration.
if config.CONFIG_DATABASE_URI is not None and \
len(config.CONFIG_DATABASE_URI) > 0:
run_migration_for_others()
else:
run_migration_for_sqlite()
Mail(app)
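Taken together, the removals and additions in this hunk replace the single in-line migration block with two helpers, run_migration_for_sqlite() and run_migration_for_others(), plus a dispatch on CONFIG_DATABASE_URI. A simplified, self-contained sketch of that control flow (stubs stand in for the real helpers; this is not the verbatim pgAdmin code):

```python
class _Config:
    """Stand-in for pgAdmin's config module."""
    CONFIG_DATABASE_URI = ''      # default: fall back to the SQLite file


def run_migration_for_sqlite():
    # Legacy path: upgrade_db(), then chmod 0o600 on non-Windows platforms.
    pass


def run_migration_for_others():
    # External store: db_upgrade(app); failures are logged rather than raised.
    pass


def run_config_db_migration(config):
    # Mirror of the dispatch at the end of the hunk above.
    if config.CONFIG_DATABASE_URI:
        run_migration_for_others()
    else:
        run_migration_for_sqlite()


run_config_db_migration(_Config())   # takes the SQLite branch by default
```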
@@ -553,7 +587,7 @@ def create_app(app_name=None):
user_id=user_id
).order_by("id")
if servergroups.count() > 0:
if int(servergroups.count()) > 0:
servergroup = servergroups.first()
servergroup_id = servergroup.id
@@ -567,7 +601,7 @@ def create_app(app_name=None):
discovery_id=svr_discovery_id
).order_by("id")
if servers.count() > 0:
if int(servers.count()) > 0:
return
svr = Server(user_id=user_id,

View File

@@ -347,7 +347,7 @@ def mfa_delete(auth_name: str) -> bool:
user_id=current_user.id, mfa_auth=auth_name
)
if auth.count() != 0:
if int(auth.count()) != 0:
auth.delete()
db.session.commit()

View File

@@ -831,7 +831,8 @@ class ServerNode(PGChildNodeView):
# Delete the shared server from DB if server
# owner uncheck shared property
self.delete_shared_server(server.name, gid, server.id)
if arg == 'sslcompression':
if arg in ('sslcompression', 'use_ssh_tunnel',
'tunnel_authentication', 'kerberos_conn', 'shared'):
value = 1 if value else 0
self._update_server_details(server, sharedserver,
config_param_map, arg, value)
@@ -1091,11 +1092,12 @@ class ServerNode(PGChildNodeView):
fgcolor=data.get('fgcolor', None),
service=data.get('service', None),
connect_timeout=data.get('connect_timeout', 0),
use_ssh_tunnel=data.get('use_ssh_tunnel', 0),
use_ssh_tunnel=1 if data.get('use_ssh_tunnel', False) else 0,
tunnel_host=data.get('tunnel_host', None),
tunnel_port=data.get('tunnel_port', 22),
tunnel_username=data.get('tunnel_username', None),
tunnel_authentication=data.get('tunnel_authentication', 0),
tunnel_authentication=1 if data.get('tunnel_authentication',
False) else 0,
tunnel_identity_file=data.get('tunnel_identity_file', None),
shared=data.get('shared', None),
passfile=data.get('passfile', None),
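Both hunks in this file normalise flag values to the integers 0/1 before they reach the model. The corresponding columns are integers with 0..1 CHECK constraints (see the model changes below), and while SQLite quietly accepts a Python bool or None there, stricter backends generally will not. A hedged sketch of the pattern, with the argument list taken from the diff:

```python
# Sketch of the normalisation applied above; not pgAdmin's actual helper.
INTEGER_FLAG_ARGS = ('sslcompression', 'use_ssh_tunnel',
                     'tunnel_authentication', 'kerberos_conn', 'shared')


def normalise_flag(arg, value):
    """Map truthy/falsy inputs to 1/0 for integer flag columns."""
    return (1 if value else 0) if arg in INTEGER_FLAG_ARGS else value


assert normalise_flag('shared', True) == 1
assert normalise_flag('use_ssh_tunnel', None) == 0
assert normalise_flag('tunnel_port', 22) == 22
```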

View File

@@ -137,7 +137,10 @@ class BatchProcess(object):
if p is None:
raise LookupError(PROCESS_NOT_FOUND)
tmp_desc = loads(p.desc)
try:
tmp_desc = loads(bytes.fromhex(p.desc))
except Exception:
tmp_desc = loads(p.desc)
# ID
self.id = _id
@@ -228,7 +231,7 @@ class BatchProcess(object):
csv_writer.writerow(_args)
args_val = args_csv_io.getvalue().strip(str('\r\n'))
tmp_desc = dumps(self.desc)
tmp_desc = dumps(self.desc).hex()
j = Process(
pid=int(uid),
@@ -679,7 +682,11 @@ class BatchProcess(object):
:return: return value for details, type_desc and desc related
to process
"""
desc = loads(p.desc)
try:
desc = loads(bytes.fromhex(p.desc))
except Exception:
desc = loads(p.desc)
details = desc
type_desc = ''
current_storage_dir = None
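The background-process description is a serialised Python object; dumps/loads here appear to be pickle's, given the .hex() call on the resulting bytes. Hex-encoding turns the payload into a plain string that any backend can store in a text column, and the try/except keeps rows written before this change (raw, un-hexed payloads) readable. A standalone sketch of the round trip, not pgAdmin's code:

```python
import pickle


def encode_desc(desc_obj) -> str:
    # Pickle, then hex-encode so the value is text-safe on every backend.
    return pickle.dumps(desc_obj).hex()


def decode_desc(stored):
    try:
        return pickle.loads(bytes.fromhex(stored))   # new hex format
    except Exception:
        return pickle.loads(stored)                  # legacy, un-hexed rows


assert decode_desc(encode_desc({'cmd': 'backup'})) == {'cmd': 'backup'}
```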

View File

@@ -169,8 +169,8 @@ class Server(db.Model):
db.CheckConstraint('sslcompression >= 0 AND sslcompression <= 1'),
nullable=False
)
bgcolor = db.Column(db.Text(10), nullable=True)
fgcolor = db.Column(db.Text(10), nullable=True)
bgcolor = db.Column(db.String(10), nullable=True)
fgcolor = db.Column(db.String(10), nullable=True)
service = db.Column(db.Text(), nullable=True)
connect_timeout = db.Column(db.Integer(), nullable=False)
use_ssh_tunnel = db.Column(
@@ -437,8 +437,8 @@ class SharedServer(db.Model):
db.CheckConstraint('sslcompression >= 0 AND sslcompression <= 1'),
nullable=False
)
bgcolor = db.Column(db.Text(10), nullable=True)
fgcolor = db.Column(db.Text(10), nullable=True)
bgcolor = db.Column(db.String(10), nullable=True)
fgcolor = db.Column(db.String(10), nullable=True)
service = db.Column(db.Text(), nullable=True)
connect_timeout = db.Column(db.Integer(), nullable=False)
use_ssh_tunnel = db.Column(
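db.Text(10) happens to work on SQLite, but SQLAlchemy renders the length into the DDL and PostgreSQL rejects a length modifier on text; db.String(10) maps to the portable VARCHAR(10). An illustrative way to inspect the emitted DDL (assumes SQLAlchemy is installed; not part of the commit):

```python
from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects import postgresql
from sqlalchemy.schema import CreateTable

# Compile a table using String(10) for the PostgreSQL dialect and print
# the CREATE TABLE statement; the colour columns come out as VARCHAR(10).
demo = Table('demo', MetaData(),
             Column('id', Integer, primary_key=True),
             Column('bgcolor', String(10)),
             Column('fgcolor', String(10)))
print(CreateTable(demo).compile(dialect=postgresql.dialect()))
```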

View File

@@ -8,7 +8,7 @@
##########################################################################
from .user_info import user_info
from .db_version import get_version, set_version
from .db_version import get_version, set_version, get_version_for_migration
from .db_upgrade import db_upgrade
from .data_directory import create_app_data_directory
from .db_table_check import check_db_tables

View File

@@ -7,12 +7,27 @@
#
##########################################################################
from pgadmin.model import Version
from pgadmin.model import Version, db
from sqlalchemy.orm.session import Session
def get_version():
try:
version = Version.query.filter_by(name='ConfigDB').first()
except Exception:
db.session.rollback()
return -1
if version:
return version.value
else:
return -1
def get_version_for_migration(op):
try:
session = Session(bind=op.get_bind())
version = session.query(Version).filter_by(name='ConfigDB').first()
except Exception:
return -1
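get_version() now goes through the shared db session and rolls back on failure, so an empty or unreadable configuration store does not leave the session in an aborted state. get_version_for_migration() instead binds a plain ORM Session to Alembic's connection, letting migration scripts read the stored schema version on any backend. A hypothetical call site inside a migration module (the conditional is illustrative, not from the commit):

```python
from alembic import op

from pgadmin.setup import get_version_for_migration


def upgrade():
    # Look up the schema version recorded in the config database being
    # migrated; -1 means it could not be read (fresh or broken store).
    schema_version = get_version_for_migration(op)
    if schema_version == -1:
        return  # nothing version-dependent to do in this sketch
    # ... version-dependent migration steps would follow here ...
```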

View File

@@ -1730,12 +1730,12 @@ def get_arguments_sqlite(sid, did, scid, func_id):
"""
"""Get the count of the existing data available in sqlite database"""
dbg_func_args_count = DebuggerFunctionArguments.query.filter_by(
dbg_func_args_count = int(DebuggerFunctionArguments.query.filter_by(
server_id=sid,
database_id=did,
schema_id=scid,
function_id=func_id
).count()
).count())
args_data = []
@@ -1819,13 +1819,13 @@ def set_arguments_sqlite(sid, did, scid, func_id):
try:
for i in range(0, len(data)):
dbg_func_args_exists = DebuggerFunctionArguments.query.filter_by(
server_id=data[i]['server_id'],
database_id=data[i]['database_id'],
schema_id=data[i]['schema_id'],
function_id=data[i]['function_id'],
arg_id=data[i]['arg_id']
).count()
dbg_func_args_exists = int(
DebuggerFunctionArguments.query.filter_by(
server_id=data[i]['server_id'],
database_id=data[i]['database_id'],
schema_id=data[i]['schema_id'],
function_id=data[i]['function_id'],
arg_id=data[i]['arg_id']).count())
# handle the Array list sent from the client
array_string = ''