Mirror of https://salsa.debian.org/freeipa-team/freeipa.git (synced 2025-02-25 18:55:28 -06:00)
Merge branch 'upstream-next' into master-next
@@ -1,5 +1,19 @@
topologies:
build: &build
name: build
cpu: 2
memory: 3800
master_1repl: &master_1repl
name: master_1repl
cpu: 4
memory: 5750
master_1repl_1client: &master_1repl_1client
name: master_1repl_1client
cpu: 4
memory: 6700

jobs:
fedora-25/build:
fedora-27/build:
requires: []
priority: 100
job:
@@ -7,28 +21,44 @@ jobs:
args:
git_repo: '{git_repo}'
git_refspec: '{git_refspec}'
template: &ci-master-f25
name: freeipa/ci-master-f25
version: 0.2.11
template: &ci-master-f27
name: freeipa/ci-master-f27
version: 1.0.1
timeout: 1800
topology: *build

fedora-25/simple_replication:
requires: [fedora-25/build]
fedora-27/simple_replication:
requires: [fedora-27/build]
priority: 50
job:
class: RunPytest
args:
build_url: '{fedora-25/build_url}'
build_url: '{fedora-27/build_url}'
test_suite: test_integration/test_simple_replication.py
template: *ci-master-f25
template: *ci-master-f27
timeout: 3600
topology: *master_1repl

fedora-25/caless:
requires: [fedora-25/build]
fedora-27/caless:
requires: [fedora-27/build]
priority: 50
job:
class: RunPytest
args:
build_url: '{fedora-25/build_url}'
build_url: '{fedora-27/build_url}'
test_suite: test_integration/test_caless.py::TestServerReplicaCALessToCAFull
template: *ci-master-f25
template: *ci-master-f27
timeout: 3600
topology: *master_1repl

fedora-27/external_ca:
requires: [fedora-27/build]
priority: 50
job:
class: RunPytest
args:
build_url: '{fedora-27/build_url}'
test_suite: test_integration/test_external_ca.py::TestExternalCA
template: *ci-master-f27
timeout: 3600
topology: *master_1repl
7 .gitignore vendored
@@ -108,11 +108,8 @@ freeipa2-dev-doc
/client/ipa-join
/client/ipa-rmkeytab

/ipaplatform/override.py
/ipapython/version.py
/ipapython/.DEFAULT_PLUGINS

/ipaplatform/__init__.py
/ipaplatform/constants.py
/ipaplatform/paths.py
/ipaplatform/services.py
/ipaplatform/tasks.py
/ipatests/.cache/
@@ -24,11 +24,12 @@ server:
realm: IPA.TEST
steps:
build:
- make V=0 ${make_target}
- make V=0 ${make_target} LOG_COMPILE='gdb -return-child-result -ex run -ex "thread apply all bt" -ex "quit" --args'
builddep:
- rm -rf /var/cache/dnf/*
- "dnf makecache fast || :"
- dnf builddep -y ${builddep_opts} --spec freeipa.spec.in --best --allowerasing
- "dnf makecache || :"
- dnf builddep -y ${builddep_opts} -D "with_wheels 1" --spec freeipa.spec.in --best --allowerasing
- dnf install -y gdb
cleanup:
- chown -R ${uid}:${gid} ${container_working_dir}
- journalctl -b --no-pager > systemd_journal.log
@@ -40,6 +41,7 @@ steps:
/var/log/krb5kdc.log
/var/log/pki
systemd_journal.log
`find daemons -name '*.log' -print`
- chown ${uid}:${gid} ${container_working_dir}/var_log.tar
configure:
- ./autogen.sh
@@ -53,6 +55,9 @@ steps:
lint:
- make PYTHON=/usr/bin/python2 V=0 lint
- make PYTHON=/usr/bin/python3 V=0 pylint
tox:
# just run one pylint and one Python 3 target (time/coverage trade-off)
- tox -e py27,py36,pypi,pylint3
prepare_tests:
- echo ${server_password} | kinit admin && ipa ping
- cp -r /etc/ipa/* ~/.ipa/
@@ -26,13 +26,24 @@ server:
realm: IPA.TEST
steps:
build:
- make V=0 ${make_target}
- make V=0 ${make_target} LOG_COMPILE='gdb -return-child-result -ex run -ex "thread apply all bt" -ex "quit" --args'
builddep:
- rm -rf /var/cache/dnf/*
- "dnf makecache fast || :"
- "dnf makecache || :"
- dnf builddep -y ${builddep_opts} --spec freeipa.spec.in --best --allowerasing
- dnf install -y gdb
cleanup:
- chown -R ${uid}:${gid} ${container_working_dir}
- >
tar --ignore-failed-read -cvf ${container_working_dir}/var_log.tar
/var/log/dirsrv
/var/log/httpd
/var/log/ipa*
/var/log/krb5kdc.log
/var/log/pki
systemd_journal.log
`find daemons -name '*.log' -print`
- chown ${uid}:${gid} ${container_working_dir}/var_log.tar
configure:
- ./autogen.sh
install_packages:
75 .travis.yml
@@ -3,12 +3,12 @@ services:
- docker

python:
- "2.7"
- "3.6"
cache: pip
env:
global:
- TEST_RUNNER_IMAGE="martbab/freeipa-fedora-test-runner:master-latest"
PEP8_ERROR_LOG="pep8_errors.log"
- TEST_RUNNER_IMAGE="freeipa/freeipa-test-runner:master-latest"
PEP8_ERROR_LOG="pycodestyle_errors.log"
CI_RESULTS_LOG="ci_results_${TRAVIS_BRANCH}.log"
CI_BACKLOG_SIZE=5000
CI_RUNNER_LOGS_DIR="/tmp/test-runner-logs"
@@ -27,85 +27,38 @@ env:
test_install
test_ipaclient
test_ipalib
test_ipaplatform
test_ipapython
test_ipaserver
test_pkcs10
test_xmlrpc/test_[l-z]*.py"
- TASK_TO_RUN="run-tests"
PYTHON=/usr/bin/python3
TEST_RUNNER_CONFIG=".test_runner_config_py3_temp.yaml"
TESTS_TO_RUN="test_xmlrpc/test_add_remove_cert_cmd.py
test_xmlrpc/test_attr.py
test_xmlrpc/test_automember_plugin.py
test_xmlrpc/test_automount_plugin.py
test_xmlrpc/test_baseldap_plugin.py
test_xmlrpc/test_batch_plugin.py
test_xmlrpc/test_cert_plugin.py
test_xmlrpc/test_certprofile_plugin.py
test_xmlrpc/test_config_plugin.py
test_xmlrpc/test_delegation_plugin.py
test_xmlrpc/test_group_plugin.py
test_xmlrpc/test_hbac_plugin.py
test_xmlrpc/test_hbacsvcgroup_plugin.py
test_xmlrpc/test_hbactest_plugin.py
test_xmlrpc/test_host_plugin.py
test_xmlrpc/test_hostgroup_plugin.py
test_xmlrpc/test_krbtpolicy.py
test_xmlrpc/test_kerberos_principal_aliases.py"
### Tests which haven't been ported to py3 yet ###
## test_xmlrpc/test_[a-k]*.py
# test_xmlrpc/test_ca_plugin.py
# test_xmlrpc/test_caacl_plugin.py
# test_xmlrpc/test_caacl_profile_enforcement.py
# test_xmlrpc/test_dns_plugin.py
# test_xmlrpc/test_dns_realmdomains_integration.py
# test_xmlrpc/test_external_members.py
# test_xmlrpc/test_idviews_plugin.py
TESTS_TO_RUN="test_xmlrpc/test_[a-k]*.py"
- TASK_TO_RUN="run-tests"
PYTHON=/usr/bin/python3
TEST_RUNNER_CONFIG=".test_runner_config_py3_temp.yaml"
TESTS_TO_RUN="test_cmdline
test_install
test_ipaclient
test_ipalib
test_ipaplatform
test_ipapython
test_ipaserver
test_pkcs10
test_xmlrpc/test_location_plugin.py
test_xmlrpc/test_nesting.py
test_xmlrpc/test_netgroup_plugin.py
test_xmlrpc/test_old_permission_plugin.py
test_xmlrpc/test_passwd_plugin.py
test_xmlrpc/test_permission_plugin.py
test_xmlrpc/test_ping_plugin.py
test_xmlrpc/test_privilege_plugin.py
test_xmlrpc/test_pwpolicy_plugin.py
test_xmlrpc/test_radiusproxy_plugin.py
test_xmlrpc/test_realmdomains_plugin.py
test_xmlrpc/test_replace.py
test_xmlrpc/test_role_plugin.py
test_xmlrpc/test_selfservice_plugin.py
test_xmlrpc/test_selinuxusermap_plugin.py
test_xmlrpc/test_service_plugin.py
test_xmlrpc/test_servicedelegation_plugin.py
test_xmlrpc/test_stageuser_plugin.py
test_xmlrpc/test_sudocmd_plugin.py
test_xmlrpc/test_sudocmdgroup_plugin.py
test_xmlrpc/test_sudorule_plugin.py"
### Tests which haven't been ported to py3 yet ###
## test_xmlrpc/test_[l-z]*.py
# test_xmlrpc/test_range_plugin.py
# test_xmlrpc/test_trust_plugin.py
# test_xmlrpc/test_vault_plugin.py
test_xmlrpc/test_[l-z]*.py"
- TASK_TO_RUN="tox"
TEST_RUNNER_CONFIG=".test_runner_config.yaml"
install:
- pip install --upgrade pip
- pip3 install --upgrade pip
- pip install pep8
- pip3 install pycodestyle
- >
pip3 install
git+https://github.com/freeipa/ipa-docker-test-runner@release-0-2-1
git+https://github.com/freeipa/ipa-docker-test-runner@release-0-2-2

script:
- mkdir -p $CI_RUNNER_LOGS_DIR
- travis_wait 50 ./.travis_run_task.sh
- test -z "`cat $PEP8_ERROR_LOG`"
after_failure:
- echo "Test runner output:"; tail -n $CI_BACKLOG_SIZE $CI_RESULTS_LOG
- echo "PEP-8 errors:"; cat $PEP8_ERROR_LOG
@@ -5,7 +5,6 @@
# NOTE: this script is intended to run in Travis CI only

test_set=""
developer_mode_opt="--developer-mode"

if [[ $PYTHON == "/usr/bin/python2" ]]
then
@@ -14,6 +13,15 @@ else
env_opt=""
fi

case "$TASK_TO_RUN" in
lint|tox)
# disable developer mode for lint and tox tasks.
developer_mode_opt=""
;;
*)
developer_mode_opt="--developer-mode"
;;
esac

function truncate_log_to_test_failures() {
# chop off everything in the CI_RESULTS_LOG preceding pytest error output
@@ -30,11 +38,8 @@ if [[ "$TASK_TO_RUN" == "lint" ]]
then
if [[ "$TRAVIS_EVENT_TYPE" == "pull_request" ]]
then
git diff origin/$TRAVIS_BRANCH -U0 | pep8 --diff &> $PEP8_ERROR_LOG ||:
git diff origin/$TRAVIS_BRANCH -U0 | pycodestyle --diff &> $PEP8_ERROR_LOG ||:
fi

# disable developer mode for lint task, otherwise we get an error
developer_mode_opt=""
fi

if [[ -n "$TESTS_TO_RUN" ]]
@@ -9,5 +9,5 @@ ipapython == @VERSION@
ipaserver == @VERSION@
ipatests == @VERSION@

# see https://pagure.io/freeipa/issue/6874
pylint < 1.7
# we include some checks available only in pylint-1.7 and on
pylint >= 1.7
2 API.txt
@@ -783,7 +783,7 @@ option: Str('version?')
output: Output('result')
command: cert_request/1
args: 1,9,3
arg: Str('csr', cli_name='csr_file')
arg: CertificateSigningRequest('csr', cli_name='csr_file')
option: Flag('add', autofill=True, default=False)
option: Flag('all', autofill=True, cli_name='all', default=False)
option: Str('cacn?', autofill=True, cli_name='ca', default=u'ipa')
14 BUILD.txt
@@ -7,7 +7,7 @@ For more information, see http://www.freeipa.org/page/Build

The quickest way to get the dependencies needed for building is:

# dnf builddep -b -D "with_lint 1" --spec freeipa.spec.in
# dnf builddep -b -D "with_python3 1" -D "with_wheels 1" -D "with_lint 1" --spec freeipa.spec.in --best --allowerasing

TIP: For building with latest dependencies for freeipa master enable copr repo:

@@ -66,9 +66,9 @@ changes are required.
Testing
-------

For more information, see http://www.freeipa.org/page/Testing
For more information, see https://www.freeipa.org/page/Testing

We use python nosetests to test for regressions in the management framework
We use python pytest to test for regressions in the management framework
and plugins. All test dependencies are required by the freeipa-tests package.

To run all of the tests you will need 2 sessions, one to run the lite-server
@@ -82,6 +82,14 @@ Some tests may be skipped. For example, all the XML-RPC tests will be skipped
if you haven't started the lite-server. The DNS tests will be skipped if
the underlying IPA installation doesn't configure DNS, etc.

To just execute fast unittest and code linters, use the fastcheck target.
Fast tests only execute a subset of the test suite that does not depend on
an initialized API and server instance. Fast linting just verifies modified
files / lines.

% make fastcheck

API.txt
-------
The purpose of the file API.txt is to prevent accidental API changes. The
@@ -10,6 +10,7 @@ Developers:
Tomáš Babej
Martin Babinsky
Kyle Baker
Felipe Barreto
Jan Barta
Martin Bašti
Sylvain Baubeau
@@ -26,6 +27,7 @@ Developers:
Rob Crittenden
Frank Cusack
Nalin Dahyabhai
Rishabh Dave
Don Davis
John Dennis
Jason Gerard DeRose
@@ -38,6 +40,7 @@ Developers:
Lewis Eason
Drew Erny
Oleg Fayans
felipe
Jérôme Fenal
Fabiano Fidêncio
Stephen Gallagher
@@ -52,6 +55,7 @@ Developers:
Abhijeet Kasurde
Nathan Kinder
Krzysztof Klimonda
Alexander Koksharov
Nikolai Kondrashov
Martin Košek
David Kreitschmann
@@ -77,6 +81,7 @@ Developers:
Rich Megginson
Jim Meyering
Adam Misnyovszki
John Morris
Niranjan MR
Marko Myllynen
Martin Nagy
@@ -111,6 +116,7 @@ Developers:
Andrew Wnuk
Jason Woods
Adam Young
Mohammad Rizwan Yusuf
Jan Zelený
Alex Zeleznikov
Michal Židek
74
Makefile.am
74
Makefile.am
@@ -8,22 +8,23 @@ if WITH_IPATESTS
|
||||
IPATESTS_SUBDIRS = ipatests
|
||||
endif
|
||||
|
||||
IPACLIENT_SUBDIRS = ipaclient ipalib ipapython
|
||||
IPA_PLACEHOLDERS = freeipa ipa ipaplatform ipaserver ipatests
|
||||
IPACLIENT_SUBDIRS = ipaclient ipalib ipaplatform ipapython
|
||||
IPA_PLACEHOLDERS = freeipa ipa ipaserver ipatests
|
||||
SUBDIRS = asn1 util client contrib po pypi \
|
||||
$(IPACLIENT_SUBDIRS) ipaplatform $(IPATESTS_SUBDIRS) $(SERVER_SUBDIRS)
|
||||
$(IPACLIENT_SUBDIRS) $(IPATESTS_SUBDIRS) $(SERVER_SUBDIRS)
|
||||
|
||||
GENERATED_PYTHON_FILES = \
|
||||
$(top_builddir)/ipaplatform/override.py \
|
||||
$(top_builddir)/ipapython/version.py
|
||||
|
||||
MOSTLYCLEANFILES = ipasetup.pyc ipasetup.pyo \
|
||||
ignore_import_errors.pyc ignore_import_errors.pyo \
|
||||
ipasetup.pyc ipasetup.pyo \
|
||||
pylint_plugins.pyc pylint_plugins.pyo
|
||||
|
||||
# user-facing scripts
|
||||
dist_bin_SCRIPTS = ipa
|
||||
|
||||
# files required for build but not installed
|
||||
dist_noinst_SCRIPTS = ignore_import_errors.py \
|
||||
makeapi \
|
||||
dist_noinst_SCRIPTS = makeapi \
|
||||
makeaci \
|
||||
make-doc \
|
||||
make-test \
|
||||
@@ -136,7 +137,7 @@ _srpms-body: _rpms-prep
|
||||
rm -f rm -f $(top_builddir)/.version
|
||||
|
||||
.PHONY: lite-server
|
||||
lite-server: $(top_builddir)/ipapython/version.py
|
||||
lite-server: $(GENERATED_PYTHON_FILES)
|
||||
+$(MAKE) -C $(top_builddir)/install/ui
|
||||
PYTHONPATH=$(top_srcdir) $(PYTHON) -bb \
|
||||
contrib/lite-server.py $(LITESERVER_ARGS)
|
||||
@@ -168,13 +169,13 @@ if ! WITH_PYTHON2
|
||||
@echo "ERROR: python2 not available"; exit 1
|
||||
endif
|
||||
@ # run all linters, tests, and check with Python 2
|
||||
PYTHONPATH=$(top_srcdir) $(PYTHON2) ipatests/ipa-run-tests \
|
||||
PYTHONPATH=$(abspath $(top_srcdir)) $(PYTHON2) ipatests/ipa-run-tests \
|
||||
--ipaclient-unittests
|
||||
$(MAKE) $(AM_MAKEFLAGS) acilint apilint polint jslint check
|
||||
$(MAKE) $(AM_MAKEFLAGS) PYTHON=$(PYTHON2) pylint
|
||||
if WITH_PYTHON3
|
||||
@ # just tests and pylint on Python 3
|
||||
PYTHONPATH=$(top_srcdir) $(PYTHON3) ipatests/ipa-run-tests \
|
||||
PYTHONPATH=$(abspath $(top_srcdir)) $(PYTHON3) ipatests/ipa-run-tests \
|
||||
--ipaclient-unittests
|
||||
$(MAKE) $(AM_MAKEFLAGS) PYTHON=$(PYTHON3) pylint
|
||||
else
|
||||
@@ -182,16 +183,63 @@ else
|
||||
endif
|
||||
@echo "All tests passed."
|
||||
|
||||
.PHONY: fastcheck fasttest fastlint
|
||||
fastcheck:
|
||||
if WITH_PYTHON2
|
||||
@$(MAKE) -j1 $(AM_MAKEFLAGS) PYTHON=$(PYTHON2) fastlint fasttest
|
||||
endif
|
||||
if WITH_PYTHON3
|
||||
@$(MAKE) -j1 $(AM_MAKEFLAGS) PYTHON=$(PYTHON3) fastlint fasttest
|
||||
endif
|
||||
|
||||
fasttest: $(GENERATED_PYTHON_FILES) ipasetup.py
|
||||
@ # --ignore doubles speed of total test run compared to pytest.skip()
|
||||
@ # on module.
|
||||
PYTHONPATH=$(abspath $(top_srcdir)) $(PYTHON3) ipatests/ipa-run-tests \
|
||||
--skip-ipaapi \
|
||||
--ignore $(abspath $(top_srcdir))/ipatests/test_integration \
|
||||
--ignore $(abspath $(top_srcdir))/ipatests/test_xmlrpc
|
||||
|
||||
fastlint: $(GENERATED_PYTHON_FILES) ipasetup.py
|
||||
if ! WITH_PYLINT
|
||||
@echo "ERROR: pylint not available"; exit 1
|
||||
endif
|
||||
@echo "Fast linting with $(PYTHON) from branch '$(GIT_BRANCH)'"
|
||||
|
||||
@MERGEBASE=$$(git merge-base --fork-point $(GIT_BRANCH)); \
|
||||
FILES=$$(git diff --name-only $${MERGEBASE} \
|
||||
| grep -E '\.py$$'); \
|
||||
if [ -n "$${FILES}" ]; then \
|
||||
echo "Fast linting files: $${FILES}"; \
|
||||
echo "pylint"; \
|
||||
echo "------"; \
|
||||
PYTHONPATH=$(abspath $(top_srcdir)) $(PYTHON) -m pylint \
|
||||
--rcfile=$(top_srcdir)/pylintrc \
|
||||
--load-plugins pylint_plugins \
|
||||
$${FILES} || exit $?; \
|
||||
echo "pycodestyle"; \
|
||||
echo "-----------"; \
|
||||
git diff $${MERGEBASE} | \
|
||||
$(PYTHON) -m pycodestyle --diff || exit $?; \
|
||||
else \
|
||||
echo "No modified Python files found"; \
|
||||
fi
|
||||
|
||||
|
||||
.PHONY: $(top_builddir)/ipaplatform/override.py
|
||||
$(top_builddir)/ipaplatform/override.py:
|
||||
(cd $(top_builddir)/ipaplatform && make override.py)
|
||||
|
||||
.PHONY: $(top_builddir)/ipapython/version.py
|
||||
$(top_builddir)/ipapython/version.py:
|
||||
(cd $(top_builddir)/ipapython && make version.py)
|
||||
|
||||
.PHONY: acilint
|
||||
acilint: $(top_builddir)/ipapython/version.py
|
||||
acilint: $(GENERATED_PYTHON_FILES)
|
||||
cd $(srcdir); ./makeaci --validate
|
||||
|
||||
.PHONY: apilint
|
||||
apilint: $(top_builddir)/ipapython/version.py
|
||||
apilint: $(GENERATED_PYTHON_FILES)
|
||||
cd $(srcdir); ./makeapi --validate
|
||||
|
||||
.PHONY: polint
|
||||
@@ -206,7 +254,7 @@ polint:
|
||||
.PHONY: pylint
|
||||
|
||||
if WITH_PYLINT
|
||||
pylint: $(top_builddir)/ipapython/version.py ipasetup.py
|
||||
pylint: $(GENERATED_PYTHON_FILES) ipasetup.py
|
||||
FILES=`find $(top_srcdir) \
|
||||
-type d -exec test -e '{}/__init__.py' \; -print -prune -o \
|
||||
-path './rpmbuild' -prune -o \
|
||||
|
||||
12 VERSION.m4
@@ -21,7 +21,7 @@
########################################################
define(IPA_VERSION_MAJOR, 4)
define(IPA_VERSION_MINOR, 6)
define(IPA_VERSION_RELEASE, 0)
define(IPA_VERSION_RELEASE, 2)

########################################################
# For 'pre' releases the version will be               #
@@ -48,6 +48,15 @@ define(IPA_VERSION_PRE_RELEASE, )
########################################################
define(IPA_VERSION_IS_GIT_SNAPSHOT, no)

########################################################
# git development branch:                              #
#                                                      #
# - master: define(IPA_GIT_BRANCH, master)             #
# - ipa-X-X: define(IPA_GIT_BRANCH,                    #
#       ipa-IPA_VERSION_MAJOR-IPA_VERSION_MINOR)       #
########################################################
define(IPA_GIT_BRANCH, ipa-IPA_VERSION_MAJOR-IPA_VERSION_MINOR)

########################################################
# The version of IPA data. This is used to identify    #
# incompatibilities in data that could cause issues    #
@@ -128,6 +137,7 @@ NEWLINE)) dnl IPA_VERSION end
dnl DEBUG: uncomment following lines and run command m4 VERSION.m4
dnl `IPA_VERSION: ''IPA_VERSION'
dnl `IPA_GIT_VERSION: ''IPA_GIT_VERSION'
dnl `IPA_GIT_BRANCH: ''IPA_GIT_BRANCH'
dnl `IPA_API_VERSION: ''IPA_API_VERSION'
dnl `IPA_DATA_VERSION: ''IPA_DATA_VERSION'
dnl `IPA_NUM_VERSION: ''IPA_NUM_VERSION'
@@ -44,10 +44,15 @@ provided, so the principal name is just the service
name and hostname (ldap/foo.example.com from the
example above).

ipa-getkeytab is used during IPA client enrollment to retrieve a host service principal and store it in /etc/krb5.keytab. It is possible to retrieve the keytab without Kerberos credentials if the host was pre\-created with a one\-time password. The keytab can be retrieved by binding as the host and authenticating with this one\-time password. The \fB\-D|\-\-binddn\fR and \fB\-w|\-\-bindpw\fR options are used for this authentication.

\fBWARNING:\fR retrieving the keytab resets the secret for the Kerberos principal.
This renders all other keytabs for that principal invalid.
When multiple hosts or services need to share the same key (for instance in high availability or load balancing clusters), the \fB\-r\fR option must be used to retrieve the existing key instead of generating a new one (please refer to the EXAMPLES section).

Note that the user or host calling \fBipa-getkeytab\fR needs to be allowed to generate the key with \fBipa host\-allow\-create\-keytab\fR or \fBipa service\-allow\-create\-keytab\fR,
and the user or host calling \fBipa-getkeytab \-r\fR needs to be allowed to retrieve the keytab for the host or service with \fBipa host\-allow\-retrieve\-keytab\fR or \fBipa service\-allow\-retrieve\-keytab\fR.

This is used during IPA client enrollment to retrieve a host service principal and store it in /etc/krb5.keytab. It is possible to retrieve the keytab without Kerberos credentials if the host was pre\-created with a one\-time password. The keytab can be retrieved by binding as the host and authenticating with this one\-time password. The \fB\-D|\-\-binddn\fR and \fB\-w|\-\-bindpw\fR options are used for this authentication.
.SH "OPTIONS"
.TP
\fB\-p principal\-name\fR
@@ -118,16 +123,44 @@ keytab must have access to the keys for this operation to succeed.
Add and retrieve a keytab for the NFS service principal on
the host foo.example.com and save it in the file /tmp/nfs.keytab and retrieve just the des\-cbc\-crc key.

.nf
# ipa\-getkeytab \-p nfs/foo.example.com \-k /tmp/nfs.keytab \-e des\-cbc\-crc
.fi

Add and retrieve a keytab for the ldap service principal on
the host foo.example.com and save it in the file /tmp/ldap.keytab.

.nf
# ipa\-getkeytab \-s ipaserver.example.com \-p ldap/foo.example.com \-k /tmp/ldap.keytab
.fi

Retrieve a keytab using LDAP credentials (this will typically be done by \fBipa\-join(1)\fR when enrolling a client using the \fBipa\-client\-install(1)\fR command):

.nf
# ipa\-getkeytab \-s ipaserver.example.com \-p host/foo.example.com \-k /etc/krb5.keytab \-D fqdn=foo.example.com,cn=computers,cn=accounts,dc=example,dc=com \-w password
.fi

Add and retrieve a keytab for a clustered HTTP service deployed on client1.example.com and client2.example.com (already enrolled), using the client-frontend.example.com host name:

.nf
# ipa host-add client-frontend.example.com --ip-address 10.1.2.3
# ipa service-add HTTP/client-frontend.example.com
# ipa service-allow-retrieve-keytab HTTP/client-frontend.example.com --hosts={client1.example.com,client2.example.com}
# ipa service-allow-create-keytab HTTP/client-frontend.example.com --hosts=client1.example.com
.fi

On client1, generate and retrieve a new keytab for client-frontend.example.com:
.nf
# kinit -k
# ipa-getkeytab -p HTTP/client-frontend.example.com -k /tmp/http.keytab

.fi
On client2, retrieve the existing keytab for client-frontend.example.com:
.nf
# kinit -k
# ipa-getkeytab -r -p HTTP/client-frontend.example.com -k /tmp/http.keytab
.fi

.SH "EXIT STATUS"
The exit status is 0 on success, nonzero on error.
34
configure.ac
34
configure.ac
@@ -140,30 +140,6 @@ PKG_CHECK_EXISTS(cmocka,
|
||||
)
|
||||
AM_CONDITIONAL([HAVE_CMOCKA], [test x$have_cmocka = xyes])
|
||||
|
||||
dnl A macro to check presence of a cwrap (http://cwrap.org) wrapper on the system
|
||||
dnl Usage:
|
||||
dnl AM_CHECK_WRAPPER(name, conditional)
|
||||
dnl If the cwrap library is found, sets the HAVE_$name conditional
|
||||
AC_DEFUN([AM_CHECK_WRAPPER],
|
||||
[
|
||||
FOUND_WRAPPER=0
|
||||
|
||||
AC_MSG_CHECKING([for $1])
|
||||
PKG_CHECK_EXISTS([$1],
|
||||
[
|
||||
AC_MSG_RESULT([yes])
|
||||
FOUND_WRAPPER=1
|
||||
],
|
||||
[
|
||||
AC_MSG_RESULT([no])
|
||||
AC_MSG_WARN([cwrap library $1 not found, some tests will not run])
|
||||
])
|
||||
|
||||
AM_CONDITIONAL($2, [ test x$FOUND_WRAPPER = x1])
|
||||
])
|
||||
|
||||
AM_CHECK_WRAPPER(nss_wrapper, HAVE_NSS_WRAPPER)
|
||||
|
||||
dnl ---------------------------------------------------------------------------
|
||||
dnl - Check for POPT
|
||||
dnl ---------------------------------------------------------------------------
|
||||
@@ -235,6 +211,7 @@ dnl ---------------------------------------------------------------------------
|
||||
AM_COND_IF([ENABLE_SERVER], [
|
||||
m4_include(server.m4)
|
||||
])
|
||||
AM_CONDITIONAL([USE_SSS_NSS_TIMEOUT], [test "x$ac_cv_have_decl_sss_nss_getpwnam_timeout" = xyes])
|
||||
|
||||
dnl ---------------------------------------------------------------------------
|
||||
dnl - Check if IPA certauth plugin can be build
|
||||
@@ -387,6 +364,7 @@ AC_SUBST([NUM_VERSION], [IPA_NUM_VERSION])
|
||||
AC_SUBST(VENDOR_SUFFIX)
|
||||
AC_SUBST([VERSION], [IPA_VERSION])
|
||||
AC_SUBST([GIT_VERSION], [IPA_GIT_VERSION])
|
||||
AC_SUBST([GIT_BRANCH], [IPA_GIT_BRANCH])
|
||||
# used by Makefile.am for files depending on templates
|
||||
AC_SUBST([CONFIG_STATUS])
|
||||
|
||||
@@ -520,13 +498,6 @@ AC_SUBST(LDFLAGS)
|
||||
|
||||
|
||||
# Files
|
||||
AC_CONFIG_LINKS([ipaplatform/__init__.py:ipaplatform/$IPAPLATFORM/__init__.py
|
||||
ipaplatform/constants.py:ipaplatform/$IPAPLATFORM/constants.py
|
||||
ipaplatform/paths.py:ipaplatform/$IPAPLATFORM/paths.py
|
||||
ipaplatform/services.py:ipaplatform/$IPAPLATFORM/services.py
|
||||
ipaplatform/tasks.py:ipaplatform/$IPAPLATFORM/tasks.py
|
||||
])
|
||||
|
||||
AC_CONFIG_FILES([
|
||||
Makefile
|
||||
asn1/Makefile
|
||||
@@ -594,7 +565,6 @@ AC_CONFIG_FILES([
|
||||
pypi/Makefile
|
||||
pypi/freeipa/Makefile
|
||||
pypi/ipa/Makefile
|
||||
pypi/ipaplatform/Makefile
|
||||
pypi/ipaserver/Makefile
|
||||
pypi/ipatests/Makefile
|
||||
po/Makefile.in
|
||||
|
||||
@@ -62,6 +62,8 @@ ipadb_la_LIBADD = \
|
||||
$(NULL)
|
||||
|
||||
if HAVE_CMOCKA
|
||||
TESTS_ENVIRONMENT = . $(srcdir)/tests/test_setup.sh;
|
||||
dist_ipa_kdb_tests_SOURCES = tests/test_setup.sh
|
||||
TESTS = ipa_kdb_tests
|
||||
check_PROGRAMS = ipa_kdb_tests
|
||||
endif
|
||||
|
||||
@@ -1259,6 +1259,17 @@ krb5_error_code ipadb_get_principal(krb5_context kcontext,
|
||||
realm,
|
||||
upn->length - (realm - upn->data),
|
||||
&trusted_realm);
|
||||
if (kerr == KRB5_KDB_NOENTRY) {
|
||||
/* try to refresh trusted domain data and try again */
|
||||
kerr = ipadb_reinit_mspac(ipactx, false);
|
||||
if (kerr != 0) {
|
||||
kerr = KRB5_KDB_NOENTRY;
|
||||
goto done;
|
||||
}
|
||||
kerr = ipadb_is_princ_from_trusted_realm(kcontext, realm,
|
||||
upn->length - (realm - upn->data),
|
||||
&trusted_realm);
|
||||
}
|
||||
if (kerr == 0) {
|
||||
kentry = calloc(1, sizeof(krb5_db_entry));
|
||||
if (!kentry) {
|
||||
|
||||
2 daemons/ipa-kdb/tests/test_setup.sh Executable file
@@ -0,0 +1,2 @@
KRB5_CONFIG=/dev/null
export KRB5_CONFIG
@@ -25,6 +25,7 @@ libipa_extdom_extop_la_SOURCES = \
|
||||
ipa_extdom.h \
|
||||
ipa_extdom_extop.c \
|
||||
ipa_extdom_common.c \
|
||||
back_extdom.h \
|
||||
$(NULL)
|
||||
|
||||
libipa_extdom_extop_la_LDFLAGS = -avoid-version
|
||||
@@ -34,20 +35,29 @@ libipa_extdom_extop_la_LIBADD = \
|
||||
$(SSSNSSIDMAP_LIBS) \
|
||||
$(NULL)
|
||||
|
||||
# We have two backends for nss operations:
|
||||
# (1) directly loading nss_sss.so.2
|
||||
# (2) using timeout-enabled API from libsss_nss_idmap
|
||||
# We prefer (2) if available
|
||||
if USE_SSS_NSS_TIMEOUT
|
||||
libipa_extdom_extop_la_SOURCES += back_extdom_sss_idmap.c
|
||||
else
|
||||
libipa_extdom_extop_la_SOURCES += back_extdom_nss_sss.c
|
||||
endif
|
||||
|
||||
|
||||
TESTS =
|
||||
check_PROGRAMS =
|
||||
|
||||
if HAVE_CMOCKA
|
||||
if HAVE_NSS_WRAPPER
|
||||
TESTS_ENVIRONMENT = . ./test_data/test_setup.sh;
|
||||
TESTS += extdom_cmocka_tests
|
||||
check_PROGRAMS += extdom_cmocka_tests
|
||||
endif
|
||||
endif
|
||||
|
||||
extdom_cmocka_tests_SOURCES = \
|
||||
ipa_extdom_cmocka_tests.c \
|
||||
ipa_extdom_common.c \
|
||||
back_extdom_nss_sss.c \
|
||||
$(NULL)
|
||||
extdom_cmocka_tests_CFLAGS = $(CMOCKA_CFLAGS)
|
||||
extdom_cmocka_tests_LDFLAGS = \
|
||||
@@ -58,6 +68,7 @@ extdom_cmocka_tests_LDADD = \
|
||||
$(LDAP_LIBS) \
|
||||
$(DIRSRV_LIBS) \
|
||||
$(SSSNSSIDMAP_LIBS) \
|
||||
-ldl \
|
||||
$(NULL)
|
||||
|
||||
|
||||
|
||||
79 daemons/ipa-slapi-plugins/ipa-extdom-extop/back_extdom.h Normal file
@@ -0,0 +1,79 @@
/*
 * Copyright 2017 Red Hat, Inc.
 *
 * This Program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 of the License.
 *
 * This Program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this Program; if not, write to the
 *
 * Free Software Foundation, Inc.
 * 59 Temple Place, Suite 330
 * Boston, MA 02111-1307 USA
 *
 */

#ifndef BACK_EXTDOM_H
#define BACK_EXTDOM_H
#include <unistd.h>
#include <pwd.h>
#include <grp.h>

/* Possible results of lookup using a nss_* function.
 * Note: don't include nss.h as its path gets overriden by NSS library */
enum nss_status {
    NSS_STATUS_TRYAGAIN = -2,
    NSS_STATUS_UNAVAIL,
    NSS_STATUS_NOTFOUND,
    NSS_STATUS_SUCCESS,
    NSS_STATUS_RETURN
};

/* NSS backend operations implemented using either nss_sss.so.2 or libsss_nss_idmap API */
struct nss_ops_ctx;

int back_extdom_init_context(struct nss_ops_ctx **nss_context);
void back_extdom_free_context(struct nss_ops_ctx **nss_context);
void back_extdom_set_timeout(struct nss_ops_ctx *nss_context,
                             unsigned int timeout);
void back_extdom_evict_user(struct nss_ops_ctx *nss_context,
                            const char *name);
void back_extdom_evict_group(struct nss_ops_ctx *nss_context,
                             const char *name);

enum nss_status back_extdom_getpwnam(struct nss_ops_ctx *nss_context,
                                     const char *name, struct passwd *pwd,
                                     char *buffer, size_t buflen,
                                     struct passwd **result,
                                     int *lerrno);

enum nss_status back_extdom_getpwuid(struct nss_ops_ctx *nss_context,
                                     uid_t uid, struct passwd *pwd,
                                     char *buffer, size_t buflen,
                                     struct passwd **result,
                                     int *lerrno);

enum nss_status back_extdom_getgrnam(struct nss_ops_ctx *nss_context,
                                     const char *name, struct group *grp,
                                     char *buffer, size_t buflen,
                                     struct group **result,
                                     int *lerrno);

enum nss_status back_extdom_getgrgid(struct nss_ops_ctx *nss_context,
                                     gid_t gid, struct group *grp,
                                     char *buffer, size_t buflen,
                                     struct group **result,
                                     int *lerrno);

enum nss_status back_extdom_getgrouplist(struct nss_ops_ctx *nss_context,
                                         const char *name, gid_t group,
                                         gid_t *groups, int *ngroups,
                                         int *lerrno);

#endif /* BACK_EXTDOM_H */
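Editor's note: the header above only declares the backend API; the two implementations follow below. As orientation, here is a minimal, hypothetical caller sketch (not part of this commit) showing the expected call sequence: initialize a context, set the lookup timeout, do a lookup into a caller-supplied buffer, and free the context. The buffer size, the timeout value, and the error handling are illustrative assumptions only.

    #include <stdio.h>
    #include "back_extdom.h"

    /* Hypothetical usage of the back_extdom API declared above. */
    static int lookup_user_example(const char *name)
    {
        struct nss_ops_ctx *ctx = NULL;
        struct passwd pwd, *result = NULL;
        char buf[16384];          /* assumption: fixed buffer is large enough here */
        int lerrno = 0;
        enum nss_status status;

        if (back_extdom_init_context(&ctx) != 0) {
            return -1;            /* no usable backend (nss_sss.so.2 / libsss_nss_idmap) */
        }
        back_extdom_set_timeout(ctx, 10000);  /* unit depends on the selected backend */

        status = back_extdom_getpwnam(ctx, name, &pwd, buf, sizeof(buf),
                                      &result, &lerrno);
        if (status == NSS_STATUS_SUCCESS && result != NULL) {
            printf("uid of %s is %ld\n", name, (long) result->pw_uid);
        }

        back_extdom_free_context(&ctx);
        return (status == NSS_STATUS_SUCCESS) ? 0 : -1;
    }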
276
daemons/ipa-slapi-plugins/ipa-extdom-extop/back_extdom_nss_sss.c
Normal file
276
daemons/ipa-slapi-plugins/ipa-extdom-extop/back_extdom_nss_sss.c
Normal file
@@ -0,0 +1,276 @@
|
||||
/*
|
||||
* Copyright 2013-2017 Red Hat, Inc.
|
||||
*
|
||||
* This Program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; version 2 of the License.
|
||||
*
|
||||
* This Program is distributed in the hope that it will be useful, but
|
||||
* WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this Program; if not, write to the
|
||||
*
|
||||
* Free Software Foundation, Inc.
|
||||
* 59 Temple Place, Suite 330
|
||||
* Boston, MA 02111-1307 USA
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include "config.h"
|
||||
#endif
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <time.h>
|
||||
#include <unistd.h>
|
||||
#include <dlfcn.h>
|
||||
#include <errno.h>
|
||||
#include <pwd.h>
|
||||
#include <grp.h>
|
||||
#include <sys/param.h>
|
||||
#include "back_extdom.h"
|
||||
|
||||
struct nss_ops_ctx {
|
||||
void *dl_handle;
|
||||
long int initgroups_start;
|
||||
|
||||
enum nss_status (*getpwnam_r)(const char *name, struct passwd *result,
|
||||
char *buffer, size_t buflen, int *errnop);
|
||||
enum nss_status (*getpwuid_r)(uid_t uid, struct passwd *result,
|
||||
char *buffer, size_t buflen, int *errnop);
|
||||
enum nss_status (*getgrnam_r)(const char *name, struct group *result,
|
||||
char *buffer, size_t buflen, int *errnop);
|
||||
enum nss_status (*getgrgid_r)(gid_t gid, struct group *result,
|
||||
char *buffer, size_t buflen, int *errnop);
|
||||
enum nss_status (*initgroups_dyn)(const char *user, gid_t group,
|
||||
long int *start, long int *size,
|
||||
gid_t **groups, long int limit,
|
||||
int *errnop);
|
||||
};
|
||||
|
||||
void back_extdom_free_context(struct nss_ops_ctx **nss_context)
|
||||
{
|
||||
if ((nss_context == NULL) || (*nss_context == NULL)) {
|
||||
return;
|
||||
}
|
||||
|
||||
if ((*nss_context)->dl_handle != NULL) {
|
||||
dlclose((*nss_context)->dl_handle);
|
||||
}
|
||||
|
||||
free((*nss_context));
|
||||
*nss_context = NULL;
|
||||
}
|
||||
|
||||
int back_extdom_init_context(struct nss_ops_ctx **nss_context)
|
||||
{
|
||||
struct nss_ops_ctx *ctx = NULL;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
ctx = calloc(1, sizeof(struct nss_ops_ctx));
|
||||
if (ctx == NULL) {
|
||||
return ENOMEM;
|
||||
}
|
||||
*nss_context = ctx;
|
||||
|
||||
ctx->dl_handle = dlopen("libnss_sss.so.2", RTLD_NOW);
|
||||
if (ctx->dl_handle == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->getpwnam_r = dlsym(ctx->dl_handle, "_nss_sss_getpwnam_r");
|
||||
if (ctx->getpwnam_r == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->getpwuid_r = dlsym(ctx->dl_handle, "_nss_sss_getpwuid_r");
|
||||
if (ctx->getpwuid_r == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->getgrnam_r = dlsym(ctx->dl_handle, "_nss_sss_getgrnam_r");
|
||||
if (ctx->getgrnam_r == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->getgrgid_r = dlsym(ctx->dl_handle, "_nss_sss_getgrgid_r");
|
||||
if (ctx->getgrgid_r == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->initgroups_dyn = dlsym(ctx->dl_handle, "_nss_sss_initgroups_dyn");
|
||||
if (ctx->initgroups_dyn == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
fail:
|
||||
back_extdom_free_context(nss_context);
|
||||
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
|
||||
/* Following three functions cannot be implemented with nss_sss.so.2
|
||||
* As result, we simply do nothing here */
|
||||
|
||||
void back_extdom_set_timeout(struct nss_ops_ctx *nss_context,
|
||||
unsigned int timeout) {
|
||||
/* no operation */
|
||||
}
|
||||
|
||||
void back_extdom_evict_user(struct nss_ops_ctx *nss_context,
|
||||
const char *name) {
|
||||
/* no operation */
|
||||
}
|
||||
|
||||
void back_extdom_evict_group(struct nss_ops_ctx *nss_context,
|
||||
const char *name) {
|
||||
/* no operation */
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getpwnam(struct nss_ops_ctx *nss_context,
|
||||
const char *name, struct passwd *pwd,
|
||||
char *buffer, size_t buflen,
|
||||
struct passwd **result,
|
||||
int *lerrno) {
|
||||
enum nss_status ret;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
ret = nss_context->getpwnam_r(name, pwd,
|
||||
buffer, buflen,
|
||||
lerrno);
|
||||
|
||||
if ((ret == NSS_STATUS_SUCCESS) && (result != NULL)) {
|
||||
*result = pwd;
|
||||
*lerrno = 0;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getpwuid(struct nss_ops_ctx *nss_context,
|
||||
uid_t uid, struct passwd *pwd,
|
||||
char *buffer, size_t buflen,
|
||||
struct passwd **result,
|
||||
int *lerrno) {
|
||||
enum nss_status ret;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
ret = nss_context->getpwuid_r(uid, pwd,
|
||||
buffer, buflen,
|
||||
lerrno);
|
||||
|
||||
if ((ret == NSS_STATUS_SUCCESS) && (result != NULL)) {
|
||||
*result = pwd;
|
||||
*lerrno = 0;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getgrnam(struct nss_ops_ctx *nss_context,
|
||||
const char *name, struct group *grp,
|
||||
char *buffer, size_t buflen,
|
||||
struct group **result,
|
||||
int *lerrno) {
|
||||
enum nss_status ret;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
ret = nss_context->getgrnam_r(name, grp,
|
||||
buffer, buflen,
|
||||
lerrno);
|
||||
|
||||
if ((ret == NSS_STATUS_SUCCESS) && (result != NULL)) {
|
||||
*result = grp;
|
||||
*lerrno = 0;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getgrgid(struct nss_ops_ctx *nss_context,
|
||||
gid_t gid, struct group *grp,
|
||||
char *buffer, size_t buflen,
|
||||
struct group **result,
|
||||
int *lerrno) {
|
||||
|
||||
enum nss_status ret;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
ret = nss_context->getgrgid_r(gid, grp,
|
||||
buffer, buflen,
|
||||
lerrno);
|
||||
|
||||
if ((ret == NSS_STATUS_SUCCESS) && (result != NULL)) {
|
||||
*result = grp;
|
||||
*lerrno = 0;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getgrouplist(struct nss_ops_ctx *nss_context,
|
||||
const char *name, gid_t group,
|
||||
gid_t *groups, int *ngroups,
|
||||
int *lerrno) {
|
||||
|
||||
enum nss_status ret = NSS_STATUS_UNAVAIL;
|
||||
long int tsize = MAX (1, *ngroups);
|
||||
gid_t *newgroups = NULL;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
newgroups = (gid_t *) calloc (tsize, sizeof (gid_t));
|
||||
if (newgroups == NULL) {
|
||||
*lerrno = ENOMEM;
|
||||
return NSS_STATUS_TRYAGAIN;
|
||||
}
|
||||
|
||||
newgroups[0] = group;
|
||||
nss_context->initgroups_start = 1;
|
||||
|
||||
ret = nss_context->initgroups_dyn(name, group,
|
||||
&nss_context->initgroups_start,
|
||||
&tsize, &newgroups,
|
||||
-1, lerrno);
|
||||
|
||||
(void) memcpy(groups, newgroups,
|
||||
MIN(*ngroups, nss_context->initgroups_start) * sizeof(gid_t));
|
||||
free(newgroups);
|
||||
|
||||
if (*ngroups < nss_context->initgroups_start) {
|
||||
ret = NSS_STATUS_TRYAGAIN;
|
||||
*lerrno = ERANGE;
|
||||
}
|
||||
|
||||
*ngroups = (int) nss_context->initgroups_start;
|
||||
|
||||
nss_context->initgroups_start = 0;
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,260 @@
|
||||
/*
|
||||
* Copyright 2013-2017 Red Hat, Inc.
|
||||
*
|
||||
* This Program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation; version 2 of the License.
|
||||
*
|
||||
* This Program is distributed in the hope that it will be useful, but
|
||||
* WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this Program; if not, write to the
|
||||
*
|
||||
* Free Software Foundation, Inc.
|
||||
* 59 Temple Place, Suite 330
|
||||
* Boston, MA 02111-1307 USA
|
||||
*
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include "config.h"
|
||||
#endif
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <time.h>
|
||||
#include <unistd.h>
|
||||
#include <errno.h>
|
||||
#include <pwd.h>
|
||||
#include <grp.h>
|
||||
#include "back_extdom.h"
|
||||
|
||||
/* SSSD only exposes *_timeout() variants if the following symbol is defined */
|
||||
#define IPA_389DS_PLUGIN_HELPER_CALLS
|
||||
#include <sss_nss_idmap.h>
|
||||
|
||||
struct nss_ops_ctx {
|
||||
unsigned int timeout;
|
||||
};
|
||||
|
||||
static enum nss_status __convert_sss_nss2nss_status(int errcode) {
|
||||
switch(errcode) {
|
||||
case 0:
|
||||
return NSS_STATUS_SUCCESS;
|
||||
case ENOENT:
|
||||
return NSS_STATUS_NOTFOUND;
|
||||
case ETIME:
|
||||
/* fall-through */
|
||||
case ERANGE:
|
||||
return NSS_STATUS_TRYAGAIN;
|
||||
case ETIMEDOUT:
|
||||
/* fall-through */
|
||||
default:
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
int back_extdom_init_context(struct nss_ops_ctx **nss_context)
|
||||
{
|
||||
struct nss_ops_ctx *ctx = NULL;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return EINVAL;
|
||||
}
|
||||
|
||||
ctx = calloc(1, sizeof(struct nss_ops_ctx));
|
||||
|
||||
if (ctx == NULL) {
|
||||
return ENOMEM;
|
||||
}
|
||||
*nss_context = ctx;
|
||||
return 0;
|
||||
}
|
||||
|
||||
void back_extdom_free_context(struct nss_ops_ctx **nss_context)
|
||||
{
|
||||
if ((nss_context == NULL) || (*nss_context == NULL)) {
|
||||
return;
|
||||
}
|
||||
|
||||
free((*nss_context));
|
||||
*nss_context = NULL;
|
||||
}
|
||||
|
||||
|
||||
void back_extdom_set_timeout(struct nss_ops_ctx *nss_context,
|
||||
unsigned int timeout) {
|
||||
if (nss_context == NULL) {
|
||||
return;
|
||||
}
|
||||
|
||||
nss_context->timeout = timeout;
|
||||
}
|
||||
|
||||
void back_extdom_evict_user(struct nss_ops_ctx *nss_context,
|
||||
const char *name) {
|
||||
if (nss_context == NULL) {
|
||||
return;
|
||||
}
|
||||
|
||||
(void) sss_nss_getpwnam_timeout(name, NULL,
|
||||
NULL, 0,
|
||||
NULL,
|
||||
SSS_NSS_EX_FLAG_INVALIDATE_CACHE,
|
||||
nss_context->timeout);
|
||||
}
|
||||
|
||||
void back_extdom_evict_group(struct nss_ops_ctx *nss_context,
|
||||
const char *name) {
|
||||
if (nss_context == NULL) {
|
||||
return;
|
||||
}
|
||||
|
||||
(void) sss_nss_getgrnam_timeout(name, NULL,
|
||||
NULL, 0,
|
||||
NULL,
|
||||
SSS_NSS_EX_FLAG_INVALIDATE_CACHE,
|
||||
nss_context->timeout);
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getpwnam(struct nss_ops_ctx *nss_context,
|
||||
const char *name, struct passwd *pwd,
|
||||
char *buffer, size_t buflen,
|
||||
struct passwd **result,
|
||||
int *lerrno) {
|
||||
int ret = 0;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
ret = sss_nss_getpwnam_timeout(name, pwd,
|
||||
buffer, buflen,
|
||||
result,
|
||||
SSS_NSS_EX_FLAG_NO_FLAGS,
|
||||
nss_context->timeout);
|
||||
|
||||
/* SSSD uses the same infrastructure to handle sss_nss_get* calls
|
||||
* as nss_sss.so.2 module where 'int *errno' is passed to the helper
|
||||
* but writes down errno into return code so we propagate it in case
|
||||
* of error and translate the return code */
|
||||
if (lerrno != NULL) {
|
||||
*lerrno = ret;
|
||||
}
|
||||
return __convert_sss_nss2nss_status(ret);
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getpwuid(struct nss_ops_ctx *nss_context,
|
||||
uid_t uid, struct passwd *pwd,
|
||||
char *buffer, size_t buflen,
|
||||
struct passwd **result,
|
||||
int *lerrno) {
|
||||
|
||||
int ret = 0;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
ret = sss_nss_getpwuid_timeout(uid, pwd,
|
||||
buffer, buflen,
|
||||
result,
|
||||
SSS_NSS_EX_FLAG_NO_FLAGS,
|
||||
nss_context->timeout);
|
||||
|
||||
/* SSSD uses the same infrastructure to handle sss_nss_get* calls
|
||||
* as nss_sss.so.2 module where 'int *errno' is passed to the helper
|
||||
* but writes down errno into return code so we propagate it in case
|
||||
* of error and translate the return code */
|
||||
if (lerrno != NULL) {
|
||||
*lerrno = ret;
|
||||
}
|
||||
return __convert_sss_nss2nss_status(ret);
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getgrnam(struct nss_ops_ctx *nss_context,
|
||||
const char *name, struct group *grp,
|
||||
char *buffer, size_t buflen,
|
||||
struct group **result,
|
||||
int *lerrno) {
|
||||
|
||||
int ret = 0;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
ret = sss_nss_getgrnam_timeout(name, grp,
|
||||
buffer, buflen,
|
||||
result,
|
||||
SSS_NSS_EX_FLAG_NO_FLAGS,
|
||||
nss_context->timeout);
|
||||
|
||||
/* SSSD uses the same infrastructure to handle sss_nss_get* calls
|
||||
* as nss_sss.so.2 module where 'int *errno' is passed to the helper
|
||||
* but writes down errno into return code so we propagate it in case
|
||||
* of error and translate the return code */
|
||||
if (lerrno != NULL) {
|
||||
*lerrno = ret;
|
||||
}
|
||||
return __convert_sss_nss2nss_status(ret);
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getgrgid(struct nss_ops_ctx *nss_context,
|
||||
gid_t gid, struct group *grp,
|
||||
char *buffer, size_t buflen,
|
||||
struct group **result,
|
||||
int *lerrno) {
|
||||
|
||||
int ret = 0;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
ret = sss_nss_getgrgid_timeout(gid, grp,
|
||||
buffer, buflen,
|
||||
result,
|
||||
SSS_NSS_EX_FLAG_NO_FLAGS,
|
||||
nss_context->timeout);
|
||||
|
||||
/* SSSD uses the same infrastructure to handle sss_nss_get* calls
|
||||
* as nss_sss.so.2 module where 'int *errno' is passed to the helper
|
||||
* but writes down errno into return code so we propagate it in case
|
||||
* of error and translate the return code */
|
||||
if (lerrno != NULL) {
|
||||
*lerrno = ret;
|
||||
}
|
||||
return __convert_sss_nss2nss_status(ret);
|
||||
}
|
||||
|
||||
enum nss_status back_extdom_getgrouplist(struct nss_ops_ctx *nss_context,
|
||||
const char *name, gid_t group,
|
||||
gid_t *groups, int *ngroups,
|
||||
int *lerrno) {
|
||||
int ret = 0;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return NSS_STATUS_UNAVAIL;
|
||||
}
|
||||
|
||||
ret = sss_nss_getgrouplist_timeout(name, group,
|
||||
groups, ngroups,
|
||||
SSS_NSS_EX_FLAG_NO_FLAGS,
|
||||
nss_context->timeout);
|
||||
|
||||
/* SSSD uses the same infrastructure to handle sss_nss_get* calls
|
||||
* as nss_sss.so.2 module where 'int *errno' is passed to the helper
|
||||
* but writes down errno into return code so we propagate it in case
|
||||
* of error and translate the return code */
|
||||
if (lerrno != NULL) {
|
||||
*lerrno = ret;
|
||||
}
|
||||
return __convert_sss_nss2nss_status(ret);
|
||||
}
|
||||
|
||||
@@ -150,10 +150,13 @@ struct extdom_res {
|
||||
} data;
|
||||
};
|
||||
|
||||
struct nss_ops_ctx;
|
||||
|
||||
struct ipa_extdom_ctx {
|
||||
Slapi_ComponentId *plugin_id;
|
||||
char *base_dn;
|
||||
size_t max_nss_buf_size;
|
||||
struct nss_ops_ctx *nss_ctx;
|
||||
};
|
||||
|
||||
struct domain_info {
|
||||
@@ -179,15 +182,15 @@ int handle_request(struct ipa_extdom_ctx *ctx, struct extdom_req *req,
|
||||
struct berval **berval);
|
||||
int pack_response(struct extdom_res *res, struct berval **ret_val);
|
||||
int get_buffer(size_t *_buf_len, char **_buf);
|
||||
int getpwnam_r_wrapper(size_t buf_max, const char *name,
|
||||
int getpwnam_r_wrapper(struct ipa_extdom_ctx *ctx, const char *name,
|
||||
struct passwd *pwd, char **_buf, size_t *_buf_len);
|
||||
int getpwuid_r_wrapper(size_t buf_max, uid_t uid,
|
||||
int getpwuid_r_wrapper(struct ipa_extdom_ctx *ctx, uid_t uid,
|
||||
struct passwd *pwd, char **_buf, size_t *_buf_len);
|
||||
int getgrnam_r_wrapper(size_t buf_max, const char *name,
|
||||
int getgrnam_r_wrapper(struct ipa_extdom_ctx *ctx, const char *name,
|
||||
struct group *grp, char **_buf, size_t *_buf_len);
|
||||
int getgrgid_r_wrapper(size_t buf_max, gid_t gid,
|
||||
int getgrgid_r_wrapper(struct ipa_extdom_ctx *ctx, gid_t gid,
|
||||
struct group *grp, char **_buf, size_t *_buf_len);
|
||||
int get_user_grouplist(const char *name, gid_t gid,
|
||||
int get_user_grouplist(struct ipa_extdom_ctx *ctx, const char *name, gid_t gid,
|
||||
size_t *_ngroups, gid_t **_groups);
|
||||
int pack_ber_sid(const char *sid, struct berval **berval);
|
||||
int pack_ber_name(const char *domain_name, const char *name,
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
#define _GNU_SOURCE
|
||||
|
||||
#include <errno.h>
|
||||
#include <stdarg.h>
|
||||
@@ -31,24 +32,166 @@
|
||||
|
||||
|
||||
#include "ipa_extdom.h"
|
||||
#include "back_extdom.h"
|
||||
#include <stdio.h>
|
||||
#include <dlfcn.h>
|
||||
|
||||
#define MAX_BUF (1024*1024*1024)
|
||||
struct test_data {
|
||||
struct extdom_req *req;
|
||||
struct ipa_extdom_ctx *ctx;
|
||||
};
|
||||
|
||||
/*
|
||||
* redefine logging for mocks
|
||||
*/
|
||||
#ifdef __GNUC__
|
||||
__attribute__((format(printf, 3, 4)))
|
||||
#endif
|
||||
int slapi_log_error(int loglevel, char *subsystem, char *fmt, ...)
|
||||
{
|
||||
va_list ap;
|
||||
va_start(ap, fmt);
|
||||
vprint_error(fmt, ap);
|
||||
va_end(ap);
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* We cannot run cmocka tests against SSSD as that would require to set up SSSD
|
||||
* and the rest of environment. Instead, we compile cmocka tests against
|
||||
* back_extdom_nss_sss.c and re-define context initialization to use
|
||||
* nsswrapper with our test data.
|
||||
*
|
||||
* This means we have to keep struct nss_ops_ctx definition in sync with tests!
|
||||
*/
|
||||
|
||||
struct nss_ops_ctx {
|
||||
void *dl_handle;
|
||||
long int initgroups_start;
|
||||
|
||||
enum nss_status (*getpwnam_r)(const char *name, struct passwd *result,
|
||||
char *buffer, size_t buflen, int *errnop);
|
||||
enum nss_status (*getpwuid_r)(uid_t uid, struct passwd *result,
|
||||
char *buffer, size_t buflen, int *errnop);
|
||||
enum nss_status (*getgrnam_r)(const char *name, struct group *result,
|
||||
char *buffer, size_t buflen, int *errnop);
|
||||
enum nss_status (*getgrgid_r)(gid_t gid, struct group *result,
|
||||
char *buffer, size_t buflen, int *errnop);
|
||||
enum nss_status (*initgroups_dyn)(const char *user, gid_t group,
|
||||
long int *start, long int *size,
|
||||
gid_t **groups, long int limit,
|
||||
int *errnop);
|
||||
};
|
||||
|
||||
int cmocka_extdom_init_context(struct nss_ops_ctx **nss_context)
|
||||
{
|
||||
struct nss_ops_ctx *ctx = NULL;
|
||||
|
||||
if (nss_context == NULL) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
ctx = calloc(1, sizeof(struct nss_ops_ctx));
|
||||
|
||||
if (ctx == NULL) {
|
||||
return ENOMEM;
|
||||
}
|
||||
*nss_context = ctx;
|
||||
|
||||
ctx->dl_handle = dlopen("libnss_files.so.2", RTLD_NOW);
|
||||
if (ctx->dl_handle == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->getpwnam_r = dlsym(ctx->dl_handle, "_nss_files_getpwnam_r");
|
||||
if (ctx->getpwnam_r == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->getpwuid_r = dlsym(ctx->dl_handle, "_nss_files_getpwuid_r");
|
||||
if (ctx->getpwuid_r == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->getgrnam_r = dlsym(ctx->dl_handle, "_nss_files_getgrnam_r");
|
||||
if (ctx->getgrnam_r == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->getgrgid_r = dlsym(ctx->dl_handle, "_nss_files_getgrgid_r");
|
||||
if (ctx->getgrgid_r == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
ctx->initgroups_dyn = dlsym(ctx->dl_handle, "_nss_files_initgroups_dyn");
|
||||
if (ctx->initgroups_dyn == NULL) {
|
||||
goto fail;
|
||||
}
|
||||
|
||||
return 0;
|
||||
|
||||
fail:
|
||||
back_extdom_free_context(nss_context);
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
struct {
|
||||
const char *o, *n;
|
||||
} path_table[] = {
|
||||
{ .o = "/etc/passwd", .n = "./test_data/passwd"},
|
||||
{ .o = "/etc/group", .n = "./test_data/group"},
|
||||
{ .o = NULL, .n = NULL}};
|
||||
|
||||
FILE *(*original_fopen)(const char*, const char*) = NULL;
|
||||
|
||||
FILE *fopen(const char *path, const char *mode) {
|
||||
const char *_path = NULL;
|
||||
|
||||
/* Do not handle before-main() cases */
|
||||
if (original_fopen == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
for(int i=0; path_table[i].o != NULL; i++) {
|
||||
if (strcmp(path, path_table[i].o) == 0) {
|
||||
_path = path_table[i].n;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return (*original_fopen)(_path ? _path : path, mode);
|
||||
}
|
||||
|
||||
/* Attempt to initialize original_fopen before main()
|
||||
* There is no explicit order when all initializers are called,
|
||||
* so we might still be late here compared to a code in a shared
|
||||
* library initializer, like libselinux */
|
||||
void redefined_fopen_ctor (void) __attribute__ ((constructor));
|
||||
void redefined_fopen_ctor(void) {
|
||||
original_fopen = dlsym(RTLD_NEXT, "fopen");
|
||||
}
|
||||
|
||||
void test_getpwnam_r_wrapper(void **state)
|
||||
{
|
||||
int ret;
|
||||
struct passwd pwd;
|
||||
char *buf;
|
||||
size_t buf_len;
|
||||
size_t buf_len, max_big_buf_len;
|
||||
struct test_data *test_data;
|
||||
|
||||
test_data = (struct test_data *) *state;
|
||||
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getpwnam_r_wrapper(MAX_BUF, "non_exisiting_user", &pwd, &buf,
|
||||
&buf_len);
|
||||
ret = getpwnam_r_wrapper(test_data->ctx,
|
||||
"non_exisiting_user", &pwd,
|
||||
&buf, &buf_len);
|
||||
assert_int_equal(ret, ENOENT);
|
||||
|
||||
ret = getpwnam_r_wrapper(MAX_BUF, "user", &pwd, &buf, &buf_len);
|
||||
ret = getpwnam_r_wrapper(test_data->ctx,
|
||||
"user", &pwd, &buf, &buf_len);
|
||||
assert_int_equal(ret, 0);
|
||||
assert_string_equal(pwd.pw_name, "user");
|
||||
assert_string_equal(pwd.pw_passwd, "x");
|
||||
@@ -62,7 +205,8 @@ void test_getpwnam_r_wrapper(void **state)
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getpwnam_r_wrapper(MAX_BUF, "user_big", &pwd, &buf, &buf_len);
|
||||
ret = getpwnam_r_wrapper(test_data->ctx,
|
||||
"user_big", &pwd, &buf, &buf_len);
|
||||
assert_int_equal(ret, 0);
|
||||
assert_string_equal(pwd.pw_name, "user_big");
|
||||
assert_string_equal(pwd.pw_passwd, "x");
|
||||
@@ -76,7 +220,11 @@ void test_getpwnam_r_wrapper(void **state)
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getpwnam_r_wrapper(1024, "user_big", &pwd, &buf, &buf_len);
|
||||
max_big_buf_len = test_data->ctx->max_nss_buf_size;
|
||||
test_data->ctx->max_nss_buf_size = 1024;
|
||||
ret = getpwnam_r_wrapper(test_data->ctx,
|
||||
"user_big", &pwd, &buf, &buf_len);
|
||||
test_data->ctx->max_nss_buf_size = max_big_buf_len;
|
||||
assert_int_equal(ret, ERANGE);
|
||||
free(buf);
|
||||
}
|
||||
@@ -86,15 +234,18 @@ void test_getpwuid_r_wrapper(void **state)
|
||||
int ret;
|
||||
struct passwd pwd;
|
||||
char *buf;
|
||||
size_t buf_len;
|
||||
size_t buf_len, max_big_buf_len;
|
||||
struct test_data *test_data;
|
||||
|
||||
test_data = (struct test_data *) *state;
|
||||
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getpwuid_r_wrapper(MAX_BUF, 99999, &pwd, &buf, &buf_len);
|
||||
ret = getpwuid_r_wrapper(test_data->ctx, 99999, &pwd, &buf, &buf_len);
|
||||
assert_int_equal(ret, ENOENT);
|
||||
|
||||
ret = getpwuid_r_wrapper(MAX_BUF, 12345, &pwd, &buf, &buf_len);
|
||||
ret = getpwuid_r_wrapper(test_data->ctx, 12345, &pwd, &buf, &buf_len);
|
||||
assert_int_equal(ret, 0);
|
||||
assert_string_equal(pwd.pw_name, "user");
|
||||
assert_string_equal(pwd.pw_passwd, "x");
|
||||
@@ -108,7 +259,7 @@ void test_getpwuid_r_wrapper(void **state)
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getpwuid_r_wrapper(MAX_BUF, 12346, &pwd, &buf, &buf_len);
|
||||
ret = getpwuid_r_wrapper(test_data->ctx, 12346, &pwd, &buf, &buf_len);
|
||||
assert_int_equal(ret, 0);
|
||||
assert_string_equal(pwd.pw_name, "user_big");
|
||||
assert_string_equal(pwd.pw_passwd, "x");
|
||||
@@ -122,7 +273,10 @@ void test_getpwuid_r_wrapper(void **state)
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getpwuid_r_wrapper(1024, 12346, &pwd, &buf, &buf_len);
|
||||
max_big_buf_len = test_data->ctx->max_nss_buf_size;
|
||||
test_data->ctx->max_nss_buf_size = 1024;
|
||||
ret = getpwuid_r_wrapper(test_data->ctx, 12346, &pwd, &buf, &buf_len);
|
||||
test_data->ctx->max_nss_buf_size = max_big_buf_len;
|
||||
assert_int_equal(ret, ERANGE);
|
||||
free(buf);
|
||||
}
|
||||
@@ -132,15 +286,19 @@ void test_getgrnam_r_wrapper(void **state)
|
||||
int ret;
|
||||
struct group grp;
|
||||
char *buf;
|
||||
size_t buf_len;
|
||||
size_t buf_len, max_big_buf_len;
|
||||
struct test_data *test_data;
|
||||
|
||||
test_data = (struct test_data *) *state;
|
||||
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getgrnam_r_wrapper(MAX_BUF, "non_exisiting_group", &grp, &buf, &buf_len);
|
||||
ret = getgrnam_r_wrapper(test_data->ctx,
|
||||
"non_exisiting_group", &grp, &buf, &buf_len);
|
||||
assert_int_equal(ret, ENOENT);
|
||||
|
||||
ret = getgrnam_r_wrapper(MAX_BUF, "group", &grp, &buf, &buf_len);
|
||||
ret = getgrnam_r_wrapper(test_data->ctx, "group", &grp, &buf, &buf_len);
|
||||
assert_int_equal(ret, 0);
|
||||
assert_string_equal(grp.gr_name, "group");
|
||||
assert_string_equal(grp.gr_passwd, "x");
|
||||
@@ -153,7 +311,7 @@ void test_getgrnam_r_wrapper(void **state)
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getgrnam_r_wrapper(MAX_BUF, "group_big", &grp, &buf, &buf_len);
|
||||
ret = getgrnam_r_wrapper(test_data->ctx, "group_big", &grp, &buf, &buf_len);
|
||||
assert_int_equal(ret, 0);
|
||||
assert_string_equal(grp.gr_name, "group_big");
|
||||
assert_string_equal(grp.gr_passwd, "x");
|
||||
@@ -165,7 +323,10 @@ void test_getgrnam_r_wrapper(void **state)
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getgrnam_r_wrapper(1024, "group_big", &grp, &buf, &buf_len);
|
||||
max_big_buf_len = test_data->ctx->max_nss_buf_size;
|
||||
test_data->ctx->max_nss_buf_size = 1024;
|
||||
ret = getgrnam_r_wrapper(test_data->ctx, "group_big", &grp, &buf, &buf_len);
|
||||
test_data->ctx->max_nss_buf_size = max_big_buf_len;
|
||||
assert_int_equal(ret, ERANGE);
|
||||
free(buf);
|
||||
}
|
||||
@@ -175,15 +336,18 @@ void test_getgrgid_r_wrapper(void **state)
|
||||
int ret;
|
||||
struct group grp;
|
||||
char *buf;
|
||||
size_t buf_len;
|
||||
size_t buf_len, max_big_buf_len;
|
||||
struct test_data *test_data;
|
||||
|
||||
test_data = (struct test_data *) *state;
|
||||
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getgrgid_r_wrapper(MAX_BUF, 99999, &grp, &buf, &buf_len);
|
||||
ret = getgrgid_r_wrapper(test_data->ctx, 99999, &grp, &buf, &buf_len);
|
||||
assert_int_equal(ret, ENOENT);
|
||||
|
||||
ret = getgrgid_r_wrapper(MAX_BUF, 11111, &grp, &buf, &buf_len);
|
||||
ret = getgrgid_r_wrapper(test_data->ctx, 11111, &grp, &buf, &buf_len);
|
||||
assert_int_equal(ret, 0);
|
||||
assert_string_equal(grp.gr_name, "group");
|
||||
assert_string_equal(grp.gr_passwd, "x");
|
||||
@@ -196,7 +360,7 @@ void test_getgrgid_r_wrapper(void **state)
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getgrgid_r_wrapper(MAX_BUF, 22222, &grp, &buf, &buf_len);
|
||||
ret = getgrgid_r_wrapper(test_data->ctx, 22222, &grp, &buf, &buf_len);
|
||||
assert_int_equal(ret, 0);
|
||||
assert_string_equal(grp.gr_name, "group_big");
|
||||
assert_string_equal(grp.gr_passwd, "x");
|
||||
@@ -208,7 +372,10 @@ void test_getgrgid_r_wrapper(void **state)
|
||||
ret = get_buffer(&buf_len, &buf);
|
||||
assert_int_equal(ret, 0);
|
||||
|
||||
ret = getgrgid_r_wrapper(1024, 22222, &grp, &buf, &buf_len);
|
||||
max_big_buf_len = test_data->ctx->max_nss_buf_size;
|
||||
test_data->ctx->max_nss_buf_size = 1024;
|
||||
ret = getgrgid_r_wrapper(test_data->ctx, 22222, &grp, &buf, &buf_len);
|
||||
test_data->ctx->max_nss_buf_size = max_big_buf_len;
|
||||
assert_int_equal(ret, ERANGE);
|
||||
free(buf);
|
||||
}
|
||||
@@ -219,16 +386,21 @@ void test_get_user_grouplist(void **state)
|
||||
size_t ngroups;
|
||||
gid_t *groups;
|
||||
size_t c;
|
||||
struct test_data *test_data;
|
||||
|
||||
test_data = (struct test_data *) *state;
|
||||
|
||||
/* This is a bit odd behaviour of getgrouplist() it does not check if the
|
||||
* user exists, only if memberships of the user can be found. */
|
||||
ret = get_user_grouplist("non_exisiting_user", 23456, &ngroups, &groups);
|
||||
ret = get_user_grouplist(test_data->ctx,
|
||||
"non_exisiting_user", 23456, &ngroups, &groups);
|
||||
assert_int_equal(ret, LDAP_SUCCESS);
|
||||
assert_int_equal(ngroups, 1);
|
||||
assert_int_equal(groups[0], 23456);
|
||||
free(groups);
|
||||
|
||||
ret = get_user_grouplist("member0001", 23456, &ngroups, &groups);
|
||||
ret = get_user_grouplist(test_data->ctx,
|
||||
"member0001", 23456, &ngroups, &groups);
|
||||
assert_int_equal(ret, LDAP_SUCCESS);
|
||||
assert_int_equal(ngroups, 3);
|
||||
assert_int_equal(groups[0], 23456);
|
||||
@@ -236,14 +408,16 @@ void test_get_user_grouplist(void **state)
|
||||
assert_int_equal(groups[2], 22222);
|
||||
free(groups);
|
||||
|
||||
ret = get_user_grouplist("member0003", 23456, &ngroups, &groups);
|
||||
ret = get_user_grouplist(test_data->ctx,
|
||||
"member0003", 23456, &ngroups, &groups);
|
||||
assert_int_equal(ret, LDAP_SUCCESS);
|
||||
assert_int_equal(ngroups, 2);
|
||||
assert_int_equal(groups[0], 23456);
|
||||
assert_int_equal(groups[1], 22222);
|
||||
free(groups);
|
||||
|
||||
ret = get_user_grouplist("user_big", 23456, &ngroups, &groups);
|
||||
ret = get_user_grouplist(test_data->ctx,
|
||||
"user_big", 23456, &ngroups, &groups);
|
||||
assert_int_equal(ret, LDAP_SUCCESS);
|
||||
assert_int_equal(ngroups, 1001);
|
||||
assert_int_equal(groups[0], 23456);
|
||||
@@ -253,11 +427,6 @@ void test_get_user_grouplist(void **state)
|
||||
free(groups);
|
||||
}
|
||||
|
||||
struct test_data {
|
||||
struct extdom_req *req;
|
||||
struct ipa_extdom_ctx *ctx;
|
||||
};
|
||||
|
||||
static int extdom_req_setup(void **state)
|
||||
{
|
||||
struct test_data *test_data;
|
||||
@@ -269,8 +438,14 @@ static int extdom_req_setup(void **state)
|
||||
assert_non_null(test_data->req);
|
||||
|
||||
test_data->ctx = calloc(sizeof(struct ipa_extdom_ctx), 1);
|
||||
assert_non_null(test_data->req);
|
||||
assert_non_null(test_data->ctx);
|
||||
|
||||
test_data->ctx->max_nss_buf_size = MAX_BUF;
|
||||
|
||||
assert_int_equal(cmocka_extdom_init_context(&test_data->ctx->nss_ctx), 0);
|
||||
assert_non_null(test_data->ctx->nss_ctx);
|
||||
|
||||
back_extdom_set_timeout(test_data->ctx->nss_ctx, 10000);
|
||||
*state = test_data;
|
||||
|
||||
return 0;
|
||||
@@ -283,6 +458,7 @@ static int extdom_req_teardown(void **state)
|
||||
test_data = (struct test_data *) *state;
|
||||
|
||||
free_req_data(test_data->req);
|
||||
back_extdom_free_context(&test_data->ctx->nss_ctx);
|
||||
free(test_data->ctx);
|
||||
free(test_data);
|
||||
|
||||
@@ -450,5 +626,6 @@ int main(int argc, const char *argv[])
|
||||
cmocka_unit_test(test_decode),
|
||||
};
|
||||
|
||||
return cmocka_run_group_tests(tests, NULL, NULL);
|
||||
assert_non_null(original_fopen);
|
||||
return cmocka_run_group_tests(tests, extdom_req_setup, extdom_req_teardown);
|
||||
}
|
||||
|
||||
@@ -43,11 +43,12 @@
|
||||
|
||||
#include <errno.h>
|
||||
#include <stdio.h>
|
||||
#include <sys/param.h>
|
||||
|
||||
#include "ipa_extdom.h"
|
||||
#include "back_extdom.h"
|
||||
#include "util.h"
|
||||
|
||||
#define MAX(a,b) (((a)>(b))?(a):(b))
|
||||
#define SSSD_DOMAIN_SEPARATOR '@'
|
||||
|
||||
int get_buffer(size_t *_buf_len, char **_buf)
|
||||
@@ -97,134 +98,137 @@ static int inc_buffer(size_t buf_max, size_t *_buf_len, char **_buf)
|
||||
return 0;
|
||||
}
|
||||
|
||||
int getpwnam_r_wrapper(size_t buf_max, const char *name,
|
||||
struct passwd *pwd, char **_buf, size_t *_buf_len)
|
||||
int __nss_to_err(enum nss_status errcode)
|
||||
{
|
||||
char *buf = NULL;
|
||||
size_t buf_len = 0;
|
||||
int ret;
|
||||
switch(errcode) {
|
||||
case NSS_STATUS_SUCCESS:
|
||||
return 0;
|
||||
case NSS_STATUS_NOTFOUND:
|
||||
return ENOENT;
|
||||
case NSS_STATUS_TRYAGAIN:
|
||||
return ERANGE;
|
||||
case NSS_STATUS_UNAVAIL:
|
||||
return ETIMEDOUT;
|
||||
}
|
||||
|
||||
return -1;
|
||||
}
|
||||
|
||||
int getpwnam_r_wrapper(struct ipa_extdom_ctx *ctx, const char *name,
|
||||
struct passwd *pwd, char **buf, size_t *buf_len)
|
||||
{
|
||||
int ret, lerrno = 0;
|
||||
struct passwd *result = NULL;
|
||||
enum nss_status rc;
|
||||
|
||||
buf = *_buf;
|
||||
buf_len = *_buf_len;
|
||||
|
||||
while (buf != NULL
|
||||
&& (ret = getpwnam_r(name, pwd, buf, buf_len, &result)) == ERANGE) {
|
||||
ret = inc_buffer(buf_max, &buf_len, &buf);
|
||||
if (ret != 0) {
|
||||
if (ret == ERANGE) {
|
||||
LOG("Buffer too small, increase ipaExtdomMaxNssBufSize.\n");
|
||||
}
|
||||
goto done;
|
||||
for(rc = NSS_STATUS_TRYAGAIN; rc == NSS_STATUS_TRYAGAIN;) {
|
||||
rc = back_extdom_getpwnam(ctx->nss_ctx, name, pwd, *buf, *buf_len, &result, &lerrno);
|
||||
ret = __nss_to_err(rc);
|
||||
if (ret == ERANGE) {
|
||||
ret = inc_buffer(ctx->max_nss_buf_size, buf_len, buf);
|
||||
if (ret != 0) goto done;
|
||||
}
|
||||
}
|
||||
|
||||
if (ret == 0 && result == NULL) {
|
||||
ret = ENOENT;
|
||||
}
|
||||
|
||||
done:
|
||||
*_buf = buf;
|
||||
*_buf_len = buf_len;
|
||||
|
||||
switch(ret) {
|
||||
case 0:
|
||||
if (result == NULL) ret = ENOENT;
|
||||
break;
|
||||
case ERANGE:
|
||||
LOG("Buffer too small, increase ipaExtdomMaxNssBufSize.\n");
|
||||
default:
|
||||
break;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
int getpwuid_r_wrapper(size_t buf_max, uid_t uid,
|
||||
struct passwd *pwd, char **_buf, size_t *_buf_len)
|
||||
int getpwuid_r_wrapper(struct ipa_extdom_ctx *ctx, uid_t uid,
|
||||
struct passwd *pwd, char **buf, size_t *buf_len)
|
||||
{
|
||||
char *buf = NULL;
|
||||
size_t buf_len = 0;
|
||||
int ret;
|
||||
int ret, lerrno;
|
||||
struct passwd *result = NULL;
|
||||
enum nss_status rc;
|
||||
|
||||
buf = *_buf;
|
||||
buf_len = *_buf_len;
|
||||
|
||||
while (buf != NULL
|
||||
&& (ret = getpwuid_r(uid, pwd, buf, buf_len, &result)) == ERANGE) {
|
||||
ret = inc_buffer(buf_max, &buf_len, &buf);
|
||||
if (ret != 0) {
|
||||
if (ret == ERANGE) {
|
||||
LOG("Buffer too small, increase ipaExtdomMaxNssBufSize.\n");
|
||||
}
|
||||
goto done;
|
||||
for(rc = NSS_STATUS_TRYAGAIN; rc == NSS_STATUS_TRYAGAIN;) {
|
||||
rc = back_extdom_getpwuid(ctx->nss_ctx, uid, pwd, *buf, *buf_len, &result, &lerrno);
|
||||
ret = __nss_to_err(rc);
|
||||
if (ret == ERANGE) {
|
||||
ret = inc_buffer(ctx->max_nss_buf_size, buf_len, buf);
|
||||
if (ret != 0) goto done;
|
||||
}
|
||||
}
|
||||
|
||||
if (ret == 0 && result == NULL) {
|
||||
ret = ENOENT;
|
||||
}
|
||||
|
||||
done:
|
||||
*_buf = buf;
|
||||
*_buf_len = buf_len;
|
||||
switch(ret) {
|
||||
case 0:
|
||||
if (result == NULL) ret = ENOENT;
|
||||
break;
|
||||
case ERANGE:
|
||||
LOG("Buffer too small, increase ipaExtdomMaxNssBufSize.\n");
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int getgrnam_r_wrapper(size_t buf_max, const char *name,
|
||||
struct group *grp, char **_buf, size_t *_buf_len)
|
||||
int getgrnam_r_wrapper(struct ipa_extdom_ctx *ctx, const char *name,
|
||||
struct group *grp, char **buf, size_t *buf_len)
|
||||
{
|
||||
char *buf = NULL;
|
||||
size_t buf_len = 0;
|
||||
int ret;
|
||||
int ret, lerrno;
|
||||
struct group *result = NULL;
|
||||
enum nss_status rc;
|
||||
|
||||
buf = *_buf;
|
||||
buf_len = *_buf_len;
|
||||
|
||||
while (buf != NULL
|
||||
&& (ret = getgrnam_r(name, grp, buf, buf_len, &result)) == ERANGE) {
|
||||
ret = inc_buffer(buf_max, &buf_len, &buf);
|
||||
if (ret != 0) {
|
||||
if (ret == ERANGE) {
|
||||
LOG("Buffer too small, increase ipaExtdomMaxNssBufSize.\n");
|
||||
}
|
||||
goto done;
|
||||
for(rc = NSS_STATUS_TRYAGAIN; rc == NSS_STATUS_TRYAGAIN;) {
|
||||
rc = back_extdom_getgrnam(ctx->nss_ctx, name, grp, *buf, *buf_len, &result, &lerrno);
|
||||
ret = __nss_to_err(rc);
|
||||
if (ret == ERANGE) {
|
||||
ret = inc_buffer(ctx->max_nss_buf_size, buf_len, buf);
|
||||
if (ret != 0) goto done;
|
||||
}
|
||||
}
|
||||
|
||||
if (ret == 0 && result == NULL) {
|
||||
ret = ENOENT;
|
||||
}
|
||||
|
||||
done:
|
||||
*_buf = buf;
|
||||
*_buf_len = buf_len;
|
||||
switch(ret) {
|
||||
case 0:
|
||||
if (result == NULL) ret = ENOENT;
|
||||
break;
|
||||
case ERANGE:
|
||||
LOG("Buffer too small, increase ipaExtdomMaxNssBufSize.\n");
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
int getgrgid_r_wrapper(size_t buf_max, gid_t gid,
|
||||
struct group *grp, char **_buf, size_t *_buf_len)
|
||||
int getgrgid_r_wrapper(struct ipa_extdom_ctx *ctx, gid_t gid,
|
||||
struct group *grp, char **buf, size_t *buf_len)
|
||||
{
|
||||
char *buf = NULL;
|
||||
size_t buf_len = 0;
|
||||
int ret;
|
||||
int ret, lerrno;
|
||||
struct group *result = NULL;
|
||||
enum nss_status rc;
|
||||
|
||||
buf = *_buf;
|
||||
buf_len = *_buf_len;
|
||||
|
||||
while (buf != NULL
|
||||
&& (ret = getgrgid_r(gid, grp, buf, buf_len, &result)) == ERANGE) {
|
||||
ret = inc_buffer(buf_max, &buf_len, &buf);
|
||||
if (ret != 0) {
|
||||
if (ret == ERANGE) {
|
||||
LOG("Buffer too small, increase ipaExtdomMaxNssBufSize.\n");
|
||||
}
|
||||
goto done;
|
||||
for(rc = NSS_STATUS_TRYAGAIN; rc == NSS_STATUS_TRYAGAIN;) {
|
||||
rc = back_extdom_getgrgid(ctx->nss_ctx, gid, grp, *buf, *buf_len, &result, &lerrno);
|
||||
ret = __nss_to_err(rc);
|
||||
if (ret == ERANGE) {
|
||||
ret = inc_buffer(ctx->max_nss_buf_size, buf_len, buf);
|
||||
if (ret != 0) goto done;
|
||||
}
|
||||
}
|
||||
|
||||
if (ret == 0 && result == NULL) {
|
||||
ret = ENOENT;
|
||||
}
|
||||
|
||||
done:
|
||||
*_buf = buf;
|
||||
*_buf_len = buf_len;
|
||||
switch(ret) {
|
||||
case 0:
|
||||
if (result == NULL) ret = ENOENT;
|
||||
break;
|
||||
case ERANGE:
|
||||
LOG("Buffer too small, increase ipaExtdomMaxNssBufSize.\n");
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
@@ -406,13 +410,14 @@ int check_request(struct extdom_req *req, enum extdom_version version)
|
||||
return LDAP_SUCCESS;
|
||||
}
|
||||
|
||||
int get_user_grouplist(const char *name, gid_t gid,
|
||||
int get_user_grouplist(struct ipa_extdom_ctx *ctx, const char *name, gid_t gid,
|
||||
size_t *_ngroups, gid_t **_groups)
|
||||
{
|
||||
int ret;
|
||||
int lerrno;
|
||||
int ngroups;
|
||||
gid_t *groups;
|
||||
gid_t *new_groups;
|
||||
enum nss_status rc;
|
||||
|
||||
ngroups = 128;
|
||||
groups = malloc(ngroups * sizeof(gid_t));
|
||||
@@ -420,19 +425,18 @@ int get_user_grouplist(const char *name, gid_t gid,
|
||||
return LDAP_OPERATIONS_ERROR;
|
||||
}
|
||||
|
||||
ret = getgrouplist(name, gid, groups, &ngroups);
|
||||
if (ret == -1) {
|
||||
new_groups = realloc(groups, ngroups * sizeof(gid_t));
|
||||
if (new_groups == NULL) {
|
||||
free(groups);
|
||||
return LDAP_OPERATIONS_ERROR;
|
||||
}
|
||||
groups = new_groups;
|
||||
|
||||
ret = getgrouplist(name, gid, groups, &ngroups);
|
||||
if (ret == -1) {
|
||||
free(groups);
|
||||
return LDAP_OPERATIONS_ERROR;
|
||||
for(rc = NSS_STATUS_TRYAGAIN; rc == NSS_STATUS_TRYAGAIN;) {
|
||||
rc = back_extdom_getgrouplist(ctx->nss_ctx, name, gid, groups, &ngroups, &lerrno);
|
||||
if (rc == NSS_STATUS_TRYAGAIN) {
|
||||
new_groups = NULL;
|
||||
if (lerrno == ERANGE) {
|
||||
new_groups = realloc(groups, ngroups * sizeof(gid_t));
|
||||
}
|
||||
if ((new_groups == NULL) || (lerrno == ENOMEM)) {
|
||||
free(groups);
|
||||
return LDAP_OPERATIONS_ERROR;
|
||||
}
|
||||
groups = new_groups;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -538,7 +542,7 @@ int pack_ber_user(struct ipa_extdom_ctx *ctx,
|
||||
}
|
||||
|
||||
if (response_type == RESP_USER_GROUPLIST) {
|
||||
ret = get_user_grouplist(user_name, gid, &ngroups, &groups);
|
||||
ret = get_user_grouplist(ctx, user_name, gid, &ngroups, &groups);
|
||||
if (ret != LDAP_SUCCESS) {
|
||||
goto done;
|
||||
}
|
||||
@@ -561,7 +565,7 @@ int pack_ber_user(struct ipa_extdom_ctx *ctx,
|
||||
}
|
||||
|
||||
for (c = 0; c < ngroups; c++) {
|
||||
ret = getgrgid_r_wrapper(ctx->max_nss_buf_size,
|
||||
ret = getgrgid_r_wrapper(ctx,
|
||||
groups[c], &grp, &buf, &buf_len);
|
||||
if (ret != 0) {
|
||||
if (ret == ENOMEM || ret == ERANGE) {
|
||||
@@ -841,8 +845,7 @@ static int handle_uid_request(struct ipa_extdom_ctx *ctx,
|
||||
|
||||
ret = pack_ber_sid(sid_str, berval);
|
||||
} else {
|
||||
ret = getpwuid_r_wrapper(ctx->max_nss_buf_size, uid, &pwd, &buf,
|
||||
&buf_len);
|
||||
ret = getpwuid_r_wrapper(ctx, uid, &pwd, &buf, &buf_len);
|
||||
if (ret != 0) {
|
||||
if (ret == ENOMEM || ret == ERANGE) {
|
||||
ret = LDAP_OPERATIONS_ERROR;
|
||||
@@ -913,8 +916,7 @@ static int handle_gid_request(struct ipa_extdom_ctx *ctx,
|
||||
|
||||
ret = pack_ber_sid(sid_str, berval);
|
||||
} else {
|
||||
ret = getgrgid_r_wrapper(ctx->max_nss_buf_size, gid, &grp, &buf,
|
||||
&buf_len);
|
||||
ret = getgrgid_r_wrapper(ctx, gid, &grp, &buf, &buf_len);
|
||||
if (ret != 0) {
|
||||
if (ret == ENOMEM || ret == ERANGE) {
|
||||
ret = LDAP_OPERATIONS_ERROR;
|
||||
@@ -1053,8 +1055,7 @@ static int handle_sid_request(struct ipa_extdom_ctx *ctx,
|
||||
switch(id_type) {
|
||||
case SSS_ID_TYPE_UID:
|
||||
case SSS_ID_TYPE_BOTH:
|
||||
ret = getpwnam_r_wrapper(ctx->max_nss_buf_size, fq_name, &pwd, &buf,
|
||||
&buf_len);
|
||||
ret = getpwnam_r_wrapper(ctx, fq_name, &pwd, &buf, &buf_len);
|
||||
if (ret != 0) {
|
||||
if (ret == ENOMEM || ret == ERANGE) {
|
||||
ret = LDAP_OPERATIONS_ERROR;
|
||||
@@ -1086,8 +1087,7 @@ static int handle_sid_request(struct ipa_extdom_ctx *ctx,
|
||||
pwd.pw_shell, kv_list, berval);
|
||||
break;
|
||||
case SSS_ID_TYPE_GID:
|
||||
ret = getgrnam_r_wrapper(ctx->max_nss_buf_size, fq_name, &grp, &buf,
|
||||
&buf_len);
|
||||
ret = getgrnam_r_wrapper(ctx, fq_name, &grp, &buf, &buf_len);
|
||||
if (ret != 0) {
|
||||
if (ret == ENOMEM || ret == ERANGE) {
|
||||
ret = LDAP_OPERATIONS_ERROR;
|
||||
@@ -1181,8 +1181,7 @@ static int handle_name_request(struct ipa_extdom_ctx *ctx,
|
||||
goto done;
|
||||
}
|
||||
|
||||
ret = getpwnam_r_wrapper(ctx->max_nss_buf_size, fq_name, &pwd, &buf,
|
||||
&buf_len);
|
||||
ret = getpwnam_r_wrapper(ctx, fq_name, &pwd, &buf, &buf_len);
|
||||
if (ret == 0) {
|
||||
if (request_type == REQ_FULL_WITH_GROUPS) {
|
||||
ret = sss_nss_getorigbyname(pwd.pw_name, &kv_list, &id_type);
|
||||
@@ -1211,8 +1210,7 @@ static int handle_name_request(struct ipa_extdom_ctx *ctx,
|
||||
* error codes which can indicate that the user was not found. To
|
||||
* be on the safe side we fail back to the group lookup on all
|
||||
* errors. */
|
||||
ret = getgrnam_r_wrapper(ctx->max_nss_buf_size, fq_name, &grp, &buf,
|
||||
&buf_len);
|
||||
ret = getgrnam_r_wrapper(ctx, fq_name, &grp, &buf, &buf_len);
|
||||
if (ret != 0) {
|
||||
if (ret == ENOMEM || ret == ERANGE) {
|
||||
ret = LDAP_OPERATIONS_ERROR;
|
||||
|
||||
@@ -38,9 +38,11 @@
|
||||
* END COPYRIGHT BLOCK **/
|
||||
|
||||
#include "ipa_extdom.h"
|
||||
#include "back_extdom.h"
|
||||
#include "util.h"
|
||||
|
||||
#define DEFAULT_MAX_NSS_BUFFER (128*1024*1024)
|
||||
#define DEFAULT_MAX_NSS_TIMEOUT (10*1000)
|
||||
|
||||
Slapi_PluginDesc ipa_extdom_plugin_desc = {
|
||||
IPA_EXTDOM_FEATURE_DESC,
|
||||
@@ -166,6 +168,7 @@ static int ipa_extdom_init_ctx(Slapi_PBlock *pb, struct ipa_extdom_ctx **_ctx)
|
||||
struct ipa_extdom_ctx *ctx;
|
||||
Slapi_Entry *e;
|
||||
int ret;
|
||||
unsigned int timeout;
|
||||
|
||||
ctx = calloc(1, sizeof(struct ipa_extdom_ctx));
|
||||
if (!ctx) {
|
||||
@@ -202,6 +205,20 @@ static int ipa_extdom_init_ctx(Slapi_PBlock *pb, struct ipa_extdom_ctx **_ctx)
|
||||
}
|
||||
LOG("Maximal nss buffer size set to [%zu]!\n", ctx->max_nss_buf_size);
|
||||
|
||||
|
||||
ret = back_extdom_init_context(&ctx->nss_ctx);
|
||||
if (ret != 0) {
|
||||
LOG("Unable to initialize nss interface: returned [%d]!\n", ret);
|
||||
goto done;
|
||||
}
|
||||
|
||||
timeout = slapi_entry_attr_get_uint(e, "ipaExtdomMaxNssTimeout");
|
||||
if (timeout == 0) {
|
||||
timeout = DEFAULT_MAX_NSS_TIMEOUT;
|
||||
}
|
||||
back_extdom_set_timeout(ctx->nss_ctx, timeout);
|
||||
LOG("Maximal nss timeout (in ms) set to [%u]!\n", timeout);
|
||||
|
||||
ret = 0;
|
||||
|
||||
done:
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
export LD_PRELOAD=$(pkg-config --libs nss_wrapper)
|
||||
export NSS_WRAPPER_PASSWD=./test_data/passwd
|
||||
export NSS_WRAPPER_GROUP=./test_data/group
|
||||
@@ -911,6 +911,7 @@ static int ipauuid_pre_op(Slapi_PBlock *pb, int modtype)
|
||||
list != ipauuid_global_config;
|
||||
list = PR_NEXT_LINK(list)) {
|
||||
cfgentry = (struct configEntry *) list;
|
||||
char *current_dn = NULL;
|
||||
|
||||
generate = false;
|
||||
set_attr = false;
|
||||
@@ -920,16 +921,21 @@ static int ipauuid_pre_op(Slapi_PBlock *pb, int modtype)
|
||||
cfgentry->attr)) {
|
||||
continue;
|
||||
}
|
||||
/* Current DN may have been reset by
|
||||
* slapi_pblock_set(pb, SLAPI_ADD_TARGET,..) see below
|
||||
* need to reread it
|
||||
*/
|
||||
current_dn = ipauuid_get_dn(pb);
|
||||
|
||||
/* is the entry in scope? */
|
||||
if (cfgentry->scope) {
|
||||
if (!slapi_dn_issuffix(dn, cfgentry->scope)) {
|
||||
if (!slapi_dn_issuffix(current_dn, cfgentry->scope)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (cfgentry->exclude_subtree) {
|
||||
if (slapi_dn_issuffix(dn, cfgentry->exclude_subtree)) {
|
||||
if (slapi_dn_issuffix(current_dn, cfgentry->exclude_subtree)) {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@@ -1108,7 +1114,7 @@ static int ipauuid_pre_op(Slapi_PBlock *pb, int modtype)
|
||||
ret = LDAP_OPERATIONS_ERROR;
|
||||
goto done;
|
||||
}
|
||||
sdn = slapi_sdn_new_dn_byval(dn);
|
||||
sdn = slapi_sdn_new_dn_byval(current_dn);
|
||||
if (!sdn) {
|
||||
LOG_OOM();
|
||||
ret = LDAP_OPERATIONS_ERROR;
|
||||
|
||||
@@ -15,4 +15,4 @@ libotp_la_LIBADD = libhotp.la
|
||||
|
||||
check_PROGRAMS = t_hotp
|
||||
TESTS = $(check_PROGRAMS)
|
||||
t_hotp_LDADD = $(NSPR_LIBS) $(NSS_LIBS) libhotp.la
|
||||
t_hotp_LDADD = libhotp.la $(NSPR_LIBS) $(NSS_LIBS)
|
||||
|
||||
220
freeipa.spec.in
220
freeipa.spec.in
@@ -25,6 +25,12 @@
|
||||
%endif
|
||||
%endif
|
||||
|
||||
%if 0%{?with_python3}
|
||||
%global python %{__python3}
|
||||
%else
|
||||
%global python %{__python2}
|
||||
%endif
|
||||
|
||||
# lint is not executed during rpmbuild
|
||||
# %%global with_lint 1
|
||||
%if 0%{?with_lint}
|
||||
@@ -39,21 +45,24 @@
|
||||
%global krb5_version 1.15.1-4
|
||||
# 0.7.16: https://github.com/drkjam/netaddr/issues/71
|
||||
%global python_netaddr_version 0.7.5-8
|
||||
# Require 4.6.0-4 which brings RC4 for FIPS + trust fixes to priv. separation
|
||||
%global samba_version 4.6.0-4
|
||||
# Require 4.7.0 which brings Python 3 bindings
|
||||
%global samba_version 4.7.0
|
||||
%global selinux_policy_version 3.12.1-153
|
||||
%global slapi_nis_version 0.56.0-4
|
||||
%global python_ldap_version 2.4.15
|
||||
%else
|
||||
# 1.15.1-7: certauth (http://krbdev.mit.edu/rt/Ticket/Display.html?id=8561)
|
||||
%global krb5_version 1.15.1-7
|
||||
# 0.7.16: https://github.com/drkjam/netaddr/issues/71
|
||||
%global python_netaddr_version 0.7.16
|
||||
# Require 4.6.0-4 which brings RC4 for FIPS + trust fixes to priv. separation
|
||||
%global samba_version 2:4.6.0-4
|
||||
# Require 4.7.0 which brings Python 3 bindings
|
||||
%global samba_version 2:4.7.0
|
||||
%global selinux_policy_version 3.13.1-158.4
|
||||
%global slapi_nis_version 0.56.1
|
||||
%global python_ldap_version 3.0.0
|
||||
%endif
|
||||
|
||||
|
||||
%define krb5_base_version %(LC_ALL=C rpm -q --qf '%%{VERSION}' krb5-devel | grep -Eo '^[^.]+\.[^.]+')
|
||||
|
||||
%global plugin_dir %{_libdir}/dirsrv/plugins
|
||||
@@ -98,8 +107,8 @@ BuildRequires: automake
|
||||
BuildRequires: libtool
|
||||
BuildRequires: gettext
|
||||
BuildRequires: gettext-devel
|
||||
BuildRequires: python-devel
|
||||
BuildRequires: python-setuptools
|
||||
BuildRequires: python2-devel
|
||||
BuildRequires: python2-setuptools
|
||||
%if 0%{?with_python3}
|
||||
BuildRequires: python3-devel
|
||||
BuildRequires: python3-setuptools
|
||||
@@ -139,12 +148,12 @@ BuildRequires: python-lesscpy
|
||||
# Build dependencies for makeapi/makeaci
|
||||
# makeapi/makeaci is using Python 2 only for now
|
||||
#
|
||||
BuildRequires: python-ldap
|
||||
BuildRequires: python-netaddr
|
||||
BuildRequires: python2-ldap >= %{python_ldap_version}
|
||||
BuildRequires: python2-netaddr
|
||||
BuildRequires: python2-pyasn1
|
||||
BuildRequires: python2-pyasn1-modules
|
||||
BuildRequires: python2-dns
|
||||
BuildRequires: python-six
|
||||
BuildRequires: python2-six
|
||||
BuildRequires: python2-libsss_nss_idmap
|
||||
BuildRequires: python2-cffi
|
||||
|
||||
@@ -165,18 +174,20 @@ BuildRequires: python3-wheel
|
||||
%endif # with_wheels
|
||||
|
||||
#
|
||||
# Build dependencies for lint
|
||||
# Build dependencies for lint and fastcheck
|
||||
#
|
||||
%if 0%{?with_lint}
|
||||
BuildRequires: samba-python
|
||||
BuildRequires: python2-samba
|
||||
# 1.6: x509.Name.rdns (https://github.com/pyca/cryptography/issues/3199)
|
||||
BuildRequires: python2-cryptography >= 1.6
|
||||
BuildRequires: python-gssapi >= 1.2.0-5
|
||||
BuildRequires: python2-gssapi >= 1.2.0-5
|
||||
BuildRequires: softhsm
|
||||
%if 0%{?fedora} >= 26
|
||||
BuildRequires: python2-pylint
|
||||
%else
|
||||
BuildRequires: pylint >= 1.6
|
||||
BuildRequires: pylint >= 1.7
|
||||
%endif
|
||||
BuildRequires: python2-pycodestyle
|
||||
# workaround for https://bugzilla.redhat.com/show_bug.cgi?id=1096506
|
||||
BuildRequires: python2-polib
|
||||
BuildRequires: python2-libipa_hbac
|
||||
@@ -188,33 +199,33 @@ BuildRequires: python2-dns >= 1.15
|
||||
BuildRequires: jsl
|
||||
BuildRequires: python2-yubico
|
||||
# pki Python package
|
||||
BuildRequires: pki-base-python2
|
||||
BuildRequires: python-pytest-multihost
|
||||
BuildRequires: python-pytest-sourceorder
|
||||
BuildRequires: pki-base-python2 >= 10.5.1-2
|
||||
BuildRequires: python2-pytest-multihost
|
||||
BuildRequires: python2-pytest-sourceorder
|
||||
# 0.4.2: Py3 fix https://bugzilla.redhat.com/show_bug.cgi?id=1476150
|
||||
BuildRequires: python-jwcrypto >= 0.4.2
|
||||
BuildRequires: python2-jwcrypto >= 0.4.2
|
||||
# 0.3: sd_notify (https://pagure.io/freeipa/issue/5825)
|
||||
BuildRequires: python2-custodia >= 0.3.1
|
||||
BuildRequires: dbus-python
|
||||
BuildRequires: python2-dbus
|
||||
BuildRequires: python2-dateutil
|
||||
BuildRequires: python-enum34
|
||||
BuildRequires: python-netifaces
|
||||
BuildRequires: python2-enum34
|
||||
BuildRequires: python2-netifaces
|
||||
BuildRequires: python2-sss
|
||||
BuildRequires: python2-sss-murmur
|
||||
BuildRequires: python2-sssdconfig
|
||||
BuildRequires: python2-nose
|
||||
BuildRequires: python2-paste
|
||||
BuildRequires: systemd-python
|
||||
BuildRequires: python2-systemd
|
||||
BuildRequires: python2-jinja2
|
||||
BuildRequires: python2-augeas
|
||||
|
||||
%if 0%{?with_python3}
|
||||
# FIXME: this depedency is missing - server will not work
|
||||
#BuildRequires: python3-samba
|
||||
BuildRequires: python3-samba
|
||||
# 1.6: x509.Name.rdns (https://github.com/pyca/cryptography/issues/3199)
|
||||
BuildRequires: python3-cryptography >= 1.6
|
||||
BuildRequires: python3-gssapi >= 1.2.0
|
||||
BuildRequires: python3-pylint >= 1.6
|
||||
BuildRequires: python3-pylint >= 1.7
|
||||
BuildRequires: python3-pycodestyle
|
||||
# workaround for https://bugzilla.redhat.com/show_bug.cgi?id=1096506
|
||||
BuildRequires: python3-polib
|
||||
BuildRequires: python3-libipa_hbac
|
||||
@@ -226,7 +237,7 @@ BuildRequires: python3-qrcode-core >= 5.0.0
|
||||
BuildRequires: python3-dns >= 1.15
|
||||
BuildRequires: python3-yubico
|
||||
# pki Python package
|
||||
BuildRequires: pki-base-python3
|
||||
BuildRequires: pki-base-python3 >= 10.5.1-2
|
||||
BuildRequires: python3-pytest-multihost
|
||||
BuildRequires: python3-pytest-sourceorder
|
||||
# 0.4.2: Py3 fix https://bugzilla.redhat.com/show_bug.cgi?id=1476150
|
||||
@@ -249,7 +260,7 @@ BuildRequires: python3-augeas
|
||||
BuildRequires: python3-netaddr
|
||||
BuildRequires: python3-pyasn1
|
||||
BuildRequires: python3-pyasn1-modules
|
||||
BuildRequires: python3-pyldap
|
||||
BuildRequires: python3-ldap >= %{python_ldap_version}
|
||||
%endif # with_python3
|
||||
%endif # with_lint
|
||||
|
||||
@@ -258,7 +269,6 @@ BuildRequires: python3-pyldap
|
||||
#
|
||||
%if ! %{ONLY_CLIENT}
|
||||
BuildRequires: libcmocka-devel
|
||||
BuildRequires: nss_wrapper
|
||||
# Required by ipa_kdb_tests
|
||||
BuildRequires: %{_libdir}/krb5/plugins/kdb/db2.so
|
||||
%endif # ONLY_CLIENT
|
||||
@@ -281,10 +291,13 @@ Requires: %{name}-client = %{version}-%{release}
|
||||
Requires: %{name}-common = %{version}-%{release}
|
||||
%if 0%{?with_python3}
|
||||
Requires: python3-ipaserver = %{version}-%{release}
|
||||
Requires: python3-ldap >= %{python_ldap_version}
|
||||
%else
|
||||
Requires: python2-ipaserver = %{version}-%{release}
|
||||
Requires: python2-ldap >= %{python_ldap_version}
|
||||
%endif
|
||||
Requires: 389-ds-base >= 1.3.5.14
|
||||
# 1.3.7.6-1: https://bugzilla.redhat.com/show_bug.cgi?id=1488295
|
||||
Requires: 389-ds-base >= 1.3.7.6-1
|
||||
Requires: openldap-clients > 2.4.35-4
|
||||
Requires: nss >= 3.14.3-12.0
|
||||
Requires: nss-tools >= 3.14.3-12.0
|
||||
@@ -295,9 +308,19 @@ Requires: cyrus-sasl-gssapi%{?_isa}
|
||||
Requires: ntp
|
||||
Requires: httpd >= 2.4.6-31
|
||||
%if 0%{with_python3}
|
||||
Requires(preun): python3
|
||||
Requires(postun): python3
|
||||
Requires: python3-gssapi >= 1.2.0-5
|
||||
Requires: python3-systemd
|
||||
Requires: python3-mod_wsgi
|
||||
Conflicts: mod_wsgi
|
||||
%else
|
||||
Requires(preun): python2
|
||||
Requires(postun): python2
|
||||
Requires: python2-gssapi >= 1.2.0-5
|
||||
Requires: python2-systemd
|
||||
Requires: mod_wsgi
|
||||
Conflicts: python3-mod_wsgi
|
||||
%endif
|
||||
Requires: mod_auth_gssapi >= 1.5.0
|
||||
# 1.0.14-3: https://bugzilla.redhat.com/show_bug.cgi?id=1431206
|
||||
@@ -305,8 +328,6 @@ Requires: mod_nss >= 1.0.14-3
|
||||
Requires: mod_session
|
||||
# 0.9.9: https://github.com/adelton/mod_lookup_identity/pull/3
|
||||
Requires: mod_lookup_identity >= 0.9.9
|
||||
Requires: python-ldap >= 2.4.15
|
||||
Requires: python-gssapi >= 1.2.0-5
|
||||
Requires: acl
|
||||
Requires: systemd-units >= 38
|
||||
Requires(pre): shadow-utils
|
||||
@@ -315,21 +336,21 @@ Requires(post): systemd-units
|
||||
Requires: selinux-policy >= %{selinux_policy_version}
|
||||
Requires(post): selinux-policy-base >= %{selinux_policy_version}
|
||||
Requires: slapi-nis >= %{slapi_nis_version}
|
||||
Requires: pki-ca >= 10.4.0-1
|
||||
Requires: pki-kra >= 10.4.0-1
|
||||
Requires(preun): python systemd-units
|
||||
Requires(postun): python systemd-units
|
||||
# 10.5.1-2 contains Python 3 vault fix
|
||||
Requires: pki-ca >= 10.5.1-2
|
||||
Requires: pki-kra >= 10.5.1-2
|
||||
Requires(preun): systemd-units
|
||||
Requires(postun): systemd-units
|
||||
Requires: policycoreutils >= 2.1.12-5
|
||||
Requires: tar
|
||||
# certmonger-0.79.4-2 fixes newlines in PEM files
|
||||
Requires(pre): certmonger >= 0.79.4-2
|
||||
Requires(pre): 389-ds-base >= 1.3.5.14
|
||||
Requires(pre): certmonger >= 0.79.5-1
|
||||
# 1.3.7.6-1: https://bugzilla.redhat.com/show_bug.cgi?id=1488295
|
||||
Requires(pre): 389-ds-base >= 1.3.7.6-1
|
||||
Requires: fontawesome-fonts
|
||||
Requires: open-sans-fonts
|
||||
Requires: openssl
|
||||
Requires: softhsm >= 2.0.0rc1-1
|
||||
Requires: p11-kit
|
||||
Requires: systemd-python
|
||||
Requires: %{etc_systemd_dir}
|
||||
Requires: gzip
|
||||
Requires: oddjob
|
||||
@@ -372,16 +393,16 @@ Requires: %{name}-server-common = %{version}-%{release}
|
||||
Requires: %{name}-common = %{version}-%{release}
|
||||
Requires: python2-ipaclient = %{version}-%{release}
|
||||
Requires: python2-custodia >= 0.3.1
|
||||
Requires: python-ldap >= 2.4.15
|
||||
Requires: python2-ldap >= %{python_ldap_version}
|
||||
Requires: python2-lxml
|
||||
Requires: python-gssapi >= 1.2.0-5
|
||||
Requires: python2-gssapi >= 1.2.0-5
|
||||
Requires: python2-sssdconfig
|
||||
Requires: python2-pyasn1
|
||||
Requires: dbus-python
|
||||
Requires: python2-pyasn1 >= 0.3.2-2
|
||||
Requires: python2-dbus
|
||||
Requires: python2-dns >= 1.15
|
||||
Requires: python-kdcproxy >= 0.3
|
||||
Requires: python2-kdcproxy >= 0.3
|
||||
Requires: rpm-libs
|
||||
Requires: pki-base-python2
|
||||
Requires: pki-base-python2 >= 10.5.1-2
|
||||
Requires: python2-augeas
|
||||
|
||||
%description -n python2-ipaserver
|
||||
@@ -405,17 +426,17 @@ Requires: %{name}-common = %{version}-%{release}
|
||||
Requires: python3-ipaclient = %{version}-%{release}
|
||||
Requires: python3-custodia >= 0.3.1
|
||||
# we need pre-requires since earlier versions may break upgrade
|
||||
Requires(pre): python3-pyldap >= 2.4.35.1-2
|
||||
Requires(pre): python3-ldap >= %{python_ldap_version}
|
||||
Requires: python3-lxml
|
||||
Requires: python3-gssapi >= 1.2.0
|
||||
Requires: python3-sssdconfig
|
||||
Requires: python3-pyasn1
|
||||
Requires: python3-pyasn1 >= 0.3.2-2
|
||||
Requires: python3-dbus
|
||||
Requires: python3-dns >= 1.15
|
||||
Requires: python3-kdcproxy >= 0.3
|
||||
Requires: python3-augeas
|
||||
Requires: rpm-libs
|
||||
Requires: pki-base-python3
|
||||
Requires: pki-base-python3 >= 10.5.1-2
|
||||
|
||||
%description -n python3-ipaserver
|
||||
IPA is an integrated solution to provide centrally managed Identity (users,
|
||||
@@ -483,18 +504,28 @@ Summary: Virtual package to install packages required for Active Directory trust
|
||||
Group: System Environment/Base
|
||||
Requires: %{name}-server = %{version}-%{release}
|
||||
Requires: %{name}-common = %{version}-%{release}
|
||||
Requires: samba-python
|
||||
|
||||
Requires: samba >= %{samba_version}
|
||||
Requires: samba-winbind
|
||||
Requires: libsss_idmap
|
||||
Requires: python-libsss_nss_idmap
|
||||
Requires: python-sss
|
||||
|
||||
%if 0%{?with_python3}
|
||||
Requires(post): python3
|
||||
Requires: python3-samba
|
||||
Requires: python3-libsss_nss_idmap
|
||||
Requires: python3-sss
|
||||
%else
|
||||
Requires(post): python2
|
||||
Requires: python2-samba
|
||||
Requires: python2-libsss_nss_idmap
|
||||
Requires: python2-sss
|
||||
%endif # with_python3
|
||||
|
||||
# We use alternatives to divert winbind_krb5_locator.so plugin to libkrb5
|
||||
# on the installes where server-trust-ad subpackage is installed because
|
||||
# IPA AD trusts cannot be used at the same time with the locator plugin
|
||||
# since Winbindd will be configured in a different mode
|
||||
Requires(post): %{_sbindir}/update-alternatives
|
||||
Requires(post): python
|
||||
Requires(postun): %{_sbindir}/update-alternatives
|
||||
Requires(preun): %{_sbindir}/update-alternatives
|
||||
|
||||
@@ -516,11 +547,14 @@ Group: System Environment/Base
|
||||
Requires: %{name}-client-common = %{version}-%{release}
|
||||
Requires: %{name}-common = %{version}-%{release}
|
||||
%if 0%{?with_python3}
|
||||
Requires: python3-gssapi >= 1.2.0-5
|
||||
Requires: python3-ipaclient = %{version}-%{release}
|
||||
Requires: python3-ldap >= %{python_ldap_version}
|
||||
%else
|
||||
Requires: python2-gssapi >= 1.2.0-5
|
||||
Requires: python2-ipaclient = %{version}-%{release}
|
||||
Requires: python2-ldap >= %{python_ldap_version}
|
||||
%endif
|
||||
Requires: python-ldap
|
||||
Requires: cyrus-sasl-gssapi%{?_isa}
|
||||
Requires: ntp
|
||||
Requires: krb5-workstation >= %{krb5_version}
|
||||
@@ -531,13 +565,11 @@ Requires: initscripts
|
||||
Requires: libcurl >= 7.21.7-2
|
||||
Requires: xmlrpc-c >= 1.27.4
|
||||
Requires: sssd >= 1.14.0
|
||||
Requires: python-sssdconfig
|
||||
# certmonger-0.79.4-2 fixes newlines in PEM files
|
||||
Requires: certmonger >= 0.79.4-2
|
||||
Requires: python2-sssdconfig
|
||||
Requires: certmonger >= 0.79.5-1
|
||||
Requires: nss-tools
|
||||
Requires: bind-utils
|
||||
Requires: oddjob-mkhomedir
|
||||
Requires: python-gssapi >= 1.2.0-5
|
||||
Requires: libsss_autofs
|
||||
Requires: autofs
|
||||
Requires: libnfsidmap
|
||||
@@ -678,31 +710,31 @@ Provides: python2-ipaplatform = %{version}-%{release}
|
||||
%{?python_provide:%python_provide python2-ipaplatform}
|
||||
%{!?python_provide:Provides: python-ipaplatform = %{version}-%{release}}
|
||||
Requires: %{name}-common = %{version}-%{release}
|
||||
Requires: python-gssapi >= 1.2.0-5
|
||||
Requires: python2-gssapi >= 1.2.0-5
|
||||
Requires: gnupg
|
||||
Requires: keyutils
|
||||
Requires: pyOpenSSL
|
||||
Requires: python >= 2.7.9
|
||||
Requires: python2 >= 2.7.9
|
||||
Requires: python2-cryptography >= 1.6
|
||||
Requires: python-netaddr >= %{python_netaddr_version}
|
||||
Requires: python2-netaddr >= %{python_netaddr_version}
|
||||
Requires: python2-libipa_hbac
|
||||
Requires: python-qrcode-core >= 5.0.0
|
||||
Requires: python2-pyasn1
|
||||
Requires: python2-pyasn1-modules
|
||||
Requires: python2-pyasn1 >= 0.3.2-2
|
||||
Requires: python2-pyasn1-modules >= 0.3.2-2
|
||||
Requires: python2-dateutil
|
||||
Requires: python2-yubico >= 1.2.3
|
||||
Requires: python2-sss-murmur
|
||||
Requires: dbus-python
|
||||
Requires: python2-dbus
|
||||
Requires: python2-setuptools
|
||||
Requires: python-six
|
||||
Requires: python2-six
|
||||
# 0.4.2: Py3 fix https://bugzilla.redhat.com/show_bug.cgi?id=1476150
|
||||
Requires: python-jwcrypto >= 0.4.2
|
||||
Requires: python2-jwcrypto >= 0.4.2
|
||||
Requires: python2-cffi
|
||||
Requires: python-ldap >= 2.4.15
|
||||
Requires: python2-ldap >= %{python_ldap_version}
|
||||
Requires: python2-requests
|
||||
Requires: python2-dns >= 1.15
|
||||
Requires: python-enum34
|
||||
Requires: python-netifaces >= 0.10.4
|
||||
Requires: python2-enum34
|
||||
Requires: python2-netifaces >= 0.10.4
|
||||
Requires: pyusb
|
||||
|
||||
Conflicts: %{alt_name}-python < %{version}
|
||||
@@ -736,8 +768,8 @@ Requires: python3-cryptography >= 1.6
|
||||
Requires: python3-netaddr >= %{python_netaddr_version}
|
||||
Requires: python3-libipa_hbac
|
||||
Requires: python3-qrcode-core >= 5.0.0
|
||||
Requires: python3-pyasn1
|
||||
Requires: python3-pyasn1-modules
|
||||
Requires: python3-pyasn1 >= 0.3.2-2
|
||||
Requires: python3-pyasn1-modules >= 0.3.2-2
|
||||
Requires: python3-dateutil
|
||||
# fixes searching for yubikeys in python3
|
||||
Requires: python3-yubico >= 1.3.2-7
|
||||
@@ -749,7 +781,7 @@ Requires: python3-six
|
||||
Requires: python3-jwcrypto >= 0.4.2
|
||||
Requires: python3-cffi
|
||||
# we need pre-requires since earlier versions may break upgrade
|
||||
Requires(pre): python3-pyldap >= 2.4.35.1-2
|
||||
Requires(pre): python3-ldap >= %{python_ldap_version}
|
||||
Requires: python3-requests
|
||||
Requires: python3-dns >= 1.15
|
||||
Requires: python3-netifaces >= 0.10.4
|
||||
@@ -806,8 +838,8 @@ Requires: python2-paste
|
||||
Requires: python2-coverage
|
||||
# workaround for https://bugzilla.redhat.com/show_bug.cgi?id=1096506
|
||||
Requires: python2-polib
|
||||
Requires: python-pytest-multihost >= 0.5
|
||||
Requires: python-pytest-sourceorder
|
||||
Requires: python2-pytest-multihost >= 0.5
|
||||
Requires: python2-pytest-sourceorder
|
||||
Requires: ldns-utils
|
||||
Requires: python2-sssdconfig
|
||||
Requires: python2-cryptography >= 1.6
|
||||
@@ -911,6 +943,7 @@ install/tools/ipa-cacert-manage
|
||||
install/tools/ipa-compat-manage
|
||||
install/tools/ipa-csreplica-manage
|
||||
install/tools/ipa-custodia
|
||||
install/tools/ipa-custodia-check
|
||||
install/tools/ipa-dns-install
|
||||
install/tools/ipa-httpd-kdcproxy
|
||||
install/tools/ipa-kra-install
|
||||
@@ -1019,10 +1052,21 @@ mv %{buildroot}%{_bindir}/ipa-test-task %{buildroot}%{_bindir}/ipa-test-task-%{p
|
||||
ln -s %{_bindir}/ipa-run-tests-%{python2_version} %{buildroot}%{_bindir}/ipa-run-tests-2
|
||||
ln -s %{_bindir}/ipa-test-config-%{python2_version} %{buildroot}%{_bindir}/ipa-test-config-2
|
||||
ln -s %{_bindir}/ipa-test-task-%{python2_version} %{buildroot}%{_bindir}/ipa-test-task-2
|
||||
# test framework defaults to Python 2
|
||||
%endif # with_ipatests
|
||||
|
||||
# Decide which Python (2 or 3) should be used as default for tests
|
||||
%if 0%{?with_ipatests}
|
||||
%if 0%{?with_python3}
|
||||
# Building with python3 => make it default for tests
|
||||
ln -s %{_bindir}/ipa-run-tests-%{python3_version} %{buildroot}%{_bindir}/ipa-run-tests
|
||||
ln -s %{_bindir}/ipa-test-config-%{python3_version} %{buildroot}%{_bindir}/ipa-test-config
|
||||
ln -s %{_bindir}/ipa-test-task-%{python3_version} %{buildroot}%{_bindir}/ipa-test-task
|
||||
%else
|
||||
# Building python2 only => make it default for tests
|
||||
ln -s %{_bindir}/ipa-run-tests-%{python2_version} %{buildroot}%{_bindir}/ipa-run-tests
|
||||
ln -s %{_bindir}/ipa-test-config-%{python2_version} %{buildroot}%{_bindir}/ipa-test-config
|
||||
ln -s %{_bindir}/ipa-test-task-%{python2_version} %{buildroot}%{_bindir}/ipa-test-task
|
||||
%endif # with_python3
|
||||
%endif # with_ipatests
|
||||
|
||||
# remove files which are useful only for make uninstall
|
||||
@@ -1059,9 +1103,7 @@ mkdir -p %{buildroot}%{_sysconfdir}/httpd/conf.d/
|
||||
/bin/touch %{buildroot}%{_sysconfdir}/httpd/conf.d/ipa-pki-proxy.conf
|
||||
/bin/touch %{buildroot}%{_sysconfdir}/httpd/conf.d/ipa-rewrite.conf
|
||||
/bin/touch %{buildroot}%{_usr}/share/ipa/html/ca.crt
|
||||
/bin/touch %{buildroot}%{_usr}/share/ipa/html/kerberosauth.xpi
|
||||
/bin/touch %{buildroot}%{_usr}/share/ipa/html/krb.con
|
||||
/bin/touch %{buildroot}%{_usr}/share/ipa/html/krb.js
|
||||
/bin/touch %{buildroot}%{_usr}/share/ipa/html/krb5.ini
|
||||
/bin/touch %{buildroot}%{_usr}/share/ipa/html/krbrealm.con
|
||||
|
||||
@@ -1098,24 +1140,21 @@ fi
|
||||
|
||||
%posttrans server
|
||||
# don't execute upgrade and restart of IPA when server is not installed
|
||||
python2 -c "import sys; from ipaserver.install import installutils; sys.exit(0 if installutils.is_ipa_configured() else 1);" > /dev/null 2>&1
|
||||
%{python} -c "import sys; from ipaserver.install import installutils; sys.exit(0 if installutils.is_ipa_configured() else 1);" > /dev/null 2>&1
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
# This is necessary for Fedora system upgrades which by default
|
||||
# work with the network being offline
|
||||
/bin/systemctl start network-online.target
|
||||
|
||||
# This must be run in posttrans so that updates from previous
|
||||
# execution that may no longer be shipped are not applied.
|
||||
/usr/sbin/ipa-server-upgrade --quiet >/dev/null || :
|
||||
|
||||
# Restart IPA processes. This must be also run in postrans so that plugins
|
||||
# and software is in consistent state
|
||||
# and software is in consistent state. This will also perform the
|
||||
# system upgrade.
|
||||
# NOTE: systemd specific section
|
||||
|
||||
/bin/systemctl is-enabled ipa.service >/dev/null 2>&1
|
||||
if [ $? -eq 0 ]; then
|
||||
/bin/systemctl restart ipa.service >/dev/null 2>&1 || :
|
||||
/bin/systemctl restart ipa.service >/dev/null
|
||||
fi
|
||||
fi
|
||||
# END
|
||||
@@ -1167,7 +1206,7 @@ fi
|
||||
|
||||
|
||||
%posttrans server-trust-ad
|
||||
python2 -c "import sys; from ipaserver.install import installutils; sys.exit(0 if installutils.is_ipa_configured() else 1);" > /dev/null 2>&1
|
||||
%{python} -c "import sys; from ipaserver.install import installutils; sys.exit(0 if installutils.is_ipa_configured() else 1);" > /dev/null 2>&1
|
||||
if [ $? -eq 0 ]; then
|
||||
# NOTE: systemd specific section
|
||||
/bin/systemctl try-restart httpd.service >/dev/null 2>&1 || :
|
||||
@@ -1218,7 +1257,7 @@ if [ $1 -gt 1 ] ; then
|
||||
fi
|
||||
|
||||
if [ $restore -ge 2 ]; then
|
||||
python2 -c 'from ipaclient.install.client import update_ipa_nssdb; update_ipa_nssdb()' >/var/log/ipaupgrade.log 2>&1
|
||||
%{python} -c 'from ipaclient.install.client import update_ipa_nssdb; update_ipa_nssdb()' >/var/log/ipaupgrade.log 2>&1
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -1291,6 +1330,7 @@ fi
|
||||
%{_libexecdir}/certmonger/ipa-server-guard
|
||||
%dir %{_libexecdir}/ipa
|
||||
%{_libexecdir}/ipa/ipa-custodia
|
||||
%{_libexecdir}/ipa/ipa-custodia-check
|
||||
%{_libexecdir}/ipa/ipa-dnskeysyncd
|
||||
%{_libexecdir}/ipa/ipa-dnskeysync-replica
|
||||
%{_libexecdir}/ipa/ipa-ods-exporter
|
||||
@@ -1399,10 +1439,7 @@ fi
|
||||
%{_usr}/share/ipa/profiles/README
|
||||
%{_usr}/share/ipa/profiles/*.cfg
|
||||
%dir %{_usr}/share/ipa/html
|
||||
%{_usr}/share/ipa/html/ffconfig.js
|
||||
%{_usr}/share/ipa/html/ffconfig_page.js
|
||||
%{_usr}/share/ipa/html/ssbrowser.html
|
||||
%{_usr}/share/ipa/html/browserconfig.html
|
||||
%{_usr}/share/ipa/html/unauthorized.html
|
||||
%dir %{_usr}/share/ipa/migration
|
||||
%{_usr}/share/ipa/migration/error.html
|
||||
@@ -1434,11 +1471,8 @@ fi
|
||||
%{_usr}/share/ipa/wsgi/plugins.py*
|
||||
%dir %{_sysconfdir}/ipa
|
||||
%dir %{_sysconfdir}/ipa/html
|
||||
%config(noreplace) %{_sysconfdir}/ipa/html/ffconfig.js
|
||||
%config(noreplace) %{_sysconfdir}/ipa/html/ffconfig_page.js
|
||||
%config(noreplace) %{_sysconfdir}/ipa/html/ssbrowser.html
|
||||
%config(noreplace) %{_sysconfdir}/ipa/html/unauthorized.html
|
||||
%config(noreplace) %{_sysconfdir}/ipa/html/browserconfig.html
|
||||
%ghost %attr(0644,root,apache) %config(noreplace) %{_sysconfdir}/httpd/conf.d/ipa-rewrite.conf
|
||||
%ghost %attr(0644,root,apache) %config(noreplace) %{_sysconfdir}/httpd/conf.d/ipa.conf
|
||||
%ghost %attr(0644,root,apache) %config(noreplace) %{_sysconfdir}/httpd/conf.d/ipa-kdc-proxy.conf
|
||||
@@ -1449,9 +1483,7 @@ fi
|
||||
%{_usr}/share/ipa/ipa-rewrite.conf
|
||||
%{_usr}/share/ipa/ipa-pki-proxy.conf
|
||||
%ghost %attr(0644,root,apache) %config(noreplace) %{_usr}/share/ipa/html/ca.crt
|
||||
%ghost %attr(0644,root,apache) %{_usr}/share/ipa/html/kerberosauth.xpi
|
||||
%ghost %attr(0644,root,apache) %{_usr}/share/ipa/html/krb.con
|
||||
%ghost %attr(0644,root,apache) %{_usr}/share/ipa/html/krb.js
|
||||
%ghost %attr(0644,root,apache) %{_usr}/share/ipa/html/krb5.ini
|
||||
%ghost %attr(0644,root,apache) %{_usr}/share/ipa/html/krbrealm.con
|
||||
%dir %{_usr}/share/ipa/updates/
|
||||
@@ -1459,7 +1491,7 @@ fi
|
||||
%dir %{_localstatedir}/lib/ipa
|
||||
%attr(700,root,root) %dir %{_localstatedir}/lib/ipa/backup
|
||||
%attr(700,root,root) %dir %{_localstatedir}/lib/ipa/gssproxy
|
||||
%attr(700,root,root) %dir %{_localstatedir}/lib/ipa/sysrestore
|
||||
%attr(711,root,root) %dir %{_localstatedir}/lib/ipa/sysrestore
|
||||
%attr(700,root,root) %dir %{_localstatedir}/lib/ipa/sysupgrade
|
||||
%attr(755,root,root) %dir %{_localstatedir}/lib/ipa/pki-ca
|
||||
%ghost %{_localstatedir}/lib/ipa/pki-ca/publish
|
||||
@@ -1614,6 +1646,7 @@ fi
|
||||
%{python_sitelib}/ipapython-*.egg-info
|
||||
%{python_sitelib}/ipalib-*.egg-info
|
||||
%{python_sitelib}/ipaplatform-*.egg-info
|
||||
%{python_sitelib}/ipaplatform-*-nspkg.pth
|
||||
|
||||
|
||||
%files common -f %{gettext_domain}.lang
|
||||
@@ -1635,6 +1668,7 @@ fi
|
||||
%{python3_sitelib}/ipapython-*.egg-info
|
||||
%{python3_sitelib}/ipalib-*.egg-info
|
||||
%{python3_sitelib}/ipaplatform-*.egg-info
|
||||
%{python3_sitelib}/ipaplatform-*-nspkg.pth
|
||||
|
||||
%endif # with_python3
|
||||
|
||||
|
||||
@@ -1,87 +0,0 @@
|
||||
#
|
||||
# Copyright (C) 2016 FreeIPA Contributors see COPYING for license
|
||||
#
|
||||
|
||||
"""
|
||||
ImportError ignoring import hook.
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import imp
|
||||
import inspect
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
DIRNAME = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
class FailedImport(object):
|
||||
def __init__(self, loader, name):
|
||||
self.__file__ = __file__
|
||||
self.__name__ = name
|
||||
self.__path__ = []
|
||||
self.__loader__ = loader
|
||||
self.__package__ = name
|
||||
|
||||
def __repr__(self):
|
||||
return '<failed import {!r}>'.format(self.__name__)
|
||||
|
||||
|
||||
class IgnoringImporter(object):
|
||||
def find_module(self, fullname, path=None):
|
||||
parentname, dot, name = fullname.rpartition('.')
|
||||
assert (not dot and path is None) or (dot and path is not None)
|
||||
|
||||
# check if the module can be found
|
||||
try:
|
||||
file, _filename, _description = imp.find_module(name, path)
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
if file is not None:
|
||||
file.close()
|
||||
# it can be found, do normal import
|
||||
return None
|
||||
|
||||
# check if the parent module import failed
|
||||
if dot and isinstance(sys.modules[parentname], FailedImport):
|
||||
# it did fail, so this import will fail as well
|
||||
return self
|
||||
|
||||
# find out from where are we importing
|
||||
if path is None:
|
||||
path = sys.path
|
||||
for pathname in path:
|
||||
pathname = os.path.abspath(pathname)
|
||||
if not pathname.startswith(DIRNAME):
|
||||
break
|
||||
else:
|
||||
# importing from our source tree, do normal import
|
||||
return None
|
||||
|
||||
# find out into what .py file are we importing
|
||||
frame = inspect.currentframe().f_back
|
||||
filename = frame.f_code.co_filename
|
||||
if filename.startswith('<'):
|
||||
# not a file, do normal import
|
||||
return None
|
||||
filename = os.path.abspath(filename)
|
||||
if not filename.startswith(DIRNAME):
|
||||
# not a file in our source tree, do normal import
|
||||
return None
|
||||
|
||||
return self
|
||||
|
||||
def load_module(self, fullname):
|
||||
frame = inspect.currentframe().f_back
|
||||
print("{}: {}:{}: ignoring ImportError: No module named {}".format(
|
||||
sys.argv[0],
|
||||
os.path.relpath(frame.f_code.co_filename),
|
||||
frame.f_lineno,
|
||||
fullname))
|
||||
|
||||
return sys.modules.setdefault(fullname, FailedImport(self, fullname))
|
||||
|
||||
|
||||
sys.meta_path.insert(0, IgnoringImporter())
|
||||
@@ -438,7 +438,7 @@ def renew_ca_cert(reuse_existing, **kwargs):
|
||||
if operation == 'SUBMIT':
|
||||
state = 'retrieve'
|
||||
|
||||
if is_self_signed and not reuse_existing and is_renewal_master():
|
||||
if not reuse_existing and is_renewal_master():
|
||||
state = 'request'
|
||||
|
||||
csr_file = paths.IPA_CA_CSR
|
||||
|
||||
@@ -27,10 +27,6 @@ KeepAliveTimeout 30
|
||||
|
||||
# ipa-rewrite.conf is loaded separately
|
||||
|
||||
# This is required so the auto-configuration works with Firefox 2+
|
||||
AddType application/java-archive jar
|
||||
AddType application/x-xpinstall xpi
|
||||
|
||||
# Proper header for .tff fonts
|
||||
AddType application/x-font-ttf ttf
|
||||
|
||||
@@ -52,7 +48,8 @@ WSGISocketPrefix /run/httpd/wsgi
|
||||
|
||||
# Configure mod_wsgi handler for /ipa
|
||||
WSGIDaemonProcess ipa processes=2 threads=1 maximum-requests=500 \
|
||||
user=ipaapi group=ipaapi display-name=%{GROUP} socket-timeout=2147483647
|
||||
user=ipaapi group=ipaapi display-name=%{GROUP} socket-timeout=2147483647 \
|
||||
lang=C.UTF-8 locale=C.UTF-8
|
||||
WSGIImportScript /usr/share/ipa/wsgi.py process-group=ipa application-group=ipa
|
||||
WSGIScriptAlias /ipa /usr/share/ipa/wsgi.py
|
||||
WSGIScriptReloading Off
|
||||
|
||||
@@ -2,10 +2,7 @@ NULL =
|
||||
|
||||
appdir = $(IPA_SYSCONF_DIR)/html
|
||||
app_DATA = \
|
||||
ffconfig.js \
|
||||
ffconfig_page.js \
|
||||
ssbrowser.html \
|
||||
browserconfig.html \
|
||||
unauthorized.html \
|
||||
$(NULL)
|
||||
|
||||
|
||||
@@ -1,128 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>IPA: Identity Policy Audit</title>
|
||||
<script type="text/javascript" src="../ui/js/libs/loader.js"></script>
|
||||
<script type="text/javascript">
|
||||
(function() {
|
||||
var styles = [
|
||||
'../ui/css/patternfly.css',
|
||||
'../ui/css/ipa.css'
|
||||
];
|
||||
var scripts = [
|
||||
'../ui/js/libs/jquery.js',
|
||||
'krb.js',
|
||||
'ffconfig.js',
|
||||
'ffconfig_page.js'
|
||||
];
|
||||
ipa_loader.scripts(scripts);
|
||||
ipa_loader.styles(styles);
|
||||
})();
|
||||
</script>
|
||||
</head>
|
||||
|
||||
<body class="info-page">
|
||||
|
||||
<nav class="navbar navbar-default navbar-pf" role="navigation">
|
||||
<div class="navbar-header">
|
||||
<a class="brand" href="../ui/index.html"><img src="../ui/images/header-logo.png" alt="FreeIPA"></a>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="container-fluid">
|
||||
<div class="row">
|
||||
<div class="col-sm-12">
|
||||
<div class="browser-config">
|
||||
<h1>Firefox configuration</h1>
|
||||
|
||||
<div id="wrongbrowser" style="display:none" class="step">
|
||||
<h2>Wrong Browser!</h2>
|
||||
<p>
|
||||
<a href="ssbrowser.html" id="notfirefox-link" class="btn btn-default" >Proceed to configuration for other browsers</a>
|
||||
</p>
|
||||
<p>
|
||||
You are not using Firefox. Please use configuration page for other browsers.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div id="step1" class="step">
|
||||
<h2>Step 1</h2>
|
||||
<p>
|
||||
<a href="ca.crt" id="ca-link" class="btn btn-default">Import Certificate Authority certificate</a>
|
||||
</p>
|
||||
<p>
|
||||
Make sure you select <b>all three</b> checkboxes.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div id="step2a" style="display:none" class="step">
|
||||
<h2>Step 2a</h2>
|
||||
<p>
|
||||
<a href="ssbrowser.html#oldfirefox" id="oldfirefox-link" class="btn btn-default">Proceed to configuration page for older versions of Firefox</a>
|
||||
</p>
|
||||
<!--<p id="older-compatible">
|
||||
Your version of Firefox can be configured by an older method. You don't have to use the Kerberos Configuration extension.
|
||||
</p>-->
|
||||
<p id="older-required">
|
||||
Your version of Firefox is not compatible with configuration method on this page. Please use configuration page for older versions of Firefox.
|
||||
<p>
|
||||
</div>
|
||||
|
||||
<div id="step2" style="display:none" class="step">
|
||||
<h2>Step 2</h2>
|
||||
<div id="ext-installed">
|
||||
<p>
|
||||
<a href="kerberosauth.xpi" id="reinstall-link" class="btn btn-default">Re-install extension</a>
|
||||
</p>
|
||||
<p>
|
||||
Extension installed. You can proceed to Step 3.
|
||||
</p>
|
||||
</div>
|
||||
<div id="ext-missing">
|
||||
<p><a href="kerberosauth.xpi" id="install-link" class="btn btn-default">Install Kerberos Configuration Firefox extension</a></p>
|
||||
<p>Kerberos Configuration extension is required for Step 3</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="step3" style="display:none" class="step">
|
||||
<h2>Step 3</h2>
|
||||
<p><a href="#configure-browser" id="configure-link" class="btn btn-default">Configure browser</a></p>
|
||||
<p id="config-success" style="display:none">Browser successfully configured</p>
|
||||
<p id="config-aborted" style="display:none">Configuration aborted</p>
|
||||
<p id="config-noext" style="display:none">Configuration was not successful, extension isn't installed or is disabled. Please proceed to step 2.</p>
|
||||
<p id="config-error" style="display:none">Configuration was not successful, unknown error occurred.</p>
|
||||
</div>
|
||||
|
||||
<div id="step2b" class="step">
|
||||
<h2>Step 2</h2>
|
||||
<ol>
|
||||
<li>
|
||||
In the address bar of Firefox, type <code>about:config</code> and press enter to display the list of current configuration options.
|
||||
</li>
|
||||
<li>
|
||||
In the Filter field, type <code>network.negotiate-auth.trusted-uris</code> to restrict the list of options.
|
||||
</li>
|
||||
<li>
|
||||
Double-click the entry to display the "Enter string value" dialog box.
|
||||
</li>
|
||||
<li>
|
||||
Enter the name of the domain against which you want to authenticate, <code class="example-domain">.example.com</code> as an example.
|
||||
</li>
|
||||
</ol>
|
||||
</div>
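
A minimal sketch of the equivalent setting expressed as a user.js preference; the profile location and the ".example.com" value here are assumptions, so substitute the domain of your own realm:

// user.js in the Firefox profile directory (assumed location) -- read once at startup
user_pref("network.negotiate-auth.trusted-uris", ".example.com");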

<div id="step4" class="step">
<h2 id="step4header" style="display:none">Step 4</h2>
<h2 id="step3bheader">Step 3</h2>
<p><a href="../ui/index.html" id="return-link" class="btn btn-default">Return to Web UI</a></p>
</div>
</div>

</div>
</div>
</div>

</body>

</html>
@@ -1,146 +0,0 @@
|
||||
/* Authors:
|
||||
* Petr Vobornik <pvoborni@redhat.com>
|
||||
*
|
||||
* Copyright (C) 2012 Red Hat
|
||||
* see file 'COPYING' for use and warranty information
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
var IPA = IPA || {};
|
||||
|
||||
IPA.browser_config = {
|
||||
|
||||
configure_firefox: function(domain) {
|
||||
|
||||
var self = this;
|
||||
|
||||
domain = domain || self.get_domain();
|
||||
|
||||
self.send_message({
|
||||
method: 'configure',
|
||||
predefined: 'ipa',
|
||||
trusted_uris: domain
|
||||
});
|
||||
},
|
||||
|
||||
|
||||
get_configure_result: function() {
|
||||
|
||||
var self = this;
|
||||
|
||||
var el = self.get_data_element();
|
||||
|
||||
var answer = el.getAttribute('answer');
|
||||
|
||||
return answer;
|
||||
},
|
||||
|
||||
get_domain: function() {
|
||||
return "."+IPA_DOMAIN;
|
||||
},
|
||||
|
||||
send_message: function(options) {
|
||||
|
||||
options = options || {};
|
||||
|
||||
var self = this;
|
||||
|
||||
self.clear_data_element();
|
||||
var opt_element = self.get_data_element();
|
||||
|
||||
for (var opt in options) {
|
||||
opt_element.setAttribute(opt, options[opt]);
|
||||
}
|
||||
|
||||
var msg_evt = document.createEvent('HTMLEvents');
|
||||
msg_evt.initEvent('kerberos-auth-config', true, false);
|
||||
opt_element.dispatchEvent(msg_evt);
|
||||
},
|
||||
|
||||
get_data_element: function() {
|
||||
|
||||
var els = document.getElementsByTagName('kerberosauthdataelement');
|
||||
var element;
|
||||
|
||||
if (els.length === 0) {
|
||||
element = document.createElement('kerberosauthdataelement');
|
||||
document.documentElement.appendChild(element);
|
||||
} else {
|
||||
element = els[0];
|
||||
}
|
||||
|
||||
return element;
|
||||
},
|
||||
|
||||
clear_data_element: function() {
|
||||
|
||||
var self = this;
|
||||
|
||||
var el = self.get_data_element();
|
||||
var to_remove = [];
|
||||
|
||||
for (var i=0; i<el.attributes.length; i++) {
|
||||
to_remove.push(el.attributes[i].name);
|
||||
}
|
||||
|
||||
for (i=0; i<to_remove.length; i++) {
|
||||
el.removeAttribute(to_remove[i]);
|
||||
}
|
||||
},
|
||||
|
||||
extension_installed: function() {
|
||||
|
||||
var self = this;
|
||||
|
||||
self.send_message({
|
||||
method: 'can_configure'
|
||||
});
|
||||
|
||||
var element = self.get_data_element();
|
||||
var ext_installed = element.getAttribute('answer') === 'true';
|
||||
return ext_installed;
|
||||
},
|
||||
|
||||
get_browser: function() {
|
||||
|
||||
var ua = window.navigator.userAgent.toLowerCase();
|
||||
|
||||
var match = (/(chrome)[ \/]([\w.]+)/.exec(ua)) ||
|
||||
(/(webkit)[ \/]([\w.]+)/.exec(ua)) ||
|
||||
(/(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua)) ||
|
||||
(/(msie) ([\w.]+)/.exec(ua)) ||
|
||||
ua.indexOf("compatible") < 0 && (/(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua)) ||
|
||||
[];
|
||||
|
||||
var matched = {
|
||||
browser: match[ 1 ] || "",
|
||||
version: match[ 2 ] || "0"
|
||||
};
|
||||
var browser = {};
|
||||
|
||||
if (matched.browser) {
|
||||
browser[matched.browser] = true;
|
||||
browser.version = matched.version;
|
||||
}
|
||||
|
||||
// Chrome is Webkit, but Webkit is also Safari.
|
||||
if (browser.chrome) {
|
||||
browser.webkit = true;
|
||||
} else if ( browser.webkit ) {
|
||||
browser.safari = true;
|
||||
}
|
||||
return browser;
|
||||
}
|
||||
};
|
||||
@@ -1,163 +0,0 @@
|
||||
/* Authors:
|
||||
* Petr Vobornik <pvoborni@redhat.com>
|
||||
*
|
||||
* Copyright (C) 2012 Red Hat
|
||||
* see file 'COPYING' for use and warranty information
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
$(document).ready(function() {
|
||||
|
||||
var set_enabled = function(steps, enabled) {
|
||||
|
||||
for (var i=0; i<steps.length; i++) {
|
||||
$(steps[i]).toggleClass('disabled', !enabled);
|
||||
$(steps[i]+" .btn").toggleClass('disabled', !enabled);
|
||||
}
|
||||
};
|
||||
|
||||
var show_installed = function(installed) {
|
||||
|
||||
if (installed) {
|
||||
$('#ext-installed').show();
|
||||
$('#ext-missing').hide();
|
||||
} else {
|
||||
$('#ext-installed').hide();
|
||||
$('#ext-missing').show();
|
||||
}
|
||||
set_enabled(['#step3'], installed);
|
||||
};
|
||||
|
||||
var install = function(event) {
|
||||
|
||||
window.location = $(event.target).attr('href');
|
||||
check_until_installed();
|
||||
return false;
|
||||
};
|
||||
|
||||
var check_until_installed = function() {
|
||||
|
||||
var installed = IPA.browser_config.extension_installed();
|
||||
show_installed(installed);
|
||||
|
||||
if (!installed) {
|
||||
window.setTimeout(function() {
|
||||
check_until_installed();
|
||||
}, 300);
|
||||
}
|
||||
};
|
||||
|
||||
var configure = function() {
|
||||
IPA.browser_config.configure_firefox();
|
||||
var result = IPA.browser_config.get_configure_result();
|
||||
var installed = IPA.browser_config.extension_installed();
|
||||
|
||||
$('#config-success').hide();
|
||||
$('#config-aborted').hide();
|
||||
$('#config-noext').hide();
|
||||
$('#config-error').hide();
|
||||
|
||||
if (result === 'configured') {
|
||||
$('#config-success').show();
|
||||
} else if (result == 'aborted') {
|
||||
$('#config-aborted').show();
|
||||
} else if (!installed) {
|
||||
$('#config-noext').show();
|
||||
} else {
|
||||
$('#config-error').show();
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
var check_version = function() {
|
||||
|
||||
var browser = IPA.browser_config.get_browser();
|
||||
|
||||
if (!browser.mozilla) {
|
||||
$('#wrongbrowser').show();
|
||||
set_enabled(['#step2b'], false);
|
||||
} else {
|
||||
// Disable for all versions of FF older than 15. Theoretically
// the extension is compatible with versions 3.6, 10 and later;
// FF 4-9 are not compatible because there is an error in loading
// a resource from chrome.manifest
if (compare_version(browser.version, '15') === -1) {
|
||||
$('#step2a').show();
|
||||
$('#step2').show();
|
||||
$('#step3').show();
|
||||
$('#step4header').show();
|
||||
$('#step3bheader').hide();
|
||||
set_enabled(['#step2', '#step3', '#step2b'], false);
|
||||
} else if (compare_version(browser.version, '40') === -1) {
|
||||
// FF is >= 15 and < 40
|
||||
// show krb extension method
|
||||
$('#step2').show();
|
||||
$('#step3').show();
|
||||
$('#step4header').show();
|
||||
$('#step3bheader').hide();
|
||||
$('#step2b').hide();
|
||||
}
|
||||
// else
|
||||
// Firefox 40 and later enforce a new extension signing policy.
// This policy prevents the use of a self-signed FF extension, so
// a manual config is needed - which is the default (step2b)
|
||||
|
||||
// else if (compare_version(version, '15') === -1) {
|
||||
// $('#step2a').show();
|
||||
// $('#older-compatible').show();
|
||||
// $('#older-required').hide();
|
||||
// }
|
||||
}
|
||||
};
|
||||
|
||||
var compare_version = function(a, b) {
|
||||
|
||||
var only_digits =/[^\d.]/g;
|
||||
|
||||
var a_parts = a.replace(only_digits, '').split('.');
|
||||
var b_parts = b.replace(only_digits, '').split('.');
|
||||
|
||||
for (var i=0; i<a_parts.length && i<b_parts.length; i++) {
|
||||
var a_num = Number(a_parts[i]);
|
||||
var b_num = Number(b_parts[i]);
|
||||
|
||||
if (a_num > b_num) return 1;
|
||||
else if (a_num < b_num) return -1;
|
||||
}
|
||||
|
||||
if (a_parts.length !== b_parts.length) {
|
||||
return a_parts.length > b_parts.length ? 1 : -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
};
|
||||
|
||||
var button_handler = function(handler) {
|
||||
return function(e) {
|
||||
if ($(this).hasClass('disabled')) {
|
||||
e.preventDefault();
|
||||
return false;
|
||||
}
|
||||
return handler.call(this, e);
|
||||
};
|
||||
};
|
||||
|
||||
$('#install-link').click(button_handler(install));
|
||||
$('#reinstall-link').click(button_handler(install));
|
||||
$('#configure-link').click(button_handler(configure));
|
||||
|
||||
check_version();
|
||||
show_installed(IPA.browser_config.extension_installed());
|
||||
});
|
||||
@@ -127,5 +127,3 @@
|
||||
# To add a set of files, use "+process FileName", "+process Folder\Path\*.js",
|
||||
# or "+process Folder\Path\*.htm".
|
||||
#
|
||||
+process ffconfig.js
|
||||
+process ffconfig_page.js
|
||||
@@ -6,23 +6,14 @@
|
||||
<script type="text/javascript" src="../ui/js/libs/loader.js"></script>
|
||||
<script type="text/javascript">
|
||||
(function() {
|
||||
function loaded() {
|
||||
$(document).ready(function() {
|
||||
var domain = '.' + (IPA_DOMAIN || 'example.com');
|
||||
$('.example-domain').text(domain);
|
||||
});
|
||||
}
|
||||
|
||||
var styles = [
|
||||
'../ui/css/patternfly.css',
|
||||
'../ui/css/ipa.css'
|
||||
];
|
||||
var scripts = [
|
||||
'../ui/js/libs/jquery.js',
|
||||
'krb.js',
|
||||
'ffconfig.js'
|
||||
'../ui/js/libs/jquery.js'
|
||||
];
|
||||
ipa_loader.scripts(scripts, loaded);
|
||||
ipa_loader.scripts(scripts);
|
||||
ipa_loader.styles(styles);
|
||||
})();
|
||||
</script>
|
||||
@@ -51,7 +42,12 @@
|
||||
|
||||
<ol>
|
||||
<li>
|
||||
Import <a href="ca.crt">CA certificate</a>. Make sure you checked all three checkboxes.
|
||||
<p>
|
||||
<a href="ca.crt" id="ca-link" class="btn btn-default">Import Certificate Authority certificate</a>
|
||||
</p>
|
||||
<p>
|
||||
Make sure you select <b>all three</b> checkboxes.
|
||||
</p>
|
||||
</li>
|
||||
<li>
|
||||
In the address bar of Firefox, type <code>about:config</code> to display the list of current configuration options.
|
||||
@@ -65,7 +61,7 @@
|
||||
<li>
|
||||
Enter the name of the domain against which you want to authenticate, for example, <code class="example-domain">.example.com.</code>
|
||||
</li>
|
||||
<li><strong> You are all set. </strong></li>
|
||||
<li><a href="../ui/index.html" id="return-link" class="btn btn-default">Return to Web UI</a></li>
|
||||
</ol>
|
||||
|
||||
<h2>Chrome</h2>
|
||||
|
||||
@@ -36,9 +36,7 @@
|
||||
|
||||
<div id="first-time">
|
||||
<p>
|
||||
If this is your first time, please <strong>configure your browser</strong>.
|
||||
Use <a href="browserconfig.html">Firefox configuration page</a> for Firefox
|
||||
or <a href="ssbrowser.html">manual configuration page</a> for other browsers.
|
||||
If this is your first time, please <a href="ssbrowser.html">configure your browser</a>.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -27,8 +27,6 @@ import tempfile
|
||||
import shutil
|
||||
import traceback
|
||||
|
||||
from cryptography.hazmat.primitives import serialization
|
||||
|
||||
from ipalib.install.kinit import kinit_keytab
|
||||
from ipalib import api, x509
|
||||
from ipaserver.install import certs, cainstance
|
||||
@@ -67,10 +65,8 @@ def _main():
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
dercert = cert.public_bytes(serialization.Encoding.DER)
|
||||
|
||||
# Load it into dogtag
|
||||
cainstance.update_people_entry(dercert)
|
||||
cainstance.update_people_entry(cert)
|
||||
finally:
|
||||
if api.Backend.ldap2.isconnected():
|
||||
api.Backend.ldap2.disconnect()
|
||||
|
||||
@@ -30,7 +30,6 @@ dist_app_DATA = \
|
||||
anon-princ-aci.ldif \
|
||||
bootstrap-template.ldif \
|
||||
ca-topology.uldif \
|
||||
caJarSigningCert.cfg.template \
|
||||
custodia.conf.template \
|
||||
default-aci.ldif \
|
||||
default-hbac.ldif \
|
||||
@@ -38,6 +37,7 @@ dist_app_DATA = \
|
||||
default-trust-view.ldif \
|
||||
delegation.ldif \
|
||||
replica-acis.ldif \
|
||||
replica-prevent-time-skew.ldif \
|
||||
ds-nfiles.ldif \
|
||||
dns.ldif \
|
||||
dnssec.ldif \
|
||||
@@ -79,7 +79,6 @@ dist_app_DATA = \
|
||||
sudobind.ldif \
|
||||
automember.ldif \
|
||||
replica-automember.ldif \
|
||||
replica-s4u2proxy.ldif \
|
||||
sasl-mapping-fallback.ldif \
|
||||
schema-update.ldif \
|
||||
vault.ldif \
|
||||
|
||||
@@ -1,88 +0,0 @@
|
||||
desc=Jar Signing certificate to auto-configure Firefox
|
||||
enable=true
|
||||
enableBy=admin
|
||||
lastModified=1239836280692
|
||||
name=Manual Jar Signing Certificate Enrollment
|
||||
visible=true
|
||||
auth.class_id=
|
||||
auth.instance_id=raCertAuth
|
||||
input.list=i1,i2
|
||||
input.i1.class_id=certReqInputImpl
|
||||
input.i2.class_id=submitterInfoInputImpl
|
||||
output.list=o1
|
||||
output.o1.class_id=certOutputImpl
|
||||
policyset.list=caJarSigningSet
|
||||
policyset.caJarSigningSet.list=1,2,3,6,7,9
|
||||
policyset.caJarSigningSet.1.constraint.class_id=subjectNameConstraintImpl
|
||||
policyset.caJarSigningSet.1.constraint.name=Subject Name Constraint
|
||||
policyset.caJarSigningSet.1.constraint.params.accept=true
|
||||
policyset.caJarSigningSet.1.constraint.params.pattern=.*
|
||||
policyset.caJarSigningSet.1.default.class_id=userSubjectNameDefaultImpl
|
||||
policyset.caJarSigningSet.1.default.name=Subject Name Default
|
||||
policyset.caJarSigningSet.1.default.params.name=
|
||||
policyset.caJarSigningSet.2.constraint.class_id=validityConstraintImpl
|
||||
policyset.caJarSigningSet.2.constraint.name=Validity Constraint
|
||||
policyset.caJarSigningSet.2.constraint.params.notAfterCheck=false
|
||||
policyset.caJarSigningSet.2.constraint.params.notBeforeCheck=false
|
||||
policyset.caJarSigningSet.2.constraint.params.range=2922
|
||||
policyset.caJarSigningSet.2.default.class_id=validityDefaultImpl
|
||||
policyset.caJarSigningSet.2.default.name=Validity Default
|
||||
policyset.caJarSigningSet.2.default.params.range=1461
|
||||
policyset.caJarSigningSet.2.default.params.startTime=60
|
||||
policyset.caJarSigningSet.3.constraint.class_id=keyConstraintImpl
|
||||
policyset.caJarSigningSet.3.constraint.name=Key Constraint
|
||||
policyset.caJarSigningSet.3.constraint.params.keyMaxLength=4096
|
||||
policyset.caJarSigningSet.3.constraint.params.keyMinLength=1024
|
||||
policyset.caJarSigningSet.3.constraint.params.keyType=-
|
||||
policyset.caJarSigningSet.3.default.class_id=userKeyDefaultImpl
|
||||
policyset.caJarSigningSet.3.default.name=Key Default
|
||||
policyset.caJarSigningSet.6.constraint.class_id=keyUsageExtConstraintImpl
|
||||
policyset.caJarSigningSet.6.constraint.name=Key Usage Extension Constraint
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageCritical=-
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageCrlSign=-
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageDataEncipherment=-
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageDecipherOnly=-
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageDigitalSignature=-
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageEncipherOnly=-
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageKeyAgreement=-
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageKeyCertSign=-
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageKeyEncipherment=-
|
||||
policyset.caJarSigningSet.6.constraint.params.keyUsageNonRepudiation=-
|
||||
policyset.caJarSigningSet.6.default.class_id=keyUsageExtDefaultImpl
|
||||
policyset.caJarSigningSet.6.default.name=Key Usage Default
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageCritical=true
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageCrlSign=false
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageDataEncipherment=false
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageDecipherOnly=false
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageDigitalSignature=true
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageEncipherOnly=false
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageKeyAgreement=false
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageKeyCertSign=true
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageKeyEncipherment=false
|
||||
policyset.caJarSigningSet.6.default.params.keyUsageNonRepudiation=false
|
||||
policyset.caJarSigningSet.7.constraint.class_id=nsCertTypeExtConstraintImpl
|
||||
policyset.caJarSigningSet.7.constraint.name=Netscape Certificate Type Extension Constraint
|
||||
policyset.caJarSigningSet.7.constraint.params.nsCertCritical=-
|
||||
policyset.caJarSigningSet.7.constraint.params.nsCertEmail=-
|
||||
policyset.caJarSigningSet.7.constraint.params.nsCertEmailCA=-
|
||||
policyset.caJarSigningSet.7.constraint.params.nsCertObjectSigning=-
|
||||
policyset.caJarSigningSet.7.constraint.params.nsCertObjectSigningCA=-
|
||||
policyset.caJarSigningSet.7.constraint.params.nsCertSSLCA=-
|
||||
policyset.caJarSigningSet.7.constraint.params.nsCertSSLClient=-
|
||||
policyset.caJarSigningSet.7.constraint.params.nsCertSSLServer=-
|
||||
policyset.caJarSigningSet.7.default.class_id=nsCertTypeExtDefaultImpl
|
||||
policyset.caJarSigningSet.7.default.name=Netscape Certificate Type Extension Default
|
||||
policyset.caJarSigningSet.7.default.params.nsCertCritical=false
|
||||
policyset.caJarSigningSet.7.default.params.nsCertEmail=false
|
||||
policyset.caJarSigningSet.7.default.params.nsCertEmailCA=false
|
||||
policyset.caJarSigningSet.7.default.params.nsCertObjectSigning=true
|
||||
policyset.caJarSigningSet.7.default.params.nsCertObjectSigningCA=false
|
||||
policyset.caJarSigningSet.7.default.params.nsCertSSLCA=false
|
||||
policyset.caJarSigningSet.7.default.params.nsCertSSLClient=false
|
||||
policyset.caJarSigningSet.7.default.params.nsCertSSLServer=false
|
||||
policyset.caJarSigningSet.9.constraint.class_id=signingAlgConstraintImpl
|
||||
policyset.caJarSigningSet.9.constraint.name=No Constraint
|
||||
policyset.caJarSigningSet.9.constraint.params.signingAlgsAllowed=MD5withRSA,MD2withRSA,SHA1withRSA,SHA256withRSA,SHA512withRSA,SHA1withDSA,SHA1withEC
|
||||
policyset.caJarSigningSet.9.default.class_id=signingAlgDefaultImpl
|
||||
policyset.caJarSigningSet.9.default.name=Signing Alg
|
||||
policyset.caJarSigningSet.9.default.params.signingAlg=-
|
||||
@@ -16,7 +16,7 @@ header = GSS_NAME
|
||||
handler = ipaserver.secrets.kem.IPAKEMKeys
|
||||
paths = /keys
|
||||
store = ipa
|
||||
server_keys = $IPA_CUSTODIA_CONF_DIR/server.keys
|
||||
server_keys = $IPA_CUSTODIA_KEYS
|
||||
|
||||
[store:ipa]
|
||||
handler = ipaserver.secrets.store.IPASecStore
|
||||
|
||||
@@ -108,6 +108,7 @@ cn: fqdn
|
||||
nsSystemIndex: false
|
||||
nsIndexType: eq
|
||||
nsIndexType: pres
|
||||
nsIndexType: sub
|
||||
|
||||
dn: cn=macAddress,cn=index,cn=userRoot,cn=ldbm database,cn=plugins,cn=config
|
||||
changetype: add
|
||||
@@ -287,3 +288,48 @@ objectClass: nsIndex
|
||||
nsSystemIndex: false
|
||||
nsIndexType: eq
|
||||
nsIndexType: sub
|
||||
|
||||
dn: cn=description,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
changetype: add
|
||||
cn: description
|
||||
objectClass: top
|
||||
objectClass: nsindex
|
||||
nssystemindex: false
|
||||
nsindextype: eq
|
||||
nsindextype: sub
|
||||
|
||||
dn: cn=l,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
changetype: add
|
||||
cn: l
|
||||
objectClass: top
|
||||
objectClass: nsindex
|
||||
nssystemindex: false
|
||||
nsindextype: eq
|
||||
nsindextype: sub
|
||||
|
||||
dn: cn=nsOsVersion,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
changetype: add
|
||||
cn: nsOsVersion
|
||||
objectClass: top
|
||||
objectClass: nsindex
|
||||
nssystemindex: false
|
||||
nsindextype: eq
|
||||
nsindextype: sub
|
||||
|
||||
dn: cn=nsHardwarePlatform,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
changetype: add
|
||||
cn: nsHardwarePlatform
|
||||
objectClass: top
|
||||
objectClass: nsindex
|
||||
nssystemindex: false
|
||||
nsindextype: eq
|
||||
nsindextype: sub
|
||||
|
||||
dn: cn=nsHostLocation,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
changetype: add
|
||||
cn: nsHostLocation
|
||||
objectClass: top
|
||||
objectClass: nsindex
|
||||
nssystemindex: false
|
||||
nsindextype: eq
|
||||
nsindextype: sub
|
||||
|
||||
@@ -4,4 +4,5 @@
|
||||
Environment=KRB5CCNAME=$KRB5CC_HTTPD
|
||||
Environment=GSS_USE_PROXY=yes
|
||||
Environment=KDCPROXY_CONFIG=$KDCPROXY_CONFIG
|
||||
Environment=LC_ALL=C.UTF-8
|
||||
ExecStartPre=$IPA_HTTPD_KDCPROXY
|
||||
|
||||
4
install/share/replica-prevent-time-skew.ldif
Normal file
@@ -0,0 +1,4 @@
|
||||
dn: cn=config
|
||||
changetype: modify
|
||||
replace: nsslapd-ignore-time-skew
|
||||
nsslapd-ignore-time-skew: $SKEWVALUE
|
||||
@@ -1,14 +0,0 @@
|
||||
dn: cn=ipa-http-delegation,cn=s4u2proxy,cn=etc,$SUFFIX
|
||||
changetype: modify
|
||||
add: memberPrincipal
|
||||
memberPrincipal: HTTP/$FQDN@$REALM
|
||||
|
||||
# ipa-cifs-delegation-targets needs to be an ipaAllowedTarget for HTTP
|
||||
# delegation but we don't add it here as an LDIF because this entry may
|
||||
# already exist from another replica, or previous install. If it is missing
|
||||
# then it will be caught by the update file 61-trusts-s4u2proxy.update
|
||||
|
||||
dn: cn=ipa-ldap-delegation-targets,cn=s4u2proxy,cn=etc,$SUFFIX
|
||||
changetype: modify
|
||||
add: memberPrincipal
|
||||
memberPrincipal: ldap/$FQDN@$REALM
|
||||
@@ -25,6 +25,18 @@ WSGI appliction for IPA server.
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
# Some dependencies like Dogtag's pki.client library and custodia use
# python-requests to make HTTPS connections. python-requests prefers
# PyOpenSSL over Python's stdlib ssl module. PyOpenSSL is built on top
# of python-cryptography, which triggers an execmem SELinux violation
# in the context of Apache HTTPD (httpd_execmem).
# When requests is imported, it always tries to import pyopenssl glue
# code from urllib3's contrib directory. The import of PyOpenSSL is
# enough to trigger the SELinux denial.
# Block any import of PyOpenSSL's SSL module by raising an ImportError
sys.modules['OpenSSL.SSL'] = None
|
||||
|
||||
from ipaplatform.paths import paths
|
||||
from ipalib import api
|
||||
|
||||
@@ -34,6 +34,7 @@ dist_sbin_SCRIPTS = \
|
||||
appdir = $(libexecdir)/ipa/
|
||||
dist_app_SCRIPTS = \
|
||||
ipa-custodia \
|
||||
ipa-custodia-check \
|
||||
ipa-httpd-kdcproxy \
|
||||
ipa-pki-retrieve-key \
|
||||
$(NULL)
|
||||
|
||||
@@ -27,6 +27,7 @@ import tempfile
|
||||
from ipalib.install.kinit import kinit_keytab
|
||||
from ipapython import ipautil
|
||||
|
||||
from ipaclient.install import ipa_certupdate
|
||||
from ipaserver.install import installutils
|
||||
from ipaserver.install.installutils import create_replica_config
|
||||
from ipaserver.install.installutils import check_creds, ReplicaConfig
|
||||
@@ -45,7 +46,7 @@ log_file_name = paths.IPAREPLICA_CA_INSTALL_LOG
|
||||
REPLICA_INFO_TOP_DIR = None
|
||||
|
||||
def parse_options():
|
||||
usage = "%prog [options] REPLICA_FILE"
|
||||
usage = "%prog [options] [REPLICA_FILE]"
|
||||
parser = IPAOptionParser(usage=usage, version=version.VERSION)
|
||||
parser.add_option("-d", "--debug", dest="debug", action="store_true",
|
||||
default=False, help="gather extra debugging information")
|
||||
@@ -64,11 +65,16 @@ def parse_options():
|
||||
default=False, help="unattended installation never prompts the user")
|
||||
parser.add_option("--external-ca", dest="external_ca", action="store_true",
|
||||
default=False, help="Generate a CSR to be signed by an external CA")
|
||||
ext_cas = ("generic", "ms-cs")
|
||||
ext_cas = tuple(x.value for x in cainstance.ExternalCAType)
|
||||
parser.add_option("--external-ca-type", dest="external_ca_type",
|
||||
type="choice", choices=ext_cas,
|
||||
metavar="{{{0}}}".format(",".join(ext_cas)),
|
||||
help="Type of the external CA. Default: generic")
|
||||
parser.add_option("--external-ca-profile", dest="external_ca_profile",
|
||||
type='constructor', constructor=cainstance.ExternalCAProfile,
|
||||
default=None, metavar="PROFILE-SPEC",
|
||||
help="Specify the certificate profile/template to use "
|
||||
"at the external CA")
|
||||
parser.add_option("--external-cert-file", dest="external_cert_files",
|
||||
action="append", metavar="FILE",
|
||||
help="File containing the IPA CA certificate and the external CA certificate chain")
|
||||
@@ -116,6 +122,11 @@ def parse_options():
|
||||
parser.error(
|
||||
"You cannot specify --external-ca-type without --external-ca")
|
||||
|
||||
if options.external_ca_profile and not options.external_ca:
|
||||
parser.error(
|
||||
"You cannot specify --external-ca-profile "
|
||||
"without --external-ca")
|
||||
|
||||
return safe_options, options, filename
|
||||
|
||||
|
||||
@@ -149,7 +160,7 @@ def install_replica(safe_options, options, filename):
|
||||
else:
|
||||
if filename is None:
|
||||
sys.exit("A replica file is required")
|
||||
if not ipautil.file_exists(filename):
|
||||
if not os.path.isfile(filename):
|
||||
sys.exit("Replica file %s does not exist" % filename)
|
||||
|
||||
if not options.promote:
|
||||
@@ -164,6 +175,16 @@ def install_replica(safe_options, options, filename):
|
||||
not options.skip_conncheck and options.unattended):
|
||||
sys.exit('admin password required')
|
||||
|
||||
# Run ipa-certupdate to ensure we have the CA cert. This is
|
||||
# necessary if the admin has just promoted the topology from
|
||||
# CA-less to CA-ful, and ipa-certupdate has not been run yet.
|
||||
ipa_certupdate.run_with_args(api)
|
||||
|
||||
# CertUpdate restarts DS causing broken pipe on the original
|
||||
# connection, so reconnect the backend.
|
||||
api.Backend.ldap2.disconnect()
|
||||
api.Backend.ldap2.connect()
|
||||
|
||||
if options.promote:
|
||||
config = ReplicaConfig()
|
||||
config.ca_host_name = None
|
||||
@@ -233,6 +254,10 @@ def install_master(safe_options, options):
|
||||
ca.install_check(True, None, options)
|
||||
ca.install(True, None, options)
|
||||
|
||||
# Run ipa-certupdate to add the new CA certificate to
|
||||
# certificate databases on this server.
|
||||
logger.info("Updating certificate databases.")
|
||||
ipa_certupdate.run_with_args(api)
|
||||
|
||||
def install(safe_options, options, filename):
|
||||
options.promote = False
|
||||
|
||||
286
install/tools/ipa-custodia-check
Executable file
@@ -0,0 +1,286 @@
|
||||
#!/usr/bin/python2
|
||||
"""Test client for ipa-custodia
|
||||
|
||||
The test script is expected to be executed on an IPA server with existing
|
||||
Custodia server keys.
|
||||
"""
|
||||
from __future__ import print_function
|
||||
import argparse
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import socket
|
||||
import warnings
|
||||
|
||||
from custodia.message.kem import KEY_USAGE_SIG, KEY_USAGE_ENC, KEY_USAGE_MAP
|
||||
|
||||
from jwcrypto.common import json_decode
|
||||
from jwcrypto.jwk import JWK
|
||||
|
||||
from ipalib import api
|
||||
from ipaplatform.paths import paths
|
||||
import ipapython.version
|
||||
from ipaserver.install.installutils import is_ipa_configured
|
||||
|
||||
try:
|
||||
# FreeIPA >= 4.5
|
||||
from ipaserver.secrets.client import CustodiaClient
|
||||
except ImportError:
|
||||
# FreeIPA <= 4.4
|
||||
from ipapython.secrets.client import CustodiaClient
|
||||
|
||||
# Ignore security warning from vendored and non-vendored urllib3
|
||||
try:
|
||||
from urllib3.exceptions import SecurityWarning
|
||||
except ImportError:
|
||||
SecurityWarning = None
|
||||
else:
|
||||
warnings.simplefilter("ignore", SecurityWarning)
|
||||
|
||||
try:
|
||||
from requests.packages.urllib3.exceptions import SecurityWarning
|
||||
except ImportError:
|
||||
SecurityWarning = None
|
||||
else:
|
||||
warnings.simplefilter("ignore", SecurityWarning)
|
||||
|
||||
|
||||
KEYS = [
|
||||
'dm/DMHash',
|
||||
'ra/ipaCert',
|
||||
'ca/auditSigningCert cert-pki-ca',
|
||||
'ca/caSigningCert cert-pki-ca',
|
||||
'ca/ocspSigningCert cert-pki-ca',
|
||||
'ca/subsystemCert cert-pki-ca',
|
||||
]
|
||||
|
||||
IPA_CUSTODIA_KEYFILE = os.path.join(paths.IPA_CUSTODIA_CONF_DIR,
|
||||
'server.keys')
|
||||
|
||||
|
||||
logger = logging.getLogger('ipa-custodia-tester')
|
||||
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
"IPA Custodia check",
|
||||
)
|
||||
# --store is dangerous and therefore hidden! Don't use it unless you really
|
||||
# know what you are doing! Keep in mind that it might destroy your NSSDB
|
||||
# unless it uses sqlite format.
|
||||
parser.add_argument(
|
||||
"--store", action='store_true', dest='store',
|
||||
help=argparse.SUPPRESS
|
||||
)
|
||||
parser.add_argument(
|
||||
"--debug", action='store_true',
|
||||
help="Debug mode"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--verbose", action='store_true',
|
||||
help='Verbose mode'
|
||||
)
|
||||
parser.add_argument(
|
||||
"server",
|
||||
help="FQDN of a IPA server (can be own FQDN for self-test)"
|
||||
)
|
||||
parser.add_argument(
|
||||
'keys', nargs='*', default=KEYS,
|
||||
help="Remote key ({})".format(', '.join(KEYS))
|
||||
)
|
||||
|
||||
|
||||
class IPACustodiaTester(object):
|
||||
files = [
|
||||
paths.IPA_DEFAULT_CONF,
|
||||
paths.KRB5_KEYTAB,
|
||||
paths.IPA_CUSTODIA_CONF,
|
||||
IPA_CUSTODIA_KEYFILE
|
||||
]
|
||||
|
||||
def __init__(self, parser, args):
|
||||
self.parser = parser
|
||||
self.args = args
|
||||
if not api.isdone('bootstrap'):
|
||||
# bootstrap to initialize api.env
|
||||
api.bootstrap()
|
||||
self.debug("IPA API bootstrapped")
|
||||
self.realm = api.env.realm
|
||||
self.host = api.env.host
|
||||
self.host_spn = 'host/{}@{}'.format(self.host, self.realm)
|
||||
self.server_spn = 'host/{}@{}'.format(self.args.server, self.realm)
|
||||
self.client = None
|
||||
self._errors = []
|
||||
|
||||
def error(self, msg, fatal=False):
|
||||
self._errors.append(msg)
|
||||
logger.error(msg, exc_info=self.args.verbose)
|
||||
if fatal:
|
||||
self.exit()
|
||||
|
||||
def exit(self):
|
||||
if self._errors:
|
||||
self.parser.exit(1, "[ERROR] One or more tests have failed.\n")
|
||||
else:
|
||||
self.parser.exit(0, "All tests have passed successfully.\n")
|
||||
|
||||
def warning(self, msg):
|
||||
logger.warning(msg)
|
||||
|
||||
def info(self, msg):
|
||||
logger.info(msg)
|
||||
|
||||
def debug(self, msg):
|
||||
logger.debug(msg)
|
||||
|
||||
def check(self):
|
||||
self.status()
|
||||
self.check_fqdn()
|
||||
self.check_files()
|
||||
self.check_client()
|
||||
self.check_jwk()
|
||||
self.check_keys()
|
||||
|
||||
def status(self):
|
||||
self.info("Platform: {}".format(platform.platform()))
|
||||
self.info("IPA version: {}".format(
|
||||
ipapython.version.VERSION
|
||||
))
|
||||
self.info("IPA vendor version: {}".format(
|
||||
ipapython.version.VENDOR_VERSION
|
||||
))
|
||||
self.info("Realm: {}".format(self.realm))
|
||||
self.info("Host: {}".format(self.host))
|
||||
self.info("Remote server: {}".format(self.args.server))
|
||||
if self.host == self.args.server:
|
||||
self.warning("Performing self-test only.")
|
||||
|
||||
def check_fqdn(self):
|
||||
fqdn = socket.getfqdn()
|
||||
if self.host != fqdn:
|
||||
self.warning(
|
||||
"socket.getfqdn() reports hostname '{}'".format(fqdn)
|
||||
)
|
||||
|
||||
def check_files(self):
|
||||
for filename in self.files:
|
||||
if not os.path.isfile(filename):
|
||||
self.error("File '{0}' is missing.".format(filename))
|
||||
else:
|
||||
self.info("File '{0}' exists.".format(filename))
|
||||
|
||||
def check_client(self):
|
||||
try:
|
||||
self.client = CustodiaClient(
|
||||
server=self.args.server,
|
||||
client_service='host@{}'.format(self.host),
|
||||
keyfile=IPA_CUSTODIA_KEYFILE,
|
||||
keytab=paths.KRB5_KEYTAB,
|
||||
realm=self.realm,
|
||||
)
|
||||
except Exception as e:
|
||||
self.error("Failed to create client: {}".format(e), fatal=True)
|
||||
else:
|
||||
self.info("Custodia client created.")
|
||||
|
||||
def _check_jwk_single(self, usage_id):
|
||||
usage = KEY_USAGE_MAP[usage_id]
|
||||
with open(IPA_CUSTODIA_KEYFILE) as f:
|
||||
dictkeys = json_decode(f.read())
|
||||
|
||||
try:
|
||||
pkey = JWK(**dictkeys[usage_id])
|
||||
local_pubkey = json_decode(pkey.export_public())
|
||||
except Exception:
|
||||
self.error("Failed to load and parse local JWK.", fatal=True)
|
||||
else:
|
||||
self.info("Loaded key for usage '{}' from '{}'.".format(
|
||||
usage, IPA_CUSTODIA_KEYFILE
|
||||
))
|
||||
|
||||
if pkey.key_id != self.host_spn:
|
||||
self.error(
|
||||
"KID '{}' != host service principal name '{}' "
|
||||
"(usage: {})".format(pkey.key_id, self.host_spn, usage),
|
||||
fatal=True
|
||||
)
|
||||
else:
|
||||
self.info(
|
||||
"JWK KID matches host's service principal name '{}'.".format(
|
||||
self.host_spn
|
||||
))
|
||||
|
||||
# LDAP doesn't contain KID
|
||||
local_pubkey.pop("kid", None)
|
||||
find_key = self.client.ikk.find_key
|
||||
try:
|
||||
host_pubkey = json_decode(find_key(self.host_spn, usage_id))
|
||||
except Exception:
|
||||
self.error("Fetching host keys {} (usage: {}) failed.".format(
|
||||
self.host_spn, usage), fatal=True)
|
||||
else:
|
||||
self.info("Checked host LDAP keys '{}' for usage {}.".format(
|
||||
self.host_spn, usage
|
||||
))
|
||||
|
||||
if host_pubkey != local_pubkey:
|
||||
self.debug("LDAP: '{}'".format(host_pubkey))
|
||||
self.debug("Local: '{}'".format(local_pubkey))
|
||||
self.error(
|
||||
"Host key in LDAP does not match local key.", fatal=True)
|
||||
else:
|
||||
self.info(
|
||||
"Local key for usage '{}' matches key in LDAP.".format(usage)
|
||||
)
|
||||
|
||||
try:
|
||||
server_pubkey = json_decode(find_key(self.server_spn, usage_id))
|
||||
except Exception:
|
||||
self.error("Fetching server keys {} (usage: {}) failed.".format(
|
||||
self.server_spn, usage), fatal=True)
|
||||
else:
|
||||
self.info("Checked server LDAP keys '{}' for usage {}.".format(
|
||||
self.server_spn, usage
|
||||
))
|
||||
|
||||
return local_pubkey, host_pubkey, server_pubkey
|
||||
|
||||
def check_jwk(self):
|
||||
self._check_jwk_single(KEY_USAGE_SIG)
|
||||
self._check_jwk_single(KEY_USAGE_ENC)
|
||||
|
||||
def check_keys(self):
|
||||
for key in self.args.keys:
|
||||
try:
|
||||
result = self.client.fetch_key(key, store=self.args.store)
|
||||
except Exception as e:
|
||||
self.error("Failed to retrieve key '{}': {}.".format(
|
||||
key, e
|
||||
))
|
||||
else:
|
||||
self.info("Successfully retrieved '{}'.".format(key))
|
||||
if not self.args.store:
|
||||
self.debug(result)
|
||||
|
||||
|
||||
def main():
|
||||
args = parser.parse_args()
|
||||
if args.debug:
|
||||
args.verbose = True
|
||||
|
||||
logging.basicConfig(
|
||||
level=logging.DEBUG if args.debug else logging.INFO,
|
||||
format='[%(asctime)s %(name)s] <%(levelname)s>: %(message)s',
|
||||
datefmt='%Y-%m-%dT%H:%M:%S',
|
||||
)
|
||||
if not is_ipa_configured():
|
||||
parser.error("IPA is not configured on this system.\n")
|
||||
if os.geteuid() != 0:
|
||||
parser.error("Script must be executed as root.\n")
|
||||
|
||||
tester = IPACustodiaTester(parser, args)
|
||||
tester.check()
|
||||
tester.exit()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -54,7 +54,7 @@ def parse_options():
|
||||
help="Master Server IP Address. This option can be used "
|
||||
"multiple times")
|
||||
parser.add_option("--forwarder", dest="forwarders", action="append",
|
||||
type="ip", help="Add a DNS forwarder. This option can be used multiple times")
|
||||
type="ip_with_loopback", help="Add a DNS forwarder. This option can be used multiple times")
|
||||
parser.add_option("--no-forwarders", dest="no_forwarders", action="store_true",
|
||||
default=False, help="Do not add any DNS forwarders, use root servers instead")
|
||||
parser.add_option("--auto-forwarders", dest="auto_forwarders",
|
||||
@@ -113,7 +113,7 @@ def parse_options():
|
||||
parser.error("You must specify at least one option: "
|
||||
"--forwarder or --no-forwarders or --auto-forwarders")
|
||||
|
||||
if options.kasp_db_file and not ipautil.file_exists(options.kasp_db_file):
|
||||
if options.kasp_db_file and not os.path.isfile(options.kasp_db_file):
|
||||
parser.error("File %s does not exist" % options.kasp_db_file)
|
||||
|
||||
if options.dm_password:
|
||||
|
||||
@@ -4,29 +4,39 @@ from __future__ import print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
import traceback
|
||||
|
||||
from ipalib import constants
|
||||
from ipalib.config import Env
|
||||
from ipaplatform.paths import paths
|
||||
from ipaserver.secrets.client import CustodiaClient
|
||||
|
||||
env = Env()
|
||||
env._finalize()
|
||||
|
||||
keyname = "ca_wrapped/" + sys.argv[1]
|
||||
servername = sys.argv[2]
|
||||
def main():
|
||||
env = Env()
|
||||
env._finalize()
|
||||
|
||||
service = constants.PKI_GSSAPI_SERVICE_NAME
|
||||
client_keyfile = os.path.join(paths.PKI_TOMCAT, service + '.keys')
|
||||
client_keytab = os.path.join(paths.PKI_TOMCAT, service + '.keytab')
|
||||
keyname = "ca_wrapped/" + sys.argv[1]
|
||||
servername = sys.argv[2]
|
||||
|
||||
# pylint: disable=no-member
|
||||
client = CustodiaClient(
|
||||
client_service='%s@%s' % (service, env.host), server=servername,
|
||||
realm=env.realm, ldap_uri="ldaps://" + env.host,
|
||||
keyfile=client_keyfile, keytab=client_keytab,
|
||||
)
|
||||
service = constants.PKI_GSSAPI_SERVICE_NAME
|
||||
client_keyfile = os.path.join(paths.PKI_TOMCAT, service + '.keys')
|
||||
client_keytab = os.path.join(paths.PKI_TOMCAT, service + '.keytab')
|
||||
|
||||
# Print the response JSON to stdout; it is already in the format
|
||||
# that Dogtag's ExternalProcessKeyRetriever expects
|
||||
print(client.fetch_key(keyname, store=False))
|
||||
# pylint: disable=no-member
|
||||
client = CustodiaClient(
|
||||
client_service='%s@%s' % (service, env.host), server=servername,
|
||||
realm=env.realm, ldap_uri="ldaps://" + env.host,
|
||||
keyfile=client_keyfile, keytab=client_keytab,
|
||||
)
|
||||
|
||||
# Print the response JSON to stdout; it is already in the format
|
||||
# that Dogtag's ExternalProcessKeyRetriever expects
|
||||
print(client.fetch_key(keyname, store=False))
|
||||
|
||||
|
||||
try:
|
||||
main()
|
||||
except BaseException:
|
||||
traceback.print_exc()
|
||||
sys.exit(1)
|
||||
|
||||
@@ -290,7 +290,7 @@ class PortResponder(threading.Thread):
|
||||
self._sockets = []
|
||||
self._close = False
|
||||
self._close_lock = threading.Lock()
|
||||
self.responder_data = 'FreeIPA'
|
||||
self.responder_data = b'FreeIPA'
|
||||
self.ports_opened = False
|
||||
self.ports_open_cond = threading.Condition()
|
||||
|
||||
@@ -318,7 +318,7 @@ class PortResponder(threading.Thread):
|
||||
logger.debug('%d %s: Stopped listening', port, proto)
|
||||
|
||||
def _is_closing(self):
|
||||
with self._close_lock:
|
||||
with self._close_lock: # pylint: disable=not-context-manager
|
||||
return self._close
|
||||
|
||||
def _bind_to_port(self, port, socket_type):
|
||||
@@ -369,7 +369,7 @@ class PortResponder(threading.Thread):
|
||||
def stop(self):
|
||||
logger.debug('Stopping listening thread.')
|
||||
|
||||
with self._close_lock:
|
||||
with self._close_lock: # pylint: disable=not-context-manager
|
||||
self._close = True
|
||||
|
||||
|
||||
@@ -530,6 +530,9 @@ def main():
|
||||
if result.returncode != 0:
|
||||
raise RuntimeError("Could not get ticket for master server: %s" %
|
||||
result.error_output)
|
||||
# Now that the cred cache file is initialized,
|
||||
# use it for the IPA API calls
|
||||
os.environ['KRB5CCNAME'] = CCACHE_FILE
|
||||
|
||||
try:
|
||||
logger.info("Check RPC connection to remote master")
|
||||
|
||||
@@ -86,6 +86,8 @@ def parse_options():
|
||||
parser.add_option("-p", "--password", dest="dirman_passwd", help="Directory Manager password")
|
||||
parser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False,
|
||||
help="provide additional information")
|
||||
parser.add_option("-d", "--debug", dest="debug", action="store_true", default=False,
|
||||
help="provide additional debug information")
|
||||
parser.add_option("-f", "--force", dest="force", action="store_true", default=False,
|
||||
help="ignore some types of errors")
|
||||
parser.add_option("-c", "--cleanup", dest="cleanup", action="store_true", default=False,
|
||||
@@ -1235,8 +1237,14 @@ def force_sync(realm, thishost, fromhost, dirman_passwd, nolookup=False):
|
||||
repl = replication.ReplicationManager(realm, thishost, dirman_passwd)
|
||||
repl.force_sync(repl.conn, fromhost)
|
||||
else:
|
||||
ds = dsinstance.DsInstance(realm_name=realm)
|
||||
ds.ldapi = os.getegid() == 0
|
||||
ds.replica_manage_time_skew(prevent=False)
|
||||
repl = replication.ReplicationManager(realm, fromhost, dirman_passwd)
|
||||
repl.force_sync(repl.conn, thishost)
|
||||
agreement = repl.get_replication_agreement(thishost)
|
||||
repl.wait_for_repl_init(repl.conn, agreement.dn)
|
||||
ds.replica_manage_time_skew(prevent=True)
|
||||
|
||||
def show_DNA_ranges(hostname, master, realm, dirman_passwd, nextrange=False,
|
||||
nolookup=False):
|
||||
@@ -1503,18 +1511,9 @@ def main(options, args):
|
||||
elif not os.path.exists(paths.IPA_DEFAULT_CONF):
|
||||
sys.exit("IPA is not configured on this system.")
|
||||
|
||||
# Just initialize the environment. This is so the installer can have
|
||||
# access to the plugin environment
|
||||
api_env = {'in_server' : True,
|
||||
'verbose' : options.verbose,
|
||||
}
|
||||
|
||||
if os.getegid() != 0:
|
||||
api_env['log'] = None # turn off logging for non-root
|
||||
|
||||
api.bootstrap(
|
||||
context='cli', confdir=paths.ETC_IPA,
|
||||
in_server=True, verbose=options.verbose
|
||||
in_server=True, verbose=options.verbose, debug=options.debug
|
||||
)
|
||||
api.finalize()
|
||||
|
||||
|
||||
@@ -32,6 +32,7 @@ from ipaserver.install.installutils import is_ipa_configured, ScriptError
|
||||
from ipalib import api, errors
|
||||
from ipapython.ipaldap import LDAPClient
|
||||
from ipapython.ipautil import wait_for_open_ports, wait_for_open_socket
|
||||
from ipapython.ipautil import run
|
||||
from ipapython import config
|
||||
from ipaplatform.tasks import tasks
|
||||
from ipapython.dn import DN
|
||||
@@ -140,10 +141,22 @@ def version_check():
|
||||
except (installutils.UpgradeMissingVersionError,
|
||||
installutils.UpgradeDataOlderVersionError) as exc:
|
||||
emit_err("IPA version error: %s" % exc)
|
||||
emit_err("Upgrade required: please run ipa-server-upgrade command")
|
||||
raise IpactlError("Aborting ipactl")
|
||||
except installutils.UpgradeVersionError as e:
|
||||
emit_err("IPA version error: %s" % e)
|
||||
else:
|
||||
return
|
||||
|
||||
emit_err("Automatically running upgrade, for details see {}".format(
|
||||
paths.IPAUPGRADE_LOG))
|
||||
emit_err("Be patient, this may take a few minutes.")
|
||||
|
||||
# Fork out to call ipa-server-upgrade so that logging is sane.
|
||||
result = run([paths.IPA_SERVER_UPGRADE], raiseonerr=False,
|
||||
capture_error=True)
|
||||
if result.returncode != 0:
|
||||
emit_err("Automatic upgrade failed: %s" % result.error_output)
|
||||
emit_err("See the upgrade log for more details and/or run {} again".
|
||||
format(paths.IPA_SERVER_UPGRADE))
|
||||
raise IpactlError("Aborting ipactl")
|
||||
|
||||
|
||||
@@ -366,7 +379,16 @@ def ipa_stop(options):
|
||||
|
||||
def ipa_restart(options):
|
||||
if not options.skip_version_check:
|
||||
version_check()
|
||||
try:
|
||||
version_check()
|
||||
except Exception as e:
|
||||
try:
|
||||
ipa_stop(options)
|
||||
except Exception:
|
||||
# We don't care about errors that happened while stopping.
|
||||
# We need to raise the upgrade error.
|
||||
pass
|
||||
raise e
|
||||
else:
|
||||
print("Skipping version check")
|
||||
|
||||
|
||||
@@ -48,7 +48,26 @@ Admin user Kerberos password used for connection check
|
||||
Generate a CSR for the IPA CA certificate to be signed by an external CA.
|
||||
.TP
|
||||
\fB\-\-external\-ca\-type\fR=\fITYPE\fR
|
||||
Type of the external CA. Possible values are "generic", "ms-cs". Default value is "generic". Use "ms-cs" to include template name required by Microsoft Certificate Services (MS CS) in the generated CSR.
|
||||
Type of the external CA. Possible values are "generic", "ms-cs". Default value is "generic". Use "ms-cs" to include the template name required by Microsoft Certificate Services (MS CS) in the generated CSR (see \fB\-\-external\-ca\-profile\fR for full details).
|
||||
|
||||
.TP
|
||||
\fB\-\-external\-ca\-profile\fR=\fIPROFILE_SPEC\fR
|
||||
Specify the certificate profile or template to use at the external CA.
|
||||
|
||||
When \fB\-\-external\-ca\-type\fR is "ms-cs" the following specifiers may be used:
|
||||
|
||||
.RS
|
||||
.TP
|
||||
\fB<oid>:<majorVersion>[:<minorVersion>]\fR
|
||||
Specify a certificate template by OID and major version, optionally also specifying minor version.
|
||||
.TP
|
||||
\fB<name>\fR
|
||||
Specify a certificate template by name. The name cannot contain any \fI:\fR characters and cannot be an OID (otherwise the OID-based template specifier syntax takes precedence).
|
||||
.TP
|
||||
\fBdefault\fR
|
||||
If no template is specified, the template name "SubCA" is used.
|
||||
.RE
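.PP
For example (illustrative values only), \fB\-\-external\-ca\-profile=1.3.6.1.4.1.311.21.8.12345678.87654321:1\fR selects a template by OID and major version, while \fB\-\-external\-ca\-profile=IPASubCA\fR selects a template by name.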
|
||||
|
||||
.TP
|
||||
\fB\-\-external\-cert\-file\fR=\fIFILE\fR
|
||||
File containing the IPA CA certificate and the external CA certificate chain. The file is accepted in PEM and DER certificate and PKCS#7 certificate chain formats. This option may be used multiple times.
|
||||
|
||||
@@ -79,7 +79,26 @@ Sign the renewed certificate by itself.
|
||||
Sign the renewed certificate by external CA.
|
||||
.TP
|
||||
\fB\-\-external\-ca\-type\fR=\fITYPE\fR
|
||||
Type of the external CA. Possible values are "generic", "ms-cs". Default value is "generic". Use "ms-cs" to include template name required by Microsoft Certificate Services (MS CS) in the generated CSR.
|
||||
Type of the external CA. Possible values are "generic", "ms-cs". Default value is "generic". Use "ms-cs" to include the template name required by Microsoft Certificate Services (MS CS) in the generated CSR (see \fB\-\-external\-ca\-profile\fR for full details).
|
||||
|
||||
.TP
|
||||
\fB\-\-external\-ca\-profile\fR=\fIPROFILE_SPEC\fR
|
||||
Specify the certificate profile or template to use at the external CA.
|
||||
|
||||
When \fB\-\-external\-ca\-type\fR is "ms-cs" the following specifiers may be used:
|
||||
|
||||
.RS
|
||||
.TP
|
||||
\fB<oid>:<majorVersion>[:<minorVersion>]\fR
|
||||
Specify a certificate template by OID and major version, optionally also specifying minor version.
|
||||
.TP
|
||||
\fB<name>\fR
|
||||
Specify a certificate template by name. The name cannot contain any \fI:\fR characters and cannot be an OID (otherwise the OID-based template specifier syntax takes precedence).
|
||||
.TP
|
||||
\fBdefault\fR
|
||||
If no template is specified, the template name "SubCA" is used.
|
||||
.RE
|
||||
|
||||
.TP
|
||||
\fB\-\-external\-cert\-file\fR=\fIFILE\fR
|
||||
File containing the IPA CA certificate and the external CA certificate chain. The file is accepted in PEM and DER certificate and PKCS#7 certificate chain formats. This option may be used multiple times.
|
||||
|
||||
@@ -40,7 +40,7 @@ Automatically log in to master machine and execute the master machine part of th
|
||||
The Kerberos realm name for the IPA server
|
||||
.TP
|
||||
\fB\-k\fR \fIKDC\fR, \fB\-\-kdc\fR=\fIKDC\fR
|
||||
KDC server address. Defaults t \fIMASTER\fR
|
||||
KDC server address. Defaults to \fIMASTER\fR
|
||||
.TP
|
||||
\fB\-p\fR \fIPRINCIPAL\fR, \fB\-\-principal\fR=\fIPRINCIPAL\fR
|
||||
Authorized Kerberos principal to use to log in to master machine. Defaults to \fIadmin\fR
|
||||
|
||||
@@ -28,7 +28,8 @@ PKCS#12 is a file format used to safely transport SSL certificates and public/pr
|
||||
|
||||
They may be generated and managed using the NSS pk12util command or the OpenSSL pkcs12 command.
|
||||
|
||||
The service(s) are not automatically restarted. In order to use the newly installed certificate(s) you will need to manually restart the Directory and/or Apache servers.
|
||||
The service(s) are not automatically restarted. In order to use the newly installed certificate(s) you will need to manually restart the Directory, Apache and/or Krb5kdc servers.
|
||||
|
||||
.SH "OPTIONS"
|
||||
.TP
|
||||
\fB\-d\fR, \fB\-\-dirsrv\fR
|
||||
|
||||
@@ -87,7 +87,26 @@ The path to LDIF file that will be used to modify configuration of dse.ldif duri
|
||||
Generate a CSR for the IPA CA certificate to be signed by an external CA.
|
||||
.TP
|
||||
\fB\-\-external\-ca\-type\fR=\fITYPE\fR
|
||||
Type of the external CA. Possible values are "generic", "ms-cs". Default value is "generic". Use "ms-cs" to include template name required by Microsoft Certificate Services (MS CS) in the generated CSR.
|
||||
Type of the external CA. Possible values are "generic", "ms-cs". Default value is "generic". Use "ms-cs" to include the template name required by Microsoft Certificate Services (MS CS) in the generated CSR (see \fB\-\-external\-ca\-profile\fR for full details).
|
||||
|
||||
.TP
|
||||
\fB\-\-external\-ca\-profile\fR=\fIPROFILE_SPEC\fR
|
||||
Specify the certificate profile or template to use at the external CA.
|
||||
|
||||
When \fB\-\-external\-ca\-type\fR is "ms-cs" the following specifiers may be used:
|
||||
|
||||
.RS
|
||||
.TP
|
||||
\fB<oid>:<majorVersion>[:<minorVersion>]\fR
|
||||
Specify a certificate template by OID and major version, optionally also specifying minor version.
|
||||
.TP
|
||||
\fB<name>\fR
|
||||
Specify a certificate template by name. The name cannot contain any \fI:\fR characters and cannot be an OID (otherwise the OID-based template specifier syntax takes precedence).
|
||||
.TP
|
||||
\fBdefault\fR
|
||||
If no template is specified, the template name "SubCA" is used.
|
||||
.RE
|
||||
|
||||
.TP
|
||||
\fB\-\-external\-cert\-file\fR=\fIFILE\fR
|
||||
File containing the IPA CA certificate and the external CA certificate chain. The file is accepted in PEM and DER certificate and PKCS#7 certificate chain formats. This option may be used multiple times.
|
||||
|
||||
@@ -101,6 +101,7 @@ return {
|
||||
'cn',
|
||||
{
|
||||
name: 'ipadomainresolutionorder',
|
||||
flags: ['w_if_no_aci'],
|
||||
tooltip: '@mc-opt:idview_mod:ipadomainresolutionorder:doc'
|
||||
},
|
||||
{
|
||||
|
||||
@@ -191,7 +191,7 @@ var IPA = function () {
|
||||
}
|
||||
}));
|
||||
|
||||
batch.add_command(that.get_whoami_command(true));
|
||||
batch.add_command(that.get_whoami_command());
|
||||
|
||||
batch.add_command(rpc.command({
|
||||
method: 'env',
|
||||
@@ -259,25 +259,24 @@ var IPA = function () {
|
||||
/**
|
||||
* Prepares `user-find --whoami` command
|
||||
* @protected
|
||||
* @param {boolean} batch - Specifies if it will be used as single command or
|
||||
* in a batch.
|
||||
*/
|
||||
that.get_whoami_command = function(batch) {
|
||||
that.get_whoami_command = function() {
|
||||
return rpc.command({
|
||||
method: 'whoami',
|
||||
on_success: function(data, text_status, xhr) {
|
||||
that.whoami.metadata = data;
|
||||
that.whoami.metadata = data.result || data;
|
||||
var wa_data = that.whoami.metadata;
|
||||
|
||||
rpc.command({
|
||||
method: data.details || data.command,
|
||||
args: data.arguments,
|
||||
method: wa_data.details || wa_data.command,
|
||||
args: wa_data.arguments,
|
||||
options: function() {
|
||||
var options = data.options || [];
|
||||
var options = wa_data.options || [];
|
||||
$.extend(options, {all: true});
|
||||
return options;
|
||||
}(),
|
||||
on_success: function(data, text_status, xhr) {
|
||||
that.whoami.data = false ? data.result[0] : data.result.result;
|
||||
that.whoami.data = data.result.result;
|
||||
var entity = that.whoami.metadata.object;
|
||||
|
||||
if (entity === 'user') {
|
||||
|
||||
@@ -55,7 +55,7 @@ define(['dojo/_base/declare',
|
||||
|
||||
kerberos_msg: "<i class=\"fa fa-info-circle\"></i> To login with <strong>Kerberos</strong>, please make sure you" +
|
||||
" have valid tickets (obtainable via kinit) and " +
|
||||
"<a href='http://${host}/ipa/config/unauthorized.html'>configured</a>" +
|
||||
"<a href='http://${host}/ipa/config/ssbrowser.html'>configured</a>" +
|
||||
" the browser correctly, then click Login. ",
|
||||
cert_msg: "<i class=\"fa fa-info-circle\"></i> To login with <strong>certificate</strong>," +
|
||||
" please make sure you have valid personal certificate. ",
|
||||
|
||||
@@ -70,8 +70,9 @@ default:cn: fqdn
|
||||
default:ObjectClass: top
|
||||
default:ObjectClass: nsIndex
|
||||
default:nsSystemIndex: false
|
||||
default:nsIndexType: eq
|
||||
default:nsIndexType: pres
|
||||
only:nsIndexType: eq
|
||||
only:nsIndexType: pres
|
||||
only:nsIndexType: sub
|
||||
|
||||
dn: cn=macAddress,cn=index,cn=userRoot,cn=ldbm database,cn=plugins,cn=config
|
||||
default:cn: macAddress
|
||||
@@ -267,3 +268,43 @@ default: objectClass: nsIndex
|
||||
only: nsSystemIndex: false
|
||||
only: nsIndexType: eq
|
||||
only: nsIndexType: sub
|
||||
|
||||
dn: cn=description,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
default: cn: description
|
||||
default: objectclass: top
|
||||
default: objectclass: nsindex
|
||||
default: nssystemindex: false
|
||||
default: nsindextype: eq
|
||||
default: nsindextype: sub
|
||||
|
||||
dn: cn=l,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
default: cn: l
|
||||
default: objectclass: top
|
||||
default: objectclass: nsindex
|
||||
default: nssystemindex: false
|
||||
default: nsindextype: eq
|
||||
default: nsindextype: sub
|
||||
|
||||
dn: cn=nsOsVersion,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
default: cn: nsOsVersion
|
||||
default: objectclass: top
|
||||
default: objectclass: nsindex
|
||||
default: nssystemindex: false
|
||||
default: nsindextype: eq
|
||||
default: nsindextype: sub
|
||||
|
||||
dn: cn=nsHardwarePlatform,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
default: cn: nsHardwarePlatform
|
||||
default: objectclass: top
|
||||
default: objectclass: nsindex
|
||||
default: nssystemindex: false
|
||||
default: nsindextype: eq
|
||||
default: nsindextype: sub
|
||||
|
||||
dn: cn=nsHostLocation,cn=index,cn=userroot,cn=ldbm database,cn=plugins,cn=config
|
||||
default: cn: nsHostLocation
|
||||
default: objectclass: top
|
||||
default: objectclass: nsindex
|
||||
default: nssystemindex: false
|
||||
default: nsindextype: eq
|
||||
default: nsindextype: sub
|
||||
|
||||
@@ -4,7 +4,7 @@ only:nsslapd-pluginEnabled: on
|
||||
# Remember original nsuniqueid for objects referenced from cn=changelog
|
||||
add:nsslapd-attribute: nsuniqueid:targetUniqueId
|
||||
add:nsslapd-changelogmaxage: 2d
|
||||
add:nsslapd-exclude-suffix: o=ipaca
|
||||
add:nsslapd-include-suffix: cn=dns,$SUFFIX
|
||||
|
||||
# Keep memberOf and referential integrity plugins away from cn=changelog.
|
||||
# It is necessary for performance reasons because we don't have appropriate
|
||||
|
||||
14
install/updates/20-whoami.update
Normal file
@@ -0,0 +1,14 @@
|
||||
dn: cn=whoami,cn=plugins,cn=config
|
||||
default:objectClass: top
|
||||
default:objectClass: nsSlapdPlugin
|
||||
default:objectClass: extensibleObject
|
||||
default:cn: whoami
|
||||
default:nsslapd-plugin-depends-on-type: database
|
||||
default:nsslapd-pluginDescription: whoami extended operation plugin
|
||||
default:nsslapd-pluginEnabled: on
|
||||
default:nsslapd-pluginId: whoami-plugin
|
||||
default:nsslapd-pluginInitfunc: whoami_init
|
||||
default:nsslapd-pluginPath: libwhoami-plugin
|
||||
default:nsslapd-pluginType: extendedop
|
||||
default:nsslapd-pluginVendor: 389 Project
|
||||
default:nsslapd-pluginVersion: 1.0
|
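The new 20-whoami.update file above enables the 389-DS "Who am I?" extended-operation plugin. A hedged sketch of exercising it from python-ldap once the plugin is active; the host and bind DN are placeholders:

    import ldap

    conn = ldap.initialize('ldap://ipa.example.test')   # placeholder host
    conn.simple_bind_s('uid=admin,cn=users,cn=accounts,dc=ipa,dc=test', 'Secret123')
    # RFC 4532 "Who am I?" extended operation, served by the whoami plugin.
    print(conn.whoami_s())   # e.g. 'dn: uid=admin,cn=users,cn=accounts,...'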
||||
@@ -24,6 +24,7 @@ app_DATA = \
|
||||
20-idoverride_index.update \
|
||||
20-uuid.update \
|
||||
20-default_password_policy.update \
|
||||
20-whoami.update \
|
||||
21-replicas_container.update \
|
||||
21-ca_renewal_container.update \
|
||||
21-certstore_container.update \
|
||||
|
||||
@@ -12,6 +12,7 @@ import os.path
|
||||
import pipes
|
||||
import subprocess
|
||||
import traceback
|
||||
import codecs
|
||||
|
||||
import pkg_resources
|
||||
|
||||
@@ -395,7 +396,7 @@ class CSRLibraryAdaptor(object):
|
||||
def sign_csr(self, certification_request_info):
|
||||
"""Sign a CertificationRequestInfo.
|
||||
|
||||
Returns: str, a DER-encoded signed CSR.
|
||||
:returns: bytes, a DER-encoded signed CSR.
|
||||
"""
|
||||
raise NotImplementedError('Use a subclass of CSRLibraryAdaptor')
|
||||
|
||||
@@ -406,11 +407,11 @@ class OpenSSLAdaptor(object):
|
||||
self.password_filename = password_filename
|
||||
|
||||
def key(self):
|
||||
with open(self.key_filename, 'r') as key_file:
|
||||
with open(self.key_filename, 'rb') as key_file:
|
||||
key_bytes = key_file.read()
|
||||
password = None
|
||||
if self.password_filename is not None:
|
||||
with open(self.password_filename, 'r') as password_file:
|
||||
with open(self.password_filename, 'rb') as password_file:
|
||||
password = password_file.read().strip()
|
||||
|
||||
key = load_pem_private_key(key_bytes, password, default_backend())
|
||||
@@ -438,7 +439,10 @@ class OpenSSLAdaptor(object):
|
||||
padding.PKCS1v15(),
|
||||
hashes.SHA256()
|
||||
)
|
||||
asn1sig = univ.BitString("'%s'H" % signature.encode('hex'))
|
||||
asn1sig = univ.BitString("'{sig}'H".format(
|
||||
sig=codecs.encode(signature, 'hex')
|
||||
.decode('ascii'))
|
||||
)
|
||||
csr.setComponentByName('signature', asn1sig)
|
||||
return encoder.encode(csr)
|
||||
|
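Replacing signature.encode('hex') with codecs.encode(...) above is the usual Python 2/3-portable way to hex-encode bytes, since bytes.encode('hex') only exists on Python 2. A small illustration of the two-step form, not taken from the patch:

    import codecs

    signature = b'\x01\x02\xab'
    # codecs.encode() accepts bytes on both Python 2 and 3 ...
    hex_bytes = codecs.encode(signature, 'hex')   # b'0102ab'
    # ... and .decode('ascii') turns it into text for string formatting.
    hex_text = hex_bytes.decode('ascii')          # '0102ab'
    print("'{sig}'H".format(sig=hex_text))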
||||
@@ -450,7 +454,7 @@ class NSSAdaptor(object):
|
||||
self.nickname = base64.b32encode(os.urandom(40))
|
||||
|
||||
def get_subject_public_key_info(self):
|
||||
temp_cn = base64.b32encode(os.urandom(40))
|
||||
temp_cn = base64.b32encode(os.urandom(40)).decode('ascii')
|
||||
|
||||
password_args = []
|
||||
if self.password_filename is not None:
|
||||
|
||||
@@ -196,17 +196,18 @@ def _parse_dn_section(subj, dn_sk):
|
||||
|
||||
# Skip past any leading X. X: X, etc to allow for multiple instances
|
||||
for idx, c in enumerate(rdn_type):
|
||||
if c in ':,.':
|
||||
if c in b':,.':
|
||||
if idx+1 < len(rdn_type):
|
||||
rdn_type = rdn_type[idx+1:]
|
||||
break
|
||||
if rdn_type.startswith('+'):
|
||||
if rdn_type.startswith(b'+'):
|
||||
rdn_type = rdn_type[1:]
|
||||
mval = -1
|
||||
else:
|
||||
mval = 0
|
||||
if not X509_NAME_add_entry_by_txt(
|
||||
subj, rdn_type, MBSTRING_UTF8, v.value, -1, -1, mval):
|
||||
subj, rdn_type, MBSTRING_UTF8,
|
||||
_ffi.cast("unsigned char *", v.value), -1, -1, mval):
|
||||
_raise_openssl_errors()
|
||||
|
||||
if not X509_NAME_entry_count(subj):
|
||||
@@ -236,7 +237,7 @@ def build_requestinfo(config, public_key_info):
|
||||
raise errors.CSRTemplateError(
|
||||
reason='Error on line %d of config file' % errorline[0])
|
||||
|
||||
dn_sect = NCONF_get_string(reqdata, 'req', 'distinguished_name')
|
||||
dn_sect = NCONF_get_string(reqdata, b'req', b'distinguished_name')
|
||||
if dn_sect == NULL:
|
||||
raise errors.CSRTemplateError(
|
||||
reason='Unable to find "distinguished_name" key in config')
|
||||
@@ -267,7 +268,7 @@ def build_requestinfo(config, public_key_info):
|
||||
X509V3_set_ctx(ext_ctx, NULL, NULL, req, NULL, 0)
|
||||
X509V3_set_nconf(ext_ctx, reqdata)
|
||||
|
||||
extn_section = NCONF_get_string(reqdata, "req", "req_extensions")
|
||||
extn_section = NCONF_get_string(reqdata, b"req", b"req_extensions")
|
||||
if extn_section != NULL:
|
||||
if not X509V3_EXT_REQ_add_nconf(
|
||||
reqdata, ext_ctx, extn_section, req):
|
||||
|
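The csrgen_ffi hunks above pass byte strings (b':,.', b'req', b'req_extensions') into the OpenSSL cffi wrappers because the underlying C functions expect char * data, not Python 3 text. A generic helper of the kind one might use for this conversion, shown only as a sketch and not part of the patch:

    def ensure_binary(value, encoding='utf-8'):
        """Return value as bytes; cffi-wrapped C APIs reject Python 3 text."""
        if isinstance(value, bytes):
            return value
        return value.encode(encoding)

    assert ensure_binary('req') == b'req'
    assert ensure_binary(b'req') == b'req'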
||||
@@ -54,8 +54,6 @@ from ipapython.install.core import group, knob, extend_knob
|
||||
from ipapython.install.common import step
|
||||
from ipapython.ipautil import (
|
||||
CalledProcessError,
|
||||
dir_exists,
|
||||
file_exists,
|
||||
realm_to_suffix,
|
||||
run,
|
||||
user_input,
|
||||
@@ -192,7 +190,7 @@ def nssldap_exists():
|
||||
for file_type in ['mandatory', 'optional']:
|
||||
try:
|
||||
for filename in function[file_type]:
|
||||
if file_exists(filename):
|
||||
if os.path.isfile(filename):
|
||||
files_found[function['function']].append(filename)
|
||||
if file_type == 'mandatory':
|
||||
retval = True
|
||||
@@ -605,7 +603,7 @@ def hardcode_ldap_server(cli_server):
|
||||
DNS Discovery didn't return a valid IPA server, hardcode a value into
|
||||
the file instead.
|
||||
"""
|
||||
if not file_exists(paths.LDAP_CONF):
|
||||
if not os.path.isfile(paths.LDAP_CONF):
|
||||
return
|
||||
|
||||
ldapconf = IPAChangeConf("IPA Installer")
|
||||
@@ -859,8 +857,8 @@ def configure_sssd_conf(
|
||||
sssd_enable_service(sssdconfig, 'ifp')
|
||||
|
||||
if (
|
||||
(options.conf_ssh and file_exists(paths.SSH_CONFIG)) or
|
||||
(options.conf_sshd and file_exists(paths.SSHD_CONFIG))
|
||||
(options.conf_ssh and os.path.isfile(paths.SSH_CONFIG)) or
|
||||
(options.conf_sshd and os.path.isfile(paths.SSHD_CONFIG))
|
||||
):
|
||||
try:
|
||||
sssdconfig.new_service('ssh')
|
||||
@@ -1032,7 +1030,7 @@ def change_ssh_config(filename, changes, sections):
|
||||
|
||||
|
||||
def configure_ssh_config(fstore, options):
|
||||
if not file_exists(paths.SSH_CONFIG):
|
||||
if not os.path.isfile(paths.SSH_CONFIG):
|
||||
logger.info("%s not found, skipping configuration", paths.SSH_CONFIG)
|
||||
return
|
||||
|
||||
@@ -1040,7 +1038,7 @@ def configure_ssh_config(fstore, options):
|
||||
|
||||
changes = {'PubkeyAuthentication': 'yes'}
|
||||
|
||||
if options.sssd and file_exists(paths.SSS_SSH_KNOWNHOSTSPROXY):
|
||||
if options.sssd and os.path.isfile(paths.SSS_SSH_KNOWNHOSTSPROXY):
|
||||
changes[
|
||||
'ProxyCommand'] = '%s -p %%p %%h' % paths.SSS_SSH_KNOWNHOSTSPROXY
|
||||
changes['GlobalKnownHostsFile'] = paths.SSSD_PUBCONF_KNOWN_HOSTS
|
||||
@@ -1055,7 +1053,7 @@ def configure_ssh_config(fstore, options):
|
||||
def configure_sshd_config(fstore, options):
|
||||
sshd = services.knownservices.sshd
|
||||
|
||||
if not file_exists(paths.SSHD_CONFIG):
|
||||
if not os.path.isfile(paths.SSHD_CONFIG):
|
||||
logger.info("%s not found, skipping configuration", paths.SSHD_CONFIG)
|
||||
return
|
||||
|
||||
@@ -1069,7 +1067,7 @@ def configure_sshd_config(fstore, options):
|
||||
'ChallengeResponseAuthentication': 'yes',
|
||||
}
|
||||
|
||||
if options.sssd and file_exists(paths.SSS_SSH_AUTHORIZEDKEYS):
|
||||
if options.sssd and os.path.isfile(paths.SSS_SSH_AUTHORIZEDKEYS):
|
||||
authorized_keys_changes = None
|
||||
|
||||
candidates = (
|
||||
@@ -1615,7 +1613,7 @@ def get_ca_certs_from_http(url, warn=True):
|
||||
result = run([paths.BIN_CURL, "-o", "-", url], capture_output=True)
|
||||
except CalledProcessError:
|
||||
raise errors.NoCertificateError(entry=url)
|
||||
stdout = result.output
|
||||
stdout = result.raw_output
|
||||
|
||||
try:
|
||||
certs = x509.load_certificate_list(stdout)
|
||||
@@ -1875,19 +1873,19 @@ def configure_firefox(options, statestore, domain):
|
||||
if options.firefox_dir is not None:
|
||||
pref_path = os.path.join(options.firefox_dir,
|
||||
FIREFOX_PREFERENCES_REL_PATH)
|
||||
if dir_exists(pref_path):
|
||||
if os.path.isdir(pref_path):
|
||||
preferences_dir = pref_path
|
||||
else:
|
||||
logger.error("Directory '%s' does not exists.", pref_path)
|
||||
else:
|
||||
# test if firefox is installed
|
||||
if file_exists(paths.FIREFOX):
|
||||
if os.path.isfile(paths.FIREFOX):
|
||||
|
||||
# find valid preferences path
|
||||
for path in [paths.LIB_FIREFOX, paths.LIB64_FIREFOX]:
|
||||
pref_path = os.path.join(path,
|
||||
FIREFOX_PREFERENCES_REL_PATH)
|
||||
if dir_exists(pref_path):
|
||||
if os.path.isdir(pref_path):
|
||||
preferences_dir = pref_path
|
||||
break
|
||||
else:
|
||||
@@ -3105,9 +3103,7 @@ def uninstall(options):
|
||||
"Removing Kerberos service principals from /etc/krb5.keytab")
|
||||
try:
|
||||
parser = RawConfigParser()
|
||||
fp = open(paths.IPA_DEFAULT_CONF, 'r')
|
||||
parser.readfp(fp)
|
||||
fp.close()
|
||||
parser.read(paths.IPA_DEFAULT_CONF)
|
||||
realm = parser.get('global', 'realm')
|
||||
run([paths.IPA_RMKEYTAB, "-k", paths.KRB5_KEYTAB, "-r", realm])
|
||||
except CalledProcessError as err:
|
||||
@@ -3287,7 +3283,7 @@ def uninstall(options):
|
||||
preferences_fname = statestore.restore_state(
|
||||
'firefox', 'preferences_fname')
|
||||
if preferences_fname is not None:
|
||||
if file_exists(preferences_fname):
|
||||
if os.path.isfile(preferences_fname):
|
||||
try:
|
||||
os.remove(preferences_fname)
|
||||
except Exception as e:
|
||||
|
||||
@@ -56,137 +56,148 @@ class CertUpdate(admintool.AdminTool):
|
||||
api.bootstrap(context='cli_installer', confdir=paths.ETC_IPA)
|
||||
api.finalize()
|
||||
|
||||
server = urlsplit(api.env.jsonrpc_uri).hostname
|
||||
ldap_uri = ipaldap.get_ldap_uri(server)
|
||||
ldap = ipaldap.LDAPClient(ldap_uri)
|
||||
api.Backend.rpcclient.connect()
|
||||
run_with_args(api)
|
||||
api.Backend.rpcclient.disconnect()
|
||||
|
||||
|
||||
def run_with_args(api):
|
||||
"""
|
||||
Run the certupdate procedure with the given API object.
|
||||
|
||||
:param api: API object with ldap2/rpcclient backend connected
|
||||
(such that Commands can be invoked)
|
||||
|
||||
"""
|
||||
server = urlsplit(api.env.jsonrpc_uri).hostname
|
||||
ldap_uri = ipaldap.get_ldap_uri(server)
|
||||
ldap = ipaldap.LDAPClient(ldap_uri)
|
||||
|
||||
tmpdir = tempfile.mkdtemp(prefix="tmp-")
|
||||
ccache_name = os.path.join(tmpdir, 'ccache')
|
||||
old_krb5ccname = os.environ.get('KRB5CCNAME')
|
||||
try:
|
||||
principal = str('host/%s@%s' % (api.env.host, api.env.realm))
|
||||
kinit_keytab(principal, paths.KRB5_KEYTAB, ccache_name)
|
||||
os.environ['KRB5CCNAME'] = ccache_name
|
||||
|
||||
tmpdir = tempfile.mkdtemp(prefix="tmp-")
|
||||
ccache_name = os.path.join(tmpdir, 'ccache')
|
||||
try:
|
||||
principal = str('host/%s@%s' % (api.env.host, api.env.realm))
|
||||
kinit_keytab(principal, paths.KRB5_KEYTAB, ccache_name)
|
||||
os.environ['KRB5CCNAME'] = ccache_name
|
||||
result = api.Command.ca_is_enabled(version=u'2.107')
|
||||
ca_enabled = result['result']
|
||||
except (errors.CommandError, errors.NetworkError):
|
||||
result = api.Command.env(server=True, version=u'2.0')
|
||||
ca_enabled = result['result']['enable_ra']
|
||||
|
||||
api.Backend.rpcclient.connect()
|
||||
try:
|
||||
result = api.Backend.rpcclient.forward(
|
||||
'ca_is_enabled',
|
||||
version=u'2.107',
|
||||
)
|
||||
ca_enabled = result['result']
|
||||
except (errors.CommandError, errors.NetworkError):
|
||||
result = api.Backend.rpcclient.forward(
|
||||
'env',
|
||||
server=True,
|
||||
version=u'2.0',
|
||||
)
|
||||
ca_enabled = result['result']['enable_ra']
|
||||
ldap.gssapi_bind()
|
||||
|
||||
ldap.gssapi_bind()
|
||||
certs = certstore.get_ca_certs(
|
||||
ldap, api.env.basedn, api.env.realm, ca_enabled)
|
||||
|
||||
certs = certstore.get_ca_certs(ldap, api.env.basedn,
|
||||
api.env.realm, ca_enabled)
|
||||
if ca_enabled:
|
||||
lwcas = api.Command.ca_find()['result']
|
||||
else:
|
||||
lwcas = []
|
||||
|
||||
if ca_enabled:
|
||||
lwcas = api.Command.ca_find()['result']
|
||||
else:
|
||||
lwcas = []
|
||||
finally:
|
||||
if old_krb5ccname is None:
|
||||
del os.environ['KRB5CCNAME']
|
||||
else:
|
||||
os.environ['KRB5CCNAME'] = old_krb5ccname
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
api.Backend.rpcclient.disconnect()
|
||||
finally:
|
||||
shutil.rmtree(tmpdir)
|
||||
|
||||
server_fstore = sysrestore.FileStore(paths.SYSRESTORE)
|
||||
if server_fstore.has_files():
|
||||
self.update_server(certs)
|
||||
try:
|
||||
# pylint: disable=import-error,ipa-forbidden-import
|
||||
from ipaserver.install import cainstance
|
||||
# pylint: enable=import-error,ipa-forbidden-import
|
||||
cainstance.add_lightweight_ca_tracking_requests(lwcas)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to add lightweight CA tracking requests")
|
||||
|
||||
self.update_client(certs)
|
||||
|
||||
def update_client(self, certs):
|
||||
self.update_file(paths.IPA_CA_CRT, certs)
|
||||
self.update_file(paths.KDC_CA_BUNDLE_PEM, certs)
|
||||
self.update_file(paths.CA_BUNDLE_PEM, certs)
|
||||
|
||||
ipa_db = certdb.NSSDatabase(api.env.nss_dir)
|
||||
|
||||
# Remove old IPA certs from /etc/ipa/nssdb
|
||||
for nickname in ('IPA CA', 'External CA cert'):
|
||||
while ipa_db.has_nickname(nickname):
|
||||
try:
|
||||
ipa_db.delete_cert(nickname)
|
||||
except ipautil.CalledProcessError as e:
|
||||
logger.error("Failed to remove %s from %s: %s",
|
||||
nickname, ipa_db.secdir, e)
|
||||
break
|
||||
|
||||
self.update_db(ipa_db.secdir, certs)
|
||||
|
||||
tasks.remove_ca_certs_from_systemwide_ca_store()
|
||||
tasks.insert_ca_certs_into_systemwide_ca_store(certs)
|
||||
|
||||
def update_server(self, certs):
|
||||
instance = '-'.join(api.env.realm.split('.'))
|
||||
self.update_db(
|
||||
paths.ETC_DIRSRV_SLAPD_INSTANCE_TEMPLATE % instance, certs)
|
||||
if services.knownservices.dirsrv.is_running():
|
||||
services.knownservices.dirsrv.restart(instance)
|
||||
|
||||
self.update_db(paths.HTTPD_ALIAS_DIR, certs)
|
||||
if services.knownservices.httpd.is_running():
|
||||
services.knownservices.httpd.restart()
|
||||
|
||||
criteria = {
|
||||
'cert-database': paths.PKI_TOMCAT_ALIAS_DIR,
|
||||
'cert-nickname': IPA_CA_NICKNAME,
|
||||
'ca-name': RENEWAL_CA_NAME
|
||||
}
|
||||
request_id = certmonger.get_request_id(criteria)
|
||||
if request_id is not None:
|
||||
timeout = api.env.startup_timeout + 60
|
||||
|
||||
logger.debug("resubmitting certmonger request '%s'", request_id)
|
||||
certmonger.resubmit_request(
|
||||
request_id, ca='dogtag-ipa-ca-renew-agent-reuse', profile='')
|
||||
try:
|
||||
state = certmonger.wait_for_request(request_id, timeout)
|
||||
except RuntimeError:
|
||||
raise admintool.ScriptError(
|
||||
"Resubmitting certmonger request '%s' timed out, "
|
||||
"please check the request manually" % request_id)
|
||||
ca_error = certmonger.get_request_value(request_id, 'ca-error')
|
||||
if state != 'MONITORING' or ca_error:
|
||||
raise admintool.ScriptError(
|
||||
"Error resubmitting certmonger request '%s', "
|
||||
"please check the request manually" % request_id)
|
||||
|
||||
logger.debug("modifying certmonger request '%s'", request_id)
|
||||
certmonger.modify(request_id, ca='dogtag-ipa-ca-renew-agent')
|
||||
|
||||
self.update_file(paths.CA_CRT, certs)
|
||||
self.update_file(paths.CACERT_PEM, certs)
|
||||
|
||||
def update_file(self, filename, certs, mode=0o444):
|
||||
certs = (c[0] for c in certs if c[2] is not False)
|
||||
server_fstore = sysrestore.FileStore(paths.SYSRESTORE)
|
||||
if server_fstore.has_files():
|
||||
update_server(certs)
|
||||
try:
|
||||
x509.write_certificate_list(certs, filename)
|
||||
except Exception as e:
|
||||
logger.error("failed to update %s: %s", filename, e)
|
||||
# pylint: disable=import-error,ipa-forbidden-import
|
||||
from ipaserver.install import cainstance
|
||||
# pylint: enable=import-error,ipa-forbidden-import
|
||||
cainstance.add_lightweight_ca_tracking_requests(lwcas)
|
||||
except Exception:
|
||||
logger.exception(
|
||||
"Failed to add lightweight CA tracking requests")
|
||||
|
||||
def update_db(self, path, certs):
|
||||
db = certdb.NSSDatabase(path)
|
||||
for cert, nickname, trusted, eku in certs:
|
||||
trust_flags = certstore.key_policy_to_trust_flags(
|
||||
trusted, True, eku)
|
||||
update_client(certs)
|
||||
|
||||
|
||||
def update_client(certs):
|
||||
update_file(paths.IPA_CA_CRT, certs)
|
||||
update_file(paths.KDC_CA_BUNDLE_PEM, certs)
|
||||
update_file(paths.CA_BUNDLE_PEM, certs)
|
||||
|
||||
ipa_db = certdb.NSSDatabase(api.env.nss_dir)
|
||||
|
||||
# Remove old IPA certs from /etc/ipa/nssdb
|
||||
for nickname in ('IPA CA', 'External CA cert'):
|
||||
while ipa_db.has_nickname(nickname):
|
||||
try:
|
||||
db.add_cert(cert, nickname, trust_flags)
|
||||
ipa_db.delete_cert(nickname)
|
||||
except ipautil.CalledProcessError as e:
|
||||
logger.error(
|
||||
"failed to update %s in %s: %s", nickname, path, e)
|
||||
"Failed to remove %s from %s: %s",
|
||||
nickname, ipa_db.secdir, e)
|
||||
break
|
||||
|
||||
update_db(ipa_db.secdir, certs)
|
||||
|
||||
tasks.remove_ca_certs_from_systemwide_ca_store()
|
||||
tasks.insert_ca_certs_into_systemwide_ca_store(certs)
|
||||
|
||||
|
||||
def update_server(certs):
|
||||
instance = '-'.join(api.env.realm.split('.'))
|
||||
update_db(paths.ETC_DIRSRV_SLAPD_INSTANCE_TEMPLATE % instance, certs)
|
||||
if services.knownservices.dirsrv.is_running():
|
||||
services.knownservices.dirsrv.restart(instance)
|
||||
|
||||
update_db(paths.HTTPD_ALIAS_DIR, certs)
|
||||
if services.knownservices.httpd.is_running():
|
||||
services.knownservices.httpd.restart()
|
||||
|
||||
criteria = {
|
||||
'cert-database': paths.PKI_TOMCAT_ALIAS_DIR,
|
||||
'cert-nickname': IPA_CA_NICKNAME,
|
||||
'ca-name': RENEWAL_CA_NAME,
|
||||
}
|
||||
request_id = certmonger.get_request_id(criteria)
|
||||
if request_id is not None:
|
||||
timeout = api.env.startup_timeout + 60
|
||||
|
||||
logger.debug("resubmitting certmonger request '%s'", request_id)
|
||||
certmonger.resubmit_request(
|
||||
request_id, ca='dogtag-ipa-ca-renew-agent-reuse', profile='')
|
||||
try:
|
||||
state = certmonger.wait_for_request(request_id, timeout)
|
||||
except RuntimeError:
|
||||
raise admintool.ScriptError(
|
||||
"Resubmitting certmonger request '%s' timed out, "
|
||||
"please check the request manually" % request_id)
|
||||
ca_error = certmonger.get_request_value(request_id, 'ca-error')
|
||||
if state != 'MONITORING' or ca_error:
|
||||
raise admintool.ScriptError(
|
||||
"Error resubmitting certmonger request '%s', "
|
||||
"please check the request manually" % request_id)
|
||||
|
||||
logger.debug("modifying certmonger request '%s'", request_id)
|
||||
certmonger.modify(request_id, ca='dogtag-ipa-ca-renew-agent')
|
||||
|
||||
update_file(paths.CA_CRT, certs)
|
||||
update_file(paths.CACERT_PEM, certs)
|
||||
|
||||
|
||||
def update_file(filename, certs, mode=0o444):
|
||||
certs = (c[0] for c in certs if c[2] is not False)
|
||||
try:
|
||||
x509.write_certificate_list(certs, filename)
|
||||
except Exception as e:
|
||||
logger.error("failed to update %s: %s", filename, e)
|
||||
|
||||
|
||||
def update_db(path, certs):
|
||||
db = certdb.NSSDatabase(path)
|
||||
for cert, nickname, trusted, eku in certs:
|
||||
trust_flags = certstore.key_policy_to_trust_flags(trusted, True, eku)
|
||||
try:
|
||||
db.add_cert(cert, nickname, trust_flags)
|
||||
except ipautil.CalledProcessError as e:
|
||||
logger.error("failed to update %s in %s: %s", nickname, path, e)
|
||||
|
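The certupdate hunk above splits the class methods into module-level run_with_args(), update_client() and update_server() functions so other code can reuse the logic. A hedged sketch of the calling convention the new docstring describes, with the rpcclient backend connected before the call; the import path of the refactored module is an assumption:

    from ipalib import api
    from ipaplatform.paths import paths
    from ipaclient.install.ipa_certupdate import run_with_args  # assumed location

    api.bootstrap(context='cli_installer', confdir=paths.ETC_IPA)
    api.finalize()
    api.Backend.rpcclient.connect()
    try:
        run_with_args(api)
    finally:
        api.Backend.rpcclient.disconnect()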
||||
@@ -132,7 +132,7 @@ class cert_request(CertRetrieveOverride):
|
||||
response = self.api.Command.cert_get_requestdata(
|
||||
profile_id=profile_id,
|
||||
principal=options.get('principal'),
|
||||
public_key_info=unicode(pubkey_info_b64))
|
||||
public_key_info=pubkey_info_b64)
|
||||
|
||||
req_info_b64 = response['result']['request_info']
|
||||
req_info = base64.b64decode(req_info_b64)
|
||||
@@ -143,9 +143,6 @@ class cert_request(CertRetrieveOverride):
|
||||
raise errors.CertificateOperationError(
|
||||
error=(_('Generated CSR was empty')))
|
||||
|
||||
# cert_request requires the CSR to be base64-encoded (but PEM
|
||||
# header and footer are not required)
|
||||
csr = unicode(base64.b64encode(csr))
|
||||
else:
|
||||
if database is not None or private_key is not None:
|
||||
raise errors.MutuallyExclusiveError(reason=_(
|
||||
@@ -209,6 +206,7 @@ class cert_find(MethodOverride):
|
||||
raise errors.MutuallyExclusiveError(
|
||||
reason=_("cannot specify both raw certificate and file"))
|
||||
if 'certificate' not in options and 'file' in options:
|
||||
options['certificate'] = x509.strip_header(options.pop('file'))
|
||||
options['certificate'] = x509.load_unknown_x509_certificate(
|
||||
options.pop('file'))
|
||||
|
||||
return super(cert_find, self).forward(*args, **options)
|
||||
|
||||
@@ -40,7 +40,7 @@ class certmap_match(MethodOverride):
|
||||
raise errors.MutuallyExclusiveError(
|
||||
reason=_("cannot specify both raw certificate and file"))
|
||||
if args:
|
||||
args = [x509.strip_header(args[0])]
|
||||
args = [x509.load_unknown_x509_certificate(args[0])]
|
||||
elif 'certificate' in options:
|
||||
args = [options.pop('certificate')]
|
||||
else:
|
||||
|
||||
@@ -11,7 +11,7 @@ from ipalib import errors
|
||||
from ipalib import output
|
||||
from ipalib import util
|
||||
from ipalib.frontend import Local, Str
|
||||
from ipalib.parameters import File, Principal
|
||||
from ipalib.parameters import Bytes, Principal
|
||||
from ipalib.plugable import Registry
|
||||
from ipalib.text import _
|
||||
from ipapython import dogtag
|
||||
@@ -52,7 +52,7 @@ class cert_get_requestdata(Local):
|
||||
label=_('Profile ID'),
|
||||
doc=_('CSR Generation Profile to use'),
|
||||
),
|
||||
File(
|
||||
Bytes(
|
||||
'public_key_info',
|
||||
label=_('Subject Public Key Info'),
|
||||
doc=_('DER-encoded SubjectPublicKeyInfo structure'),
|
||||
|
||||
@@ -62,7 +62,7 @@ def __get_part_param(rrtype, cmd, part, output_kw, default=None):
|
||||
def prompt_parts(rrtype, cmd, mod_dnsvalue=None):
|
||||
mod_parts = None
|
||||
if mod_dnsvalue is not None:
|
||||
name = record_name_format % rrtype.lower()
|
||||
name = record_name_format % unicode(rrtype.lower())
|
||||
mod_parts = cmd.api.Command.dnsrecord_split_parts(
|
||||
name, mod_dnsvalue)['result']
|
||||
|
||||
|
||||
@@ -586,7 +586,7 @@ def get_package(server_info, client):
|
||||
for plugin_cls in (_SchemaCommandPlugin, _SchemaObjectPlugin):
|
||||
for full_name in schema[plugin_cls.schema_key]:
|
||||
plugin = plugin_cls(schema, str(full_name))
|
||||
plugin = module.register()(plugin)
|
||||
plugin = module.register()(plugin) # pylint: disable=no-member
|
||||
sys.modules[module_name] = module
|
||||
|
||||
for full_name, topic in six.iteritems(schema['topics']):
|
||||
|
||||
@@ -46,6 +46,8 @@ class ACI(object):
|
||||
entry in LDAP. Has methods to parse an ACI string and export to an
|
||||
ACI String.
|
||||
"""
|
||||
__hash__ = None
|
||||
|
||||
def __init__(self,acistr=None):
|
||||
self.name = None
|
||||
self.source_group = None
|
||||
|
||||
107
ipalib/cli.py
@@ -38,6 +38,10 @@ import traceback
|
||||
import six
|
||||
from six.moves import input
|
||||
|
||||
from ipalib.util import (
|
||||
check_client_configuration, get_terminal_height, open_in_pager
|
||||
)
|
||||
|
||||
if six.PY3:
|
||||
unicode = str
|
||||
|
||||
@@ -55,7 +59,6 @@ from ipalib.constants import CLI_TAB, LDAP_GENERALIZED_TIME_FORMAT
|
||||
from ipalib.parameters import File, Str, Enum, Any, Flag
|
||||
from ipalib.text import _
|
||||
from ipalib import api # pylint: disable=unused-import
|
||||
from ipalib.util import check_client_configuration
|
||||
from ipapython.dnsutil import DNSName
|
||||
from ipapython.admintool import ScriptError
|
||||
|
||||
@@ -680,10 +683,39 @@ class textui(backend.Backend):
|
||||
self.print_line('')
|
||||
return selection
|
||||
|
||||
|
||||
class help(frontend.Local):
|
||||
"""
|
||||
Display help for a command or topic.
|
||||
"""
|
||||
class Writer(object):
|
||||
"""
|
||||
Writer abstraction
|
||||
"""
|
||||
def __init__(self, outfile):
|
||||
self.outfile = outfile
|
||||
self.buffer = []
|
||||
|
||||
@property
|
||||
def buffer_length(self):
|
||||
length = 0
|
||||
for line in self.buffer:
|
||||
length += len(line.split("\n"))
|
||||
return length
|
||||
|
||||
def append(self, string=u""):
|
||||
self.buffer.append(unicode(string))
|
||||
|
||||
def write(self):
|
||||
if self.buffer_length > get_terminal_height():
|
||||
data = "\n".join(self.buffer).encode("utf-8")
|
||||
open_in_pager(data)
|
||||
else:
|
||||
try:
|
||||
for line in self.buffer:
|
||||
print(line, file=self.outfile)
|
||||
except IOError:
|
||||
pass
|
||||
|
||||
takes_args = (
|
||||
Str('command?', cli_name='topic', label=_('Topic or Command'),
|
||||
@@ -702,7 +734,7 @@ class help(frontend.Local):
|
||||
parent_topic = None
|
||||
|
||||
for package in self.api.packages:
|
||||
module_name = '%s.%s' % (package.__name__, topic)
|
||||
module_name = '{0}.{1}'.format(package.__name__, topic)
|
||||
try:
|
||||
module = sys.modules[module_name]
|
||||
except KeyError:
|
||||
@@ -725,7 +757,8 @@ class help(frontend.Local):
|
||||
self._topics[topic_name][1] = mcl
|
||||
|
||||
def _on_finalize(self):
|
||||
# {topic: ["description", mcl, {"subtopic": ["description", mcl, [commands]]}]}
|
||||
# {topic: ["description", mcl, {
|
||||
# "subtopic": ["description", mcl, [commands]]}]}
|
||||
# {topic: ["description", mcl, [commands]]}
|
||||
self._topics = {}
|
||||
# [builtin_commands]
|
||||
@@ -749,22 +782,26 @@ class help(frontend.Local):
|
||||
self._topics[topic_name] = [doc, 0, [c]]
|
||||
mcl = max((self._topics[topic_name][1], len(c.name)))
|
||||
self._topics[topic_name][1] = mcl
|
||||
else: # a module grouped in a topic
|
||||
else: # a module grouped in a topic
|
||||
topic = self._get_topic(topic_name)
|
||||
mod_name = c.topic
|
||||
if topic_name in self._topics:
|
||||
if mod_name in self._topics[topic_name][2]:
|
||||
self._topics[topic_name][2][mod_name][2].append(c)
|
||||
else:
|
||||
self._topics[topic_name][2][mod_name] = [doc, 0, [c]]
|
||||
self._topics[topic_name][2][mod_name] = [
|
||||
doc, 0, [c]]
|
||||
self._count_topic_mcl(topic_name, mod_name)
|
||||
# count mcl for the subtopic
|
||||
mcl = max((self._topics[topic_name][2][mod_name][1], len(c.name)))
|
||||
mcl = max((
|
||||
self._topics[topic_name][2][mod_name][1],
|
||||
len(c.name)))
|
||||
self._topics[topic_name][2][mod_name][1] = mcl
|
||||
else:
|
||||
self._topics[topic_name] = [topic[0].split('\n', 1)[0],
|
||||
0,
|
||||
{mod_name: [doc, 0, [c]]}]
|
||||
self._topics[topic_name] = [
|
||||
topic[0].split('\n', 1)[0],
|
||||
0,
|
||||
{mod_name: [doc, 0, [c]]}]
|
||||
self._count_topic_mcl(topic_name, mod_name)
|
||||
else:
|
||||
self._builtins.append(c)
|
||||
@@ -778,8 +815,10 @@ class help(frontend.Local):
|
||||
def run(self, key=None, outfile=None, **options):
|
||||
if outfile is None:
|
||||
outfile = sys.stdout
|
||||
writer = self._writer(outfile)
|
||||
|
||||
writer = self.Writer(outfile)
|
||||
name = from_cli(key)
|
||||
|
||||
if key is None:
|
||||
self.api.parser.print_help(outfile)
|
||||
return
|
||||
@@ -804,33 +843,30 @@ class help(frontend.Local):
|
||||
if cmd_plugin.NO_CLI:
|
||||
continue
|
||||
mcl = max(mcl, len(cmd_plugin.name))
|
||||
writer('%s %s' % (to_cli(cmd_plugin.name).ljust(mcl),
|
||||
cmd_plugin.summary))
|
||||
writer.append('{0} {1}'.format(
|
||||
to_cli(cmd_plugin.name).ljust(mcl), cmd_plugin.summary))
|
||||
else:
|
||||
raise HelpError(topic=name)
|
||||
|
||||
def _writer(self, outfile):
|
||||
def writer(string=''):
|
||||
try:
|
||||
print(unicode(string), file=outfile)
|
||||
except IOError:
|
||||
pass
|
||||
return writer
|
||||
writer.write()
|
||||
|
||||
def print_topics(self, outfile):
|
||||
writer = self._writer(outfile)
|
||||
writer = self.Writer(outfile)
|
||||
|
||||
for t, topic in sorted(self._topics.items()):
|
||||
writer('%s %s' % (to_cli(t).ljust(self._mtl), topic[0]))
|
||||
writer.append('{0} {1}'.format(
|
||||
to_cli(t).ljust(self._mtl), topic[0]))
|
||||
writer.write()
|
||||
|
||||
def print_commands(self, topic, outfile):
|
||||
writer = self._writer(outfile)
|
||||
writer = self.Writer(outfile)
|
||||
|
||||
if topic in self._topics and type(self._topics[topic][2]) is dict:
|
||||
# we want to display topic which has subtopics
|
||||
for subtopic in self._topics[topic][2]:
|
||||
doc = self._topics[topic][2][subtopic][0]
|
||||
mcl = self._topics[topic][1]
|
||||
writer(' %s %s' % (to_cli(subtopic).ljust(mcl), doc))
|
||||
writer.append(' {0} {1}'.format(
|
||||
to_cli(subtopic).ljust(mcl), doc))
|
||||
else:
|
||||
# we want to display subtopic or a topic which has no subtopics
|
||||
if topic in self._topics:
|
||||
@@ -852,17 +888,20 @@ class help(frontend.Local):
|
||||
if topic not in self.Command and len(commands) == 0:
|
||||
raise HelpError(topic=topic)
|
||||
|
||||
writer(doc)
|
||||
writer.append(doc)
|
||||
if commands:
|
||||
writer()
|
||||
writer(_('Topic commands:'))
|
||||
writer.append()
|
||||
writer.append(_('Topic commands:'))
|
||||
for c in commands:
|
||||
writer(
|
||||
' %s %s' % (to_cli(c.name).ljust(mcl), c.summary))
|
||||
writer()
|
||||
writer(_('To get command help, use:'))
|
||||
writer(_(' ipa <command> --help'))
|
||||
writer()
|
||||
writer.append(
|
||||
' {0} {1}'.format(
|
||||
to_cli(c.name).ljust(mcl), c.summary))
|
||||
writer.append()
|
||||
writer.append(_('To get command help, use:'))
|
||||
writer.append(_(' ipa <command> --help'))
|
||||
writer.append()
|
||||
writer.write()
|
||||
|
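The help command now buffers its output in the nested Writer class and only pipes it through a pager when it would overflow the terminal. A minimal sketch of the Writer usage pattern; in the patch the class is nested inside the help plugin, so direct access as shown here is an assumption:

    import sys

    writer = Writer(sys.stdout)       # hypothetical direct access to the nested class
    writer.append('Topic commands:')
    writer.append()                   # blank separator line
    writer.append('  user-add   Add a new user')
    # Prints directly, or opens $PAGER when the buffer is taller than the terminal.
    writer.write()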
||||
|
||||
class show_mappings(frontend.Command):
|
||||
"""
|
||||
@@ -999,7 +1038,9 @@ class Collector(object):
|
||||
value = v + (value,)
|
||||
else:
|
||||
value = (v, value)
|
||||
# pylint: disable=unsupported-assignment-operation
|
||||
self.__options[name] = value
|
||||
# pylint: enable=unsupported-assignment-operation
|
||||
object.__setattr__(self, name, value)
|
||||
|
||||
def __todict__(self):
|
||||
|
||||
@@ -28,6 +28,7 @@ of the process.
|
||||
|
||||
For the per-request thread-local information, see `ipalib.request`.
|
||||
"""
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
from os import path
|
||||
@@ -39,6 +40,7 @@ from six.moves.urllib.parse import urlparse, urlunparse
|
||||
from six.moves.configparser import RawConfigParser, ParsingError
|
||||
# pylint: enable=import-error
|
||||
|
||||
from ipaplatform.tasks import tasks
|
||||
from ipapython.dn import DN
|
||||
from ipalib.base import check_name
|
||||
from ipalib.constants import (
|
||||
@@ -47,12 +49,6 @@ from ipalib.constants import (
|
||||
TLS_VERSIONS
|
||||
)
|
||||
from ipalib import errors
|
||||
try:
|
||||
# pylint: disable=ipa-forbidden-import
|
||||
from ipaplatform.tasks import tasks
|
||||
# pylint: enable=ipa-forbidden-import
|
||||
except ImportError:
|
||||
tasks = None
|
||||
|
||||
if six.PY3:
|
||||
unicode = str
|
||||
@@ -273,7 +269,9 @@ class Env(object):
|
||||
if type(value) not in (unicode, int, float, bool, type(None), DN):
|
||||
raise TypeError(key, value)
|
||||
object.__setattr__(self, key, value)
|
||||
# pylint: disable=unsupported-assignment-operation
|
||||
self.__d[key] = value
|
||||
# pylint: enable=unsupported-assignment-operation
|
||||
|
||||
def __getitem__(self, key):
|
||||
"""
|
||||
@@ -449,10 +447,7 @@ class Env(object):
|
||||
self.script = path.abspath(sys.argv[0])
|
||||
self.bin = path.dirname(self.script)
|
||||
self.home = os.environ.get('HOME', None)
|
||||
|
||||
# Set fips_mode only if ipaplatform module was loaded
|
||||
if tasks is not None:
|
||||
self.fips_mode = tasks.is_fips_enabled()
|
||||
self.fips_mode = tasks.is_fips_enabled()
|
||||
|
||||
# Merge in overrides:
|
||||
self._merge(**overrides)
|
||||
|
||||
@@ -286,6 +286,9 @@ IPA_CA_CN = u'ipa'
|
||||
IPA_CA_RECORD = "ipa-ca"
|
||||
IPA_CA_NICKNAME = 'caSigningCert cert-pki-ca'
|
||||
RENEWAL_CA_NAME = 'dogtag-ipa-ca-renew-agent'
|
||||
RENEWAL_REUSE_CA_NAME = 'dogtag-ipa-ca-renew-agent-reuse'
|
||||
# How long dbus clients should wait for CA certificate RPCs [seconds]
|
||||
CA_DBUS_TIMEOUT = 120
|
||||
|
||||
# regexp definitions
|
||||
PATTERN_GROUPUSER_NAME = '^[a-zA-Z0-9_.][a-zA-Z0-9_.-]*[a-zA-Z0-9_.$-]?$'
|
||||
|
||||
@@ -369,7 +369,7 @@ class ServerCommandError(PublicError):
|
||||
For example:
|
||||
|
||||
>>> e = CommandError(name='foobar')
|
||||
>>> raise ServerCommandError(error=e.message, server='https://localhost')
|
||||
>>> raise ServerCommandError(error=str(e), server='https://localhost')
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
ServerCommandError: error on server 'https://localhost': unknown command 'foobar'
|
||||
@@ -452,6 +452,18 @@ class EnvironmentError(PublicError):
|
||||
|
||||
errno = 912
|
||||
|
||||
|
||||
class SystemEncodingError(PublicError):
|
||||
"""
|
||||
**913** Raised when system encoding is not UTF-8
|
||||
"""
|
||||
|
||||
errno = 913
|
||||
format = _(
|
||||
"System encoding must be UTF-8, '%(encoding)s' is not supported. "
|
||||
"Set LC_ALL=\"C.UTF-8\", or LC_ALL=\"\" and LC_CTYPE=\"C.UTF-8\"."
|
||||
)
|
||||
|
||||
##############################################################################
|
||||
# 1000 - 1999: Authentication errors
|
||||
class AuthenticationError(PublicError):
|
||||
|
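SystemEncodingError pairs with the new start-up check added to ipalib.plugable later in this patch; the check is roughly the following sketch, mirroring that hunk:

    import sys
    from ipalib import errors

    fse = sys.getfilesystemencoding()
    if fse.lower() not in {'utf-8', 'utf8'}:
        # e.g. when LC_ALL/LC_CTYPE select a non-UTF-8 locale
        raise errors.SystemEncodingError(encoding=fse)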
||||
@@ -32,6 +32,7 @@ import shlex
|
||||
import subprocess
|
||||
import tempfile
|
||||
from ipalib import api
|
||||
from ipalib.constants import CA_DBUS_TIMEOUT
|
||||
from ipapython.dn import DN
|
||||
from ipaplatform.paths import paths
|
||||
from ipaplatform import services
|
||||
@@ -507,37 +508,53 @@ def stop_tracking(secdir=None, request_id=None, nickname=None, certfile=None):
|
||||
request.parent.obj_if.remove_request(request.path)
|
||||
|
||||
|
||||
def modify(request_id, ca=None, profile=None):
|
||||
if ca or profile:
|
||||
def modify(request_id, ca=None, profile=None, template_v2=None):
|
||||
update = {}
|
||||
if ca is not None:
|
||||
cm = _certmonger()
|
||||
update['CA'] = cm.obj_if.find_ca_by_nickname(ca)
|
||||
if profile is not None:
|
||||
update['template-profile'] = profile
|
||||
if template_v2 is not None:
|
||||
update['template-ms-certificate-template'] = template_v2
|
||||
|
||||
if len(update) > 0:
|
||||
request = _get_request({'nickname': request_id})
|
||||
request.obj_if.modify(update)
|
||||
|
||||
|
||||
def resubmit_request(
|
||||
request_id,
|
||||
ca=None,
|
||||
profile=None,
|
||||
template_v2=None,
|
||||
is_ca=False):
|
||||
"""
|
||||
:param request_id: the certmonger numeric request ID
|
||||
:param ca: the nickname for the certmonger CA, e.g. IPA or SelfSign
|
||||
:param profile: the profile to use, e.g. SubCA. For requests using the
|
||||
Dogtag CA, this is the profile to use. This also causes
|
||||
the Microsoft certificate template name extension to be added to the
|
||||
CSR (for telling AD CS what template to use).
|
||||
:param template_v2: Microsoft V2 template specifier extension value.
|
||||
Format: <oid>:<major-version>[:<minor-version>]
|
||||
:param is_ca: boolean that if True adds the CA basic constraint
|
||||
"""
|
||||
request = _get_request({'nickname': request_id})
|
||||
if request:
|
||||
update = {}
|
||||
if ca is not None:
|
||||
cm = _certmonger()
|
||||
update['CA'] = cm.obj_if.find_ca_by_nickname(ca)
|
||||
if profile is not None:
|
||||
update['template-profile'] = profile
|
||||
request.obj_if.modify(update)
|
||||
if template_v2 is not None:
|
||||
update['template-ms-certificate-template'] = template_v2
|
||||
if is_ca:
|
||||
update['template-is-ca'] = True
|
||||
update['template-ca-path-length'] = -1 # no path length
|
||||
|
||||
|
||||
def resubmit_request(request_id, ca=None, profile=None, is_ca=False):
|
||||
"""
|
||||
:param request_id: the certmonger numeric request ID
|
||||
:param ca: the nickname for the certmonger CA, e.g. IPA or SelfSign
|
||||
:param profile: the dogtag template profile to use, e.g. SubCA
|
||||
:param is_ca: boolean that if True adds the CA basic constraint
|
||||
"""
|
||||
request = _get_request({'nickname': request_id})
|
||||
if request:
|
||||
if ca or profile or is_ca:
|
||||
update = {}
|
||||
if ca is not None:
|
||||
cm = _certmonger()
|
||||
update['CA'] = cm.obj_if.find_ca_by_nickname(ca)
|
||||
if profile is not None:
|
||||
update['template-profile'] = profile
|
||||
if is_ca:
|
||||
update['template-is-ca'] = True
|
||||
update['template-ca-path-length'] = -1 # no path length
|
||||
if len(update) > 0:
|
||||
request.obj_if.modify(update)
|
||||
request.obj_if.resubmit()
|
||||
|
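The reworked modify() and resubmit_request() above accept the new template_v2 keyword and build a single update dict before talking to certmonger over D-Bus. A hedged usage sketch matching the certupdate code earlier in this patch; the import path is an assumption:

    from ipalib.install import certmonger   # assumed module path

    request_id = certmonger.get_request_id({
        'cert-nickname': 'caSigningCert cert-pki-ca',
        'ca-name': 'dogtag-ipa-ca-renew-agent',
    })
    if request_id is not None:
        # Re-issue the tracking request through the "reuse" helper ...
        certmonger.resubmit_request(
            request_id, ca='dogtag-ipa-ca-renew-agent-reuse', profile='')
        # ... then switch the request back to the normal renewal CA.
        certmonger.modify(request_id, ca='dogtag-ipa-ca-renew-agent')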
||||
@@ -604,7 +621,9 @@ def modify_ca_helper(ca_name, helper):
|
||||
old_helper = ca_iface.Get('org.fedorahosted.certmonger.ca',
|
||||
'external-helper')
|
||||
ca_iface.Set('org.fedorahosted.certmonger.ca',
|
||||
'external-helper', helper)
|
||||
'external-helper', helper,
|
||||
# Give dogtag extra time to generate cert
|
||||
timeout=CA_DBUS_TIMEOUT)
|
||||
return old_helper
|
||||
|
||||
|
||||
|
||||
@@ -129,11 +129,6 @@ class PublicMessage(UserWarning):
|
||||
data=self.kw,
|
||||
)
|
||||
|
||||
if six.PY3:
|
||||
@property
|
||||
def message(self):
|
||||
return str(self)
|
||||
|
||||
|
||||
class VersionMissing(PublicMessage):
|
||||
"""
|
||||
|
||||
@@ -115,12 +115,15 @@ from ipalib.base import check_name
|
||||
from ipalib.plugable import ReadOnly, lock
|
||||
from ipalib.errors import ConversionError, RequirementError, ValidationError
|
||||
from ipalib.errors import (
|
||||
PasswordMismatch, Base64DecodeError, CertificateFormatError
|
||||
PasswordMismatch, Base64DecodeError, CertificateFormatError,
|
||||
CertificateOperationError
|
||||
)
|
||||
from ipalib.constants import TYPE_ERROR, CALLABLE_ERROR, LDAP_GENERALIZED_TIME_FORMAT
|
||||
from ipalib.text import Gettext, FixMe
|
||||
from ipalib.util import json_serialize, validate_idna_domain
|
||||
from ipalib.x509 import load_der_x509_certificate, IPACertificate
|
||||
from ipalib.x509 import (
|
||||
load_der_x509_certificate, IPACertificate, default_backend)
|
||||
from ipalib.util import strip_csr_header
|
||||
from ipapython import kerberos
|
||||
from ipapython.dn import DN
|
||||
from ipapython.dnsutil import DNSName
|
||||
@@ -848,8 +851,10 @@ class Param(ReadOnly):
|
||||
"""
|
||||
Convert a single scalar value.
|
||||
"""
|
||||
if type(value) in self.allowed_types:
|
||||
return value
|
||||
for t in self.allowed_types:
|
||||
if isinstance(value, t):
|
||||
return value
|
||||
|
||||
raise ConversionError(name=self.name, error=ugettext(self.type_error))
|
||||
|
||||
def validate(self, value, supplied=None):
|
||||
@@ -879,7 +884,10 @@ class Param(ReadOnly):
|
||||
self._validate_scalar(value)
|
||||
|
||||
def _validate_scalar(self, value, index=None):
|
||||
if type(value) not in self.allowed_types:
|
||||
for t in self.allowed_types:
|
||||
if isinstance(value, t):
|
||||
break
|
||||
else:
|
||||
raise TypeError(
|
||||
TYPE_ERROR % (self.name, self.type, value, type(value))
|
||||
)
|
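The switch from exact type() membership tests to isinstance() above lets parameter values be subclasses of the allowed types (for example wrapper classes registered against the cryptography interfaces). A tiny illustration of the difference, not taken from the patch:

    class TaggedBytes(bytes):
        """Hypothetical subclass standing in for wrapper types."""

    value = TaggedBytes(b'payload')
    print(type(value) in (bytes,))     # False: exact-type check rejects the subclass
    print(isinstance(value, bytes))    # True: isinstance accepts it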
||||
@@ -1227,7 +1235,7 @@ class Decimal(Number):
|
||||
def _enforce_precision(self, value):
|
||||
assert type(value) is decimal.Decimal
|
||||
if self.precision is not None:
|
||||
quantize_exp = decimal.Decimal(10) ** -self.precision
|
||||
quantize_exp = decimal.Decimal(10) ** -int(self.precision)
|
||||
try:
|
||||
value = value.quantize(quantize_exp)
|
||||
except decimal.DecimalException as e:
|
||||
@@ -1447,6 +1455,60 @@ class Certificate(Param):
|
||||
return super(Certificate, self)._convert_scalar(value)
|
||||
|
||||
|
||||
class CertificateSigningRequest(Param):
|
||||
type = crypto_x509.CertificateSigningRequest
|
||||
type_error = _('must be a certificate signing request')
|
||||
allowed_types = (crypto_x509.CertificateSigningRequest, bytes, unicode)
|
||||
|
||||
def __extract_der_from_input(self, value):
|
||||
"""
|
||||
Tries to get the DER representation of whatever we receive as an input
|
||||
|
||||
:param value:
|
||||
bytes instance containing something we hope is a certificate
|
||||
signing request
|
||||
:returns:
|
||||
base64-decoded representation of whatever we found in case input
|
||||
had been something other than DER or something which resembles
|
||||
DER, in which case we would just return input
|
||||
"""
|
||||
try:
|
||||
value.decode('utf-8')
|
||||
except UnicodeDecodeError:
|
||||
# possibly DER-encoded CSR or something similar
|
||||
return value
|
||||
|
||||
value = strip_csr_header(value)
|
||||
return base64.b64decode(value)
|
||||
|
||||
def _convert_scalar(self, value, index=None):
|
||||
"""
|
||||
:param value:
|
||||
either DER csr, base64-encoded csr or an object implementing the
|
||||
cryptography.CertificateSigningRequest interface
|
||||
:returns:
|
||||
an object with the cryptography.CertificateSigningRequest interface
|
||||
"""
|
||||
if isinstance(value, unicode):
|
||||
try:
|
||||
value = value.encode('ascii')
|
||||
except UnicodeDecodeError:
|
||||
raise CertificateOperationError('not a valid CSR')
|
||||
|
||||
if isinstance(value, bytes):
|
||||
# try to extract DER from whatever we got
|
||||
value = self.__extract_der_from_input(value)
|
||||
try:
|
||||
value = crypto_x509.load_der_x509_csr(
|
||||
value, backend=default_backend())
|
||||
except ValueError as e:
|
||||
raise CertificateOperationError(
|
||||
error=_("Failure decoding Certificate Signing Request:"
|
||||
" %s") % e)
|
||||
|
||||
return super(CertificateSigningRequest, self)._convert_scalar(value)
|
||||
|
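The new CertificateSigningRequest parameter accepts a cryptography CSR object, raw DER, or base64/PEM text and normalizes them all to a cryptography object. A hedged sketch of the same normalization outside the framework, using the strip_csr_header() helper added to ipalib.util later in this patch; the file name is a placeholder:

    import base64
    from cryptography import x509 as crypto_x509
    from cryptography.hazmat.backends import default_backend
    from ipalib.util import strip_csr_header

    with open('request.csr', 'rb') as f:      # placeholder path, PEM-encoded CSR
        pem = f.read()

    der = base64.b64decode(strip_csr_header(pem))
    csr = crypto_x509.load_der_x509_csr(der, backend=default_backend())
    print(csr.subject)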
||||
|
||||
class Str(Data):
|
||||
"""
|
||||
A parameter for Unicode text (stored in the ``unicode`` type).
|
||||
|
||||
@@ -1,56 +1,8 @@
|
||||
# Authors:
|
||||
# Rob Crittenden <rcritten@redhat.com>
|
||||
#
|
||||
# Copyright (C) 2010 Red Hat
|
||||
# see file 'COPYING' for use and warranty information
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
|
||||
import binascii
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
import cryptography.x509
|
||||
|
||||
|
||||
def strip_header(csr):
|
||||
"""
|
||||
Remove the header and footer (and surrounding material) from a CSR.
|
||||
"""
|
||||
headerlen = 40
|
||||
s = csr.find("-----BEGIN NEW CERTIFICATE REQUEST-----")
|
||||
if s == -1:
|
||||
headerlen = 36
|
||||
s = csr.find("-----BEGIN CERTIFICATE REQUEST-----")
|
||||
if s >= 0:
|
||||
e = csr.find("-----END")
|
||||
csr = csr[s+headerlen:e]
|
||||
|
||||
return csr
|
||||
|
||||
|
||||
def load_certificate_request(data):
|
||||
"""
|
||||
Load a PEM or base64-encoded PKCS #10 certificate request.
|
||||
|
||||
:return: a python-cryptography ``Certificate`` object.
|
||||
:raises: ``ValueError`` if unable to load the request
|
||||
|
||||
"""
|
||||
data = strip_header(data)
|
||||
try:
|
||||
data = binascii.a2b_base64(data)
|
||||
except binascii.Error as e:
|
||||
raise ValueError(e)
|
||||
return cryptography.x509.load_der_x509_csr(data, default_backend())
|
||||
print(
|
||||
"ipalib.pkcs10 module is deprecated and will be removed in FreeIPA 4.6. "
|
||||
"To load CSRs, please, use python-cryptography instead.",
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
@@ -485,6 +485,11 @@ class API(ReadOnly):
|
||||
handler.setFormatter(ipa_log_manager.Formatter(LOGGING_FORMAT_STDERR))
|
||||
root_logger.addHandler(handler)
|
||||
|
||||
# check after logging is set up but before we create files.
|
||||
fse = sys.getfilesystemencoding()
|
||||
if fse.lower() not in {'utf-8', 'utf8'}:
|
||||
raise errors.SystemEncodingError(encoding=fse)
|
||||
|
||||
# Add file handler:
|
||||
if self.env.mode in ('dummy', 'unit_test'):
|
||||
return # But not if in unit-test mode
|
||||
@@ -515,7 +520,7 @@ class API(ReadOnly):
|
||||
Add global options to an optparse.OptionParser instance.
|
||||
"""
|
||||
def config_file_callback(option, opt, value, parser):
|
||||
if not ipautil.file_exists(value):
|
||||
if not os.path.isfile(value):
|
||||
parser.error(
|
||||
_("%(filename)s: file not found") % dict(filename=value))
|
||||
|
||||
|
||||
@@ -197,6 +197,10 @@ def xml_wrap(value, version):
|
||||
return base64.b64encode(
|
||||
value.public_bytes(x509_Encoding.DER)).decode('ascii')
|
||||
|
||||
if isinstance(value, crypto_x509.CertificateSigningRequest):
|
||||
return base64.b64encode(
|
||||
value.public_bytes(x509_Encoding.DER)).decode('ascii')
|
||||
|
||||
assert type(value) in (unicode, float, bool, type(None)) + six.integer_types
|
||||
return value
|
||||
|
||||
@@ -325,6 +329,7 @@ class _JSONPrimer(dict):
|
||||
tuple: self._enc_list,
|
||||
dict: self._enc_dict,
|
||||
crypto_x509.Certificate: self._enc_certificate,
|
||||
crypto_x509.CertificateSigningRequest: self._enc_certificate,
|
||||
})
|
||||
# int, long
|
||||
for t in six.integer_types:
|
||||
@@ -556,7 +561,7 @@ class SSLTransport(LanguageAwareTransport):
|
||||
|
||||
conn = create_https_connection(
|
||||
host, 443,
|
||||
api.env.tls_ca_cert,
|
||||
getattr(context, 'ca_certfile', None),
|
||||
tls_version_min=api.env.tls_version_min,
|
||||
tls_version_max=api.env.tls_version_max)
|
||||
|
||||
|
||||
@@ -37,6 +37,7 @@ if __name__ == '__main__':
|
||||
"ipalib.install",
|
||||
],
|
||||
install_requires=[
|
||||
"ipaplatform",
|
||||
"ipapython",
|
||||
"netaddr",
|
||||
"pyasn1",
|
||||
|
||||
@@ -146,6 +146,7 @@ class LazyText(object):
|
||||
"""
|
||||
|
||||
__slots__ = ('domain', 'localedir', 'key', 'args')
|
||||
__hash__ = None
|
||||
|
||||
def __init__(self, domain=None, localedir=None):
|
||||
"""
|
||||
|
||||
@@ -35,6 +35,10 @@ import dns
|
||||
import encodings
|
||||
import sys
|
||||
import ssl
|
||||
import termios
|
||||
import fcntl
|
||||
import struct
|
||||
import subprocess
|
||||
|
||||
import netaddr
|
||||
from dns import resolver, rdatatype
|
||||
@@ -55,10 +59,7 @@ from ipalib.constants import (
|
||||
TLS_VERSIONS, TLS_VERSION_MINIMAL, TLS_HIGH_CIPHERS
|
||||
)
|
||||
from ipalib.text import _
|
||||
# pylint: disable=ipa-forbidden-import
|
||||
from ipalib.install import sysrestore
|
||||
from ipaplatform.paths import paths
|
||||
# pylint: enable=ipa-forbidden-import
|
||||
from ipapython.ssh import SSHPublicKey
|
||||
from ipapython.dn import DN, RDN
|
||||
from ipapython.dnsutil import DNSName
|
||||
@@ -68,6 +69,9 @@ from ipapython.admintool import ScriptError
|
||||
if six.PY3:
|
||||
unicode = str
|
||||
|
||||
_IPA_CLIENT_SYSRESTORE = "/var/lib/ipa-client/sysrestore"
|
||||
_IPA_DEFAULT_CONF = "/etc/ipa/default.conf"
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -154,6 +158,23 @@ def isvalid_base64(data):
|
||||
else:
|
||||
return True
|
||||
|
||||
|
||||
def strip_csr_header(csr):
|
||||
"""
|
||||
Remove the header and footer (and surrounding material) from a CSR.
|
||||
"""
|
||||
headerlen = 40
|
||||
s = csr.find(b"-----BEGIN NEW CERTIFICATE REQUEST-----")
|
||||
if s == -1:
|
||||
headerlen = 36
|
||||
s = csr.find(b"-----BEGIN CERTIFICATE REQUEST-----")
|
||||
if s >= 0:
|
||||
e = csr.find(b"-----END")
|
||||
csr = csr[s + headerlen:e]
|
||||
|
||||
return csr
|
||||
|
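strip_csr_header() is the bytes-aware replacement for the old ipalib.pkcs10.strip_header() removed earlier in this patch; it returns the base64 body between the PEM header and footer, or the input unchanged when no header is found. A quick illustration with a made-up, truncated payload:

    pem = (b"-----BEGIN CERTIFICATE REQUEST-----\n"
           b"MIIBWjCCAQ8CAQAw...\n"                     # made-up base64 body
           b"-----END CERTIFICATE REQUEST-----\n")
    body = strip_csr_header(pem)
    # body now holds only the base64 payload, ready for base64.b64decode()
    print(body)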
||||
|
||||
def validate_ipaddr(ipaddr):
|
||||
"""
|
||||
Check to see if the given IP address is a valid IPv4 or IPv6 address.
|
||||
@@ -292,6 +313,10 @@ def create_https_connection(
|
||||
raise RuntimeError("cafile argument is required to perform server "
|
||||
"certificate verification")
|
||||
|
||||
if not os.path.isfile(cafile) or not os.access(cafile, os.R_OK):
|
||||
raise RuntimeError("cafile \'{file}\' doesn't exist or is unreadable".
|
||||
format(file=cafile))
|
||||
|
||||
# remove the slice of negating protocol options according to options
|
||||
tls_span = get_proper_tls_version_span(tls_version_min, tls_version_max)
|
||||
|
||||
@@ -1078,8 +1103,9 @@ def check_client_configuration():
|
||||
"""
|
||||
Check if IPA client is configured on the system.
|
||||
"""
|
||||
fstore = sysrestore.FileStore(paths.IPA_CLIENT_SYSRESTORE)
|
||||
if not fstore.has_files() and not os.path.exists(paths.IPA_DEFAULT_CONF):
|
||||
if (not os.path.isfile(paths.IPA_DEFAULT_CONF) or
|
||||
not os.path.isdir(paths.IPA_CLIENT_SYSRESTORE) or
|
||||
not os.listdir(paths.IPA_CLIENT_SYSRESTORE)):
|
||||
raise ScriptError('IPA client is not configured on this system')
|
||||
|
||||
|
||||
@@ -1132,3 +1158,40 @@ def no_matching_interface_for_ip_address_warning(addr_list):
|
||||
"{}".format(ip),
|
||||
file=sys.stderr
|
||||
)
|
||||
|
||||
|
||||
def get_terminal_height(fd=1):
|
||||
"""
|
||||
Get current terminal height
|
||||
|
||||
Args:
|
||||
fd (int): file descriptor. Default: 1 (stdout)
|
||||
|
||||
Returns:
|
||||
int: Terminal height
|
||||
"""
|
||||
try:
|
||||
return struct.unpack(
|
||||
'hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, b'1234'))[0]
|
||||
except (IOError, OSError, struct.error):
|
||||
return int(os.environ.get("LINES", 25))
|
||||
|
||||
|
||||
def open_in_pager(data):
|
||||
"""
|
||||
Open text data in pager
|
||||
|
||||
Args:
|
||||
data (bytes): data to view in pager
|
||||
|
||||
Returns:
|
||||
None
|
||||
"""
|
||||
pager = os.environ.get("PAGER", "less")
|
||||
pager_process = subprocess.Popen([pager], stdin=subprocess.PIPE)
|
||||
|
||||
try:
|
||||
pager_process.stdin.write(data)
|
||||
pager_process.communicate()
|
||||
except IOError:
|
||||
pass
|
||||
|
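get_terminal_height() and open_in_pager() are the two building blocks the CLI Writer class (in the ipalib/cli.py hunk above) combines. A minimal sketch of the same pattern, assuming both helpers are imported from ipalib.util:

    from ipalib.util import get_terminal_height, open_in_pager

    lines = ['row {0}'.format(i) for i in range(200)]
    text = '\n'.join(lines)
    if len(lines) > get_terminal_height():
        # too tall for the screen: hand the bytes to $PAGER (default: less)
        open_in_pager(text.encode('utf-8'))
    else:
        print(text)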
||||
@@ -51,7 +51,6 @@ from pyasn1_modules import rfc2315, rfc2459
|
||||
import six
|
||||
|
||||
from ipalib import errors
|
||||
from ipapython.dn import DN
|
||||
from ipapython.dnsutil import DNSName
|
||||
|
||||
if six.PY3:
|
||||
@@ -76,33 +75,6 @@ EKU_PLACEHOLDER = '1.3.6.1.4.1.3319.6.10.16'
|
||||
SAN_UPN = '1.3.6.1.4.1.311.20.2.3'
|
||||
SAN_KRB5PRINCIPALNAME = '1.3.6.1.5.2.2'
|
||||
|
||||
_subject_base = None
|
||||
|
||||
def subject_base():
|
||||
from ipalib import api
|
||||
global _subject_base
|
||||
|
||||
if _subject_base is None:
|
||||
config = api.Command['config_show']()['result']
|
||||
_subject_base = DN(config['ipacertificatesubjectbase'][0])
|
||||
|
||||
return _subject_base
|
||||
|
||||
def strip_header(pem):
|
||||
"""
|
||||
Remove the header and footer from a certificate.
|
||||
"""
|
||||
regexp = (
|
||||
u"^-----BEGIN CERTIFICATE-----(.*?)-----END CERTIFICATE-----"
|
||||
)
|
||||
if isinstance(pem, bytes):
|
||||
regexp = regexp.encode('ascii')
|
||||
s = re.search(regexp, pem, re.MULTILINE | re.DOTALL)
|
||||
if s is not None:
|
||||
return s.group(1)
|
||||
else:
|
||||
return pem
|
||||
|
||||
|
||||
@crypto_utils.register_interface(crypto_x509.Certificate)
|
||||
class IPACertificate(object):
|
||||
@@ -123,18 +95,21 @@ class IPACertificate(object):
|
||||
# some field types encode-decoding is not strongly defined
|
||||
self._subject = self.__get_der_field('subject')
|
||||
self._issuer = self.__get_der_field('issuer')
|
||||
self._serial_number = self.__get_der_field('serialNumber')
|
||||
|
||||
def __getstate__(self):
|
||||
state = {
|
||||
'_cert': self.public_bytes(Encoding.DER),
|
||||
'_subject': self.subject_bytes,
|
||||
'_issuer': self.issuer_bytes,
|
||||
'_serial_number': self._serial_number,
|
||||
}
|
||||
return state
|
||||
|
||||
def __setstate__(self, state):
|
||||
self._subject = state['_subject']
|
||||
self._issuer = state['_issuer']
|
||||
self._serial_number = state['_serial_number']
|
||||
self._cert = crypto_x509.load_der_x509_certificate(
|
||||
state['_cert'], backend=default_backend())
|
||||
|
||||
@@ -215,6 +190,10 @@ class IPACertificate(object):
|
||||
def serial_number(self):
|
||||
return self._cert.serial_number
|
||||
|
||||
@property
|
||||
def serial_number_bytes(self):
|
||||
return self._serial_number
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
return self._cert.version
|
||||
@@ -295,8 +274,11 @@ class IPACertificate(object):
|
||||
|
||||
@property
|
||||
def extended_key_usage_bytes(self):
|
||||
eku = self.extended_key_usage
|
||||
if eku is None:
|
||||
return
|
||||
|
||||
ekurfc = rfc2459.ExtKeyUsageSyntax()
|
||||
eku = self.extended_key_usage or {EKU_PLACEHOLDER}
|
||||
for i, oid in enumerate(eku):
|
||||
ekurfc[i] = univ.ObjectIdentifier(oid)
|
||||
ekurfc = encoder.encode(ekurfc)
|
||||
@@ -472,7 +454,7 @@ def pkcs7_to_certs(data, datatype=PEM):
|
||||
"""
|
||||
if datatype == PEM:
|
||||
match = re.match(
|
||||
r'-----BEGIN PKCS7-----(.*?)-----END PKCS7-----',
|
||||
br'-----BEGIN PKCS7-----(.*?)-----END PKCS7-----',
|
||||
data,
|
||||
re.DOTALL)
|
||||
if not match:
|
||||
|
||||
@@ -1 +1,12 @@
|
||||
include $(top_srcdir)/Makefile.python.am
|
||||
|
||||
EXTRA_DIST = override.py.in
|
||||
|
||||
all-local: override.py
|
||||
dist-hook: override.py
|
||||
install-exec-local: override.py
|
||||
|
||||
override.py: override.py.in $(top_builddir)/$(CONFIG_STATUS)
|
||||
$(AM_V_GEN)sed \
|
||||
-e 's|@IPAPLATFORM[@]|$(IPAPLATFORM)|g' \
|
||||
$< > $@
|
||||
|
||||
11
ipaplatform/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
||||
#
|
||||
# Copyright (C) 2017 FreeIPA Contributors see COPYING for license
|
||||
#
|
||||
"""ipaplatform namespace package
|
||||
|
||||
In the presence of a namespace package, any code in this module will be
|
||||
ignored.
|
||||
"""
|
||||
__import__('pkg_resources').declare_namespace(__name__)
|
||||
|
||||
NAME = None # initialized by IpaMetaImporter
|
||||
150
ipaplatform/_importhook.py
Normal file
@@ -0,0 +1,150 @@
|
||||
#
|
||||
# Copyright (C) 2017 FreeIPA Contributors see COPYING for license
|
||||
#
|
||||
"""Meta import hook for ipaplatform.
|
||||
|
||||
Known Linux distros with /etc/os-release
|
||||
----------------------------------------
|
||||
|
||||
- alpine
|
||||
- centos (like rhel, fedora)
|
||||
- debian
|
||||
- fedora
|
||||
- rhel
|
||||
- ubuntu (like debian)
|
||||
"""
|
||||
|
||||
import importlib
|
||||
import io
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
|
||||
import ipaplatform
|
||||
try:
|
||||
from ipaplatform.override import OVERRIDE
|
||||
except ImportError:
|
||||
OVERRIDE = None
|
||||
|
||||
|
||||
_osrelease_line = re.compile(
|
||||
u"^(?!#)(?P<name>[a-zA-Z0-9_]+)="
|
||||
u"(?P<quote>[\"\']?)(?P<value>.+)(?P=quote)$"
|
||||
)
|
||||
|
||||
|
||||
class IpaMetaImporter(object):
|
||||
"""Meta import hook and platform detector.
|
||||
|
||||
The meta import hook uses /etc/os-release to auto-detect the best
|
||||
matching ipaplatform provider. It is compatible with external namespace
|
||||
packages, too.
|
||||
"""
|
||||
modules = {
|
||||
'ipaplatform.constants',
|
||||
'ipaplatform.paths',
|
||||
'ipaplatform.services',
|
||||
'ipaplatform.tasks'
|
||||
}
|
||||
|
||||
bsd_family = (
|
||||
'freebsd',
|
||||
'openbsd',
|
||||
'netbsd',
|
||||
'dragonfly',
|
||||
'gnukfreebsd'
|
||||
)
|
||||
|
||||
def __init__(self, override=OVERRIDE):
|
||||
self.override = override
|
||||
self.platform_ids = self._get_platform_ids(self.override)
|
||||
self.platform = self._get_platform(self.platform_ids)
|
||||
|
||||
def _get_platform_ids(self, override):
|
||||
platforms = []
|
||||
# allow RPM and Debian packages to override platform
|
||||
if override is not None:
|
||||
platforms.append(override)
|
||||
|
||||
if sys.platform.startswith('linux'):
|
||||
# Linux, get distribution from /etc/os-release
|
||||
try:
|
||||
platforms.extend(self._parse_osrelease())
|
||||
except Exception as e:
|
||||
warnings.warn("Failed to read /etc/os-release: {}".format(e))
|
||||
elif sys.platform == 'win32':
|
||||
# Windows 32 or 64bit platform
|
||||
platforms.append('win32')
|
||||
elif sys.platform == 'darwin':
|
||||
# macOS
|
||||
platforms.append('macos')
|
||||
elif sys.platform.startswith(self.bsd_family):
|
||||
# BSD family, look for e.g. ['freebsd10', 'freebsd']
|
||||
platforms.append(sys.platform)
|
||||
simple = sys.platform.rstrip('0123456789')
|
||||
if simple != sys.platform:
|
||||
platforms.append(simple)
|
||||
|
||||
if not platforms:
|
||||
raise ValueError("Unsupported platform: {}".format(sys.platform))
|
||||
|
||||
return platforms
|
||||
|
||||
def _parse_osrelease(self, filename='/etc/os-release'):
|
||||
release = {}
|
||||
with io.open(filename, encoding='utf-8') as f:
|
||||
for line in f:
|
||||
mo = _osrelease_line.match(line)
|
||||
if mo is not None:
|
||||
release[mo.group('name')] = mo.group('value')
|
||||
|
||||
platforms = [
|
||||
release['ID'],
|
||||
]
|
||||
if "ID_LIKE" in release:
|
||||
platforms.extend(
|
||||
v.strip() for v in release['ID_LIKE'].split(' ') if v.strip()
|
||||
)
|
||||
|
||||
return platforms
|
||||
|
||||
def _get_platform(self, platform_ids):
|
||||
for platform in platform_ids:
|
||||
try:
|
||||
importlib.import_module('ipaplatform.{}'.format(platform))
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
return platform
|
||||
raise ImportError('No ipaplatform available for "{}"'.format(
|
||||
', '.join(platform_ids)))
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
"""Meta importer hook"""
|
||||
if fullname in self.modules:
|
||||
return self
|
||||
return None
|
||||
|
||||
def load_module(self, fullname):
|
||||
"""Meta importer hook"""
|
||||
suffix = fullname.split('.', 1)[1]
|
||||
alias = 'ipaplatform.{}.{}'.format(self.platform, suffix)
|
||||
platform_mod = importlib.import_module(alias)
|
||||
base_mod = sys.modules.get(fullname)
|
||||
if base_mod is not None:
|
||||
# module has been imported before, update its __dict__
|
||||
base_mod.__dict__.update(platform_mod.__dict__)
|
||||
for key in list(base_mod.__dict__):
|
||||
if not hasattr(platform_mod, key):
|
||||
delattr(base_mod, key)
|
||||
else:
|
||||
sys.modules[fullname] = platform_mod
|
||||
return platform_mod
|
||||
|
||||
|
||||
metaimporter = IpaMetaImporter()
|
||||
sys.meta_path.insert(0, metaimporter)
|
||||
|
||||
fixup_module = metaimporter.load_module
|
||||
ipaplatform.NAME = metaimporter.platform
|
||||
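Not part of the diff, but a short usage sketch may help orient readers: once ipaplatform._importhook has been imported (the generated stub modules such as constants.py below do this), importing any of the four generic modules transparently yields the detected platform's implementation. The host, module names, and printed values below are illustrative assumptions, not output from this change:

    import ipaplatform._importhook      # installs IpaMetaImporter on sys.meta_path
    import ipaplatform

    # Detection: _parse_osrelease() reads ID= and ID_LIKE= from /etc/os-release,
    # e.g. ID=centos, ID_LIKE="rhel fedora" -> ['centos', 'rhel', 'fedora'];
    # _get_platform() returns the first id with an importable ipaplatform.<id>.
    print(ipaplatform.NAME)             # e.g. 'fedora'

    # These imports are intercepted by find_module()/load_module() and aliased
    # to ipaplatform.<detected platform>.<module>, e.g. ipaplatform.fedora.paths.
    from ipaplatform.paths import paths
    from ipaplatform.constants import constants

    print(paths.IPA_GETKEYTAB)          # '/usr/sbin/ipa-getkeytab'
    print(constants.SSSD_USER)          # 'sssd'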
@@ -37,3 +37,6 @@ class BaseConstantsNamespace(object):
        'httpd_dbus_sssd': 'on',
    }
    SSSD_USER = "sssd"


constants = BaseConstantsNamespace()
@@ -171,7 +171,6 @@ class BasePathNamespace(object):
    ODS_SIGNER = "/usr/sbin/ods-signer"
    OPENSSL = "/usr/bin/openssl"
    PK12UTIL = "/usr/bin/pk12util"
    SIGNTOOL = "/usr/bin/signtool"
    SOFTHSM2_UTIL = "/usr/bin/softhsm2-util"
    SSLGET = "/usr/bin/sslget"
    SSS_SSH_AUTHORIZEDKEYS = "/usr/bin/sss_ssh_authorizedkeys"
@@ -282,8 +281,6 @@ class BasePathNamespace(object):
    CA_BACKUP_KEYS_P12 = "/var/lib/pki/pki-tomcat/alias/ca_backup_keys.p12"
    KRA_BACKUP_KEYS_P12 = "/var/lib/pki/pki-tomcat/alias/kra_backup_keys.p12"
    CA_CS_CFG_PATH = "/var/lib/pki/pki-tomcat/conf/ca/CS.cfg"
    CAJARSIGNINGCERT_CFG = (
        "/var/lib/pki/pki-tomcat/ca/profiles/ca/caJarSigningCert.cfg")
    CASIGNEDLOGCERT_CFG = (
        "/var/lib/pki/pki-tomcat/ca/profiles/ca/caSignedLogCert.cfg")
    KRA_CS_CFG_PATH = "/var/lib/pki/pki-tomcat/conf/kra/CS.cfg"
@@ -350,6 +347,7 @@ class BasePathNamespace(object):
    NETWORK_MANAGER_CONFIG_DIR = '/etc/NetworkManager/conf.d'
    IPA_CUSTODIA_CONF_DIR = '/etc/ipa/custodia'
    IPA_CUSTODIA_CONF = '/etc/ipa/custodia/custodia.conf'
    IPA_CUSTODIA_KEYS = '/etc/ipa/custodia/server.keys'
    IPA_CUSTODIA_SOCKET = '/run/httpd/ipa-custodia.sock'
    IPA_CUSTODIA_AUDIT_LOG = '/var/log/ipa-custodia.audit.log'
    IPA_GETKEYTAB = '/usr/sbin/ipa-getkeytab'
@@ -357,5 +355,8 @@ class BasePathNamespace(object):
    GSSPROXY_CONF = '/etc/gssproxy/10-ipa.conf'
    KRB5CC_HTTPD = '/tmp/krb5cc-httpd'
    IF_INET6 = '/proc/net/if_inet6'
    AUTHCONFIG = None
    IPA_SERVER_UPGRADE = '/usr/sbin/ipa-server-upgrade'

path_namespace = BasePathNamespace

paths = BasePathNamespace()
@@ -424,7 +424,7 @@ class SystemdService(PlatformService):
                                   self.service_instance(instance_name))

        try:
            if not ipautil.dir_exists(srv_tgt):
            if not os.path.isdir(srv_tgt):
                os.mkdir(srv_tgt)
                os.chmod(srv_tgt, 0o755)
            if os.path.exists(srv_lnk):
@@ -459,7 +459,7 @@ class SystemdService(PlatformService):
                                   self.service_instance(instance_name))

        try:
            if ipautil.dir_exists(srv_tgt):
            if os.path.isdir(srv_tgt):
                if os.path.islink(srv_lnk):
                    os.unlink(srv_lnk)
                ipautil.run([paths.SYSTEMCTL, "--system", "daemon-reload"])
@@ -505,8 +505,12 @@ class SystemdService(PlatformService):

# Objects below are expected to be exported by platform module

service = None
knownservices = None
def base_service_class_factory(name, api=None):
    raise NotImplementedError


service = base_service_class_factory
knownservices = KnownServices({})

# System may support more time&date services. FreeIPA supports ntpd only, other
# services will be disabled during IPA installation
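The comment above states the contract a platform provider must fulfil: its services module re-exports service (a factory taking (name, api)) and knownservices. A hypothetical provider could satisfy it roughly as follows; the module path, class names, and import are assumptions for illustration and are not taken from this diff:

    # ipaplatform/myos/services.py -- hypothetical provider module satisfying
    # the contract stated in the base module.
    from ipaplatform.base.services import KnownServices


    def myos_service_class_factory(name, api=None):
        # A concrete provider would return a PlatformService subclass for `name`;
        # this sketch keeps the base module's behaviour of refusing unknown names.
        raise NotImplementedError(name)


    # Names the rest of FreeIPA imports from ipaplatform.services:
    service = myos_service_class_factory
    knownservices = KnownServices({})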
@@ -204,6 +204,9 @@ class BaseTaskNamespace(object):
        """Configure httpd service to work with IPA"""
        raise NotImplementedError()

    def configure_http_gssproxy_conf(self, ipauser):
        raise NotImplementedError()

    def remove_httpd_service_ipa_conf(self):
        """Remove configuration of httpd service of IPA"""
        raise NotImplementedError()
@@ -219,3 +222,6 @@ class BaseTaskNamespace(object):
            logger.debug('Done adding user to group')
        except ipautil.CalledProcessError as e:
            logger.debug('Failed to add user to group: %s', e)


tasks = BaseTaskNamespace()
8
ipaplatform/constants.py
Normal file
@@ -0,0 +1,8 @@
#
# Copyright (C) 2017 FreeIPA Contributors see COPYING for license
#
"""IpaMetaImporter replaces this module with ipaplatform.$NAME.constants.
"""
import ipaplatform._importhook

ipaplatform._importhook.fixup_module('ipaplatform.constants')
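The remaining generic modules are omitted from this truncated diff. Given the constants.py stub above, ipaplatform/paths.py, services.py and tasks.py presumably follow the same pattern; a sketch of what such a stub would look like (assumed, not shown in this diff):

    #
    # Copyright (C) 2017 FreeIPA Contributors see COPYING for license
    #
    """IpaMetaImporter replaces this module with ipaplatform.$NAME.paths.
    """
    import ipaplatform._importhook

    ipaplatform._importhook.fixup_module('ipaplatform.paths')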
Some files were not shown because too many files have changed in this diff