Imported Upstream version 4.0.5

This commit is contained in:
Mario Fetka
2021-07-25 07:50:50 +02:00
parent 8ff3be4216
commit 3bfaa6e020
2049 changed files with 317193 additions and 1632423 deletions

2
ipapython/MANIFEST.in Normal file
View File

@@ -0,0 +1,2 @@
include *.conf

45
ipapython/Makefile Normal file
View File

@@ -0,0 +1,45 @@
# Build/install glue for the ipapython package (Python 2 era, setup.py based).
# User-tunable knobs keep ?= so callers can override from the environment.
PYTHONLIBDIR ?= $(shell python2 -c "from distutils.sysconfig import *; print get_python_lib()")
PACKAGEDIR ?= $(DESTDIR)/$(PYTHONLIBDIR)/ipa
CONFIGDIR ?= $(DESTDIR)/etc/ipa

# := so the glob runs once at parse time instead of on every expansion.
TESTS := $(wildcard test/*.py)
SUBDIRS = py_default_encoding

# None of these targets name real files; declare them phony so a stray
# file called e.g. "install" cannot mask the rule.
.PHONY: all install clean distclean maintainer-clean

all:
	@for subdir in $(SUBDIRS); do \
		(cd $$subdir && $(MAKE) $@) || exit 1; \
	done

install:
	if [ "$(DESTDIR)" = "" ]; then \
		python2 setup.py install; \
	else \
		python2 setup.py install --root $(DESTDIR); \
	fi
	@for subdir in $(SUBDIRS); do \
		(cd $$subdir && $(MAKE) $@) || exit 1; \
	done

clean:
	rm -f *~ *.pyc
	@for subdir in $(SUBDIRS); do \
		(cd $$subdir && $(MAKE) $@) || exit 1; \
	done

distclean: clean
	rm -f setup.py ipa-python.spec version.py
	@for subdir in $(SUBDIRS); do \
		(cd $$subdir && $(MAKE) $@) || exit 1; \
	done

maintainer-clean: distclean
	rm -rf build
	@for subdir in $(SUBDIRS); do \
		(cd $$subdir && $(MAKE) $@) || exit 1; \
	done

# Run each test/*.py as its own pseudo-target; %.tst files are never
# created on disk, so each test re-runs every time.
.PHONY: test
test: $(subst .py,.tst,$(TESTS))

%.tst: %.py
	python2 $<

View File

@@ -1,23 +0,0 @@
# Automake fragment: pulls in the shared Python packaging rules and
# generates version.py for the ipapython package.
include $(top_srcdir)/Makefile.python.am
# .DEFAULT_PLUGINS is a derived file; remove it on mostlyclean.
MOSTLYCLEANFILES = .DEFAULT_PLUGINS
# Ship the version.py template in the dist tarball.
EXTRA_DIST = version.py.in
# version.py must exist before building, dist'ing, installing,
# or producing a wheel.
all-local: version.py
dist-hook: version.py
install-exec-local: version.py
bdist_wheel: version.py
# Extract the default plugin names (lines tagged "default:") from API.txt.
.DEFAULT_PLUGINS: $(top_srcdir)/API.txt
$(AM_V_GEN)awk '$$1 == "default:" { print $$2 }' $< >$@
# Substitute version placeholders into version.py.in; the two
# @DEFAULT_PLUGINS@ sed expressions splice in the plugin list (r) and
# then drop the placeholder line itself (d).
version.py: version.py.in .DEFAULT_PLUGINS $(top_builddir)/$(CONFIG_STATUS)
$(AM_V_GEN)sed \
-e 's|@API_VERSION[@]|$(API_VERSION)|g' \
-e 's|@NUM_VERSION[@]|$(NUM_VERSION)|g' \
-e 's|@VERSION[@]|$(VERSION)|g' \
-e 's|@VENDOR_SUFFIX[@]|$(VENDOR_SUFFIX)|g' \
-e '/@DEFAULT_PLUGINS[@]/r .DEFAULT_PLUGINS' \
-e '/@DEFAULT_PLUGINS[@]/d' \
$< > $@

View File

@@ -1,639 +0,0 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# @configure_input@
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
@SET_MAKE@
VPATH = @srcdir@
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/@PACKAGE@
pkgincludedir = $(includedir)/@PACKAGE@
pkglibdir = $(libdir)/@PACKAGE@
pkglibexecdir = $(libexecdir)/@PACKAGE@
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
subdir = ipapython
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \
$(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/intlmacosx.m4 \
$(top_srcdir)/m4/lib-ld.m4 $(top_srcdir)/m4/lib-link.m4 \
$(top_srcdir)/m4/lib-prefix.m4 $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \
$(top_srcdir)/m4/progtest.m4 $(top_srcdir)/VERSION.m4 \
$(top_srcdir)/server.m4 $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = $(top_builddir)/config.h
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_@AM_V@)
am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
SOURCES =
DIST_SOURCES =
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
am__DIST_COMMON = $(srcdir)/Makefile.in \
$(top_srcdir)/Makefile.python.am README
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
API_VERSION = @API_VERSION@
AR = @AR@
AUTOCONF = @AUTOCONF@
AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CMOCKA_CFLAGS = @CMOCKA_CFLAGS@
CMOCKA_LIBS = @CMOCKA_LIBS@
CONFIG_STATUS = @CONFIG_STATUS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
CRYPTO_CFLAGS = @CRYPTO_CFLAGS@
CRYPTO_LIBS = @CRYPTO_LIBS@
CYGPATH_W = @CYGPATH_W@
DATA_VERSION = @DATA_VERSION@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
DIRSRV_CFLAGS = @DIRSRV_CFLAGS@
DIRSRV_LIBS = @DIRSRV_LIBS@
DLLTOOL = @DLLTOOL@
DSYMUTIL = @DSYMUTIL@
DUMPBIN = @DUMPBIN@
ECHO_C = @ECHO_C@
ECHO_N = @ECHO_N@
ECHO_T = @ECHO_T@
EGREP = @EGREP@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
GETTEXT_DOMAIN = @GETTEXT_DOMAIN@
GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@
GIT_BRANCH = @GIT_BRANCH@
GIT_VERSION = @GIT_VERSION@
GMSGFMT = @GMSGFMT@
GMSGFMT_015 = @GMSGFMT_015@
GREP = @GREP@
INI_CFLAGS = @INI_CFLAGS@
INI_LIBS = @INI_LIBS@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
INTLLIBS = @INTLLIBS@
INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@
IPAPLATFORM = @IPAPLATFORM@
IPA_DATA_DIR = @IPA_DATA_DIR@
IPA_SYSCONF_DIR = @IPA_SYSCONF_DIR@
JSLINT = @JSLINT@
KRAD_LIBS = @KRAD_LIBS@
KRB5KDC_SERVICE = @KRB5KDC_SERVICE@
KRB5_CFLAGS = @KRB5_CFLAGS@
KRB5_LIBS = @KRB5_LIBS@
LD = @LD@
LDAP_CFLAGS = @LDAP_CFLAGS@
LDAP_LIBS = @LDAP_LIBS@
LDFLAGS = @LDFLAGS@
LIBICONV = @LIBICONV@
LIBINTL = @LIBINTL@
LIBINTL_LIBS = @LIBINTL_LIBS@
LIBOBJS = @LIBOBJS@
LIBPDB_NAME = @LIBPDB_NAME@
LIBS = @LIBS@
LIBTOOL = @LIBTOOL@
LIBVERTO_CFLAGS = @LIBVERTO_CFLAGS@
LIBVERTO_LIBS = @LIBVERTO_LIBS@
LIPO = @LIPO@
LN_S = @LN_S@
LTLIBICONV = @LTLIBICONV@
LTLIBINTL = @LTLIBINTL@
LTLIBOBJS = @LTLIBOBJS@
LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
MAKEINFO = @MAKEINFO@
MANIFEST_TOOL = @MANIFEST_TOOL@
MKDIR_P = @MKDIR_P@
MK_ASSIGN = @MK_ASSIGN@
MK_ELSE = @MK_ELSE@
MK_ENDIF = @MK_ENDIF@
MK_IFEQ = @MK_IFEQ@
MSGATTRIB = @MSGATTRIB@
MSGFMT = @MSGFMT@
MSGFMT_015 = @MSGFMT_015@
MSGMERGE = @MSGMERGE@
NAMED_GROUP = @NAMED_GROUP@
NDRNBT_CFLAGS = @NDRNBT_CFLAGS@
NDRNBT_LIBS = @NDRNBT_LIBS@
NDRPAC_CFLAGS = @NDRPAC_CFLAGS@
NDRPAC_LIBS = @NDRPAC_LIBS@
NDR_CFLAGS = @NDR_CFLAGS@
NDR_LIBS = @NDR_LIBS@
NM = @NM@
NMEDIT = @NMEDIT@
NSPR_CFLAGS = @NSPR_CFLAGS@
NSPR_LIBS = @NSPR_LIBS@
NSS_CFLAGS = @NSS_CFLAGS@
NSS_LIBS = @NSS_LIBS@
NUM_VERSION = @NUM_VERSION@
OBJDUMP = @OBJDUMP@
OBJEXT = @OBJEXT@
ODS_USER = @ODS_USER@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
PACKAGE = @PACKAGE@
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_STRING = @PACKAGE_STRING@
PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_URL = @PACKAGE_URL@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
PKG_CONFIG = @PKG_CONFIG@
PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@
PKG_CONFIG_PATH = @PKG_CONFIG_PATH@
POPT_CFLAGS = @POPT_CFLAGS@
POPT_LIBS = @POPT_LIBS@
POSUB = @POSUB@
PYLINT = @PYLINT@
PYTHON = @PYTHON@
PYTHON2 = @PYTHON2@
PYTHON3 = @PYTHON3@
PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@
PYTHON_INSTALL_EXTRA_OPTIONS = @PYTHON_INSTALL_EXTRA_OPTIONS@
PYTHON_PLATFORM = @PYTHON_PLATFORM@
PYTHON_PREFIX = @PYTHON_PREFIX@
PYTHON_VERSION = @PYTHON_VERSION@
RANLIB = @RANLIB@
SAMBA40EXTRA_LIBPATH = @SAMBA40EXTRA_LIBPATH@
SAMBAUTIL_CFLAGS = @SAMBAUTIL_CFLAGS@
SAMBAUTIL_LIBS = @SAMBAUTIL_LIBS@
SASL_CFLAGS = @SASL_CFLAGS@
SASL_LIBS = @SASL_LIBS@
SED = @SED@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
SSSCERTMAP_CFLAGS = @SSSCERTMAP_CFLAGS@
SSSCERTMAP_LIBS = @SSSCERTMAP_LIBS@
SSSIDMAP_CFLAGS = @SSSIDMAP_CFLAGS@
SSSIDMAP_LIBS = @SSSIDMAP_LIBS@
SSSNSSIDMAP_CFLAGS = @SSSNSSIDMAP_CFLAGS@
SSSNSSIDMAP_LIBS = @SSSNSSIDMAP_LIBS@
STRIP = @STRIP@
TALLOC_CFLAGS = @TALLOC_CFLAGS@
TALLOC_LIBS = @TALLOC_LIBS@
TEVENT_CFLAGS = @TEVENT_CFLAGS@
TEVENT_LIBS = @TEVENT_LIBS@
UNISTRING_LIBS = @UNISTRING_LIBS@
UNLINK = @UNLINK@
USE_NLS = @USE_NLS@
UUID_CFLAGS = @UUID_CFLAGS@
UUID_LIBS = @UUID_LIBS@
VENDOR_SUFFIX = @VENDOR_SUFFIX@
VERSION = @VERSION@
XGETTEXT = @XGETTEXT@
XGETTEXT_015 = @XGETTEXT_015@
XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@
XMLRPC_CFLAGS = @XMLRPC_CFLAGS@
XMLRPC_LIBS = @XMLRPC_LIBS@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
ac_ct_AR = @ac_ct_AR@
ac_ct_CC = @ac_ct_CC@
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
am__include = @am__include@
am__leading_dot = @am__leading_dot@
am__quote = @am__quote@
am__tar = @am__tar@
am__untar = @am__untar@
bindir = @bindir@
build = @build@
build_alias = @build_alias@
build_cpu = @build_cpu@
build_os = @build_os@
build_vendor = @build_vendor@
builddir = @builddir@
datadir = @datadir@
datarootdir = @datarootdir@
docdir = @docdir@
dvidir = @dvidir@
exec_prefix = @exec_prefix@
host = @host@
host_alias = @host_alias@
host_cpu = @host_cpu@
host_os = @host_os@
host_vendor = @host_vendor@
htmldir = @htmldir@
i18ntests = @i18ntests@
includedir = @includedir@
infodir = @infodir@
install_sh = @install_sh@
krb5rundir = @krb5rundir@
libdir = @libdir@
libexecdir = @libexecdir@
localedir = @localedir@
localstatedir = @localstatedir@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
pdfdir = @pdfdir@
pkgpyexecdir = @pkgpyexecdir@
pkgpythondir = $(pythondir)/$(pkgname)
prefix = @prefix@
program_transform_name = @program_transform_name@
psdir = @psdir@
pyexecdir = @pyexecdir@
pythondir = @pythondir@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@
sysconfenvdir = @sysconfenvdir@
systemdsystemunitdir = @systemdsystemunitdir@
systemdtmpfilesdir = @systemdtmpfilesdir@
target_alias = @target_alias@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
pkgname = $(shell basename "$(abs_srcdir)")
@VERBOSE_MAKE_FALSE@VERBOSITY = "--quiet"
@VERBOSE_MAKE_TRUE@VERBOSITY = "--verbose"
WHEELDISTDIR = $(top_builddir)/dist/wheels
MOSTLYCLEANFILES = .DEFAULT_PLUGINS
EXTRA_DIST = version.py.in
all: all-am
.SUFFIXES:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(top_srcdir)/Makefile.python.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign ipapython/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign ipapython/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_srcdir)/Makefile.python.am $(am__empty):
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
tags TAGS:
ctags CTAGS:
cscope cscopelist:
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
$(MAKE) $(AM_MAKEFLAGS) \
top_distdir="$(top_distdir)" distdir="$(distdir)" \
dist-hook
check-am: all-am
check: check-am
all-am: Makefile all-local
installdirs:
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
-test -z "$(MOSTLYCLEANFILES)" || rm -f $(MOSTLYCLEANFILES)
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool clean-local mostlyclean-am
distclean: distclean-am
-rm -f Makefile
distclean-am: clean-am distclean-generic
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am:
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am: install-exec-local
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-local
.MAKE: install-am install-strip
.PHONY: all all-am all-local check check-am clean clean-generic \
clean-libtool clean-local cscopelist-am ctags-am dist-hook \
distclean distclean-generic distclean-libtool distdir dvi \
dvi-am html html-am info info-am install install-am \
install-data install-data-am install-dvi install-dvi-am \
install-exec install-exec-am install-exec-local install-html \
install-html-am install-info install-info-am install-man \
install-pdf install-pdf-am install-ps install-ps-am \
install-strip installcheck installcheck-am installdirs \
maintainer-clean maintainer-clean-generic mostlyclean \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags-am uninstall uninstall-am uninstall-local
.PRECIOUS: Makefile
# hack to handle back-in-the-hierarchy dependency on ipasetup.py
.PHONY: $(top_builddir)/ipasetup.py
$(top_builddir)/ipasetup.py:
(cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) ipasetup.py)
all-local: $(top_builddir)/ipasetup.py
cd $(srcdir); $(PYTHON) setup.py \
$(VERBOSITY) \
build \
--build-base "$(abs_builddir)/build"
install-exec-local: $(top_builddir)/ipasetup.py
if [ "x$(pkginstall)" != "xfalse" ]; then \
$(PYTHON) $(srcdir)/setup.py \
$(VERBOSITY) \
build \
--build-base "$(abs_builddir)/build" \
install \
--prefix "$(DESTDIR)$(prefix)" \
--single-version-externally-managed \
--record "$(DESTDIR)$(pkgpythondir)/install_files.txt" \
--optimize 1 \
$(PYTHON_INSTALL_EXTRA_OPTIONS); \
fi
uninstall-local:
if [ -f "$(DESTDIR)$(pkgpythondir)/install_files.txt" ]; then \
cat "$(DESTDIR)$(pkgpythondir)/install_files.txt" | xargs rm -rf ; \
fi
rm -rf "$(DESTDIR)$(pkgpythondir)"
clean-local: $(top_builddir)/ipasetup.py
$(PYTHON) "$(srcdir)/setup.py" \
clean \
--all
--build-base "$(abs_builddir)/build"
rm -rf "$(srcdir)/build" "$(srcdir)/dist" "$(srcdir)/MANIFEST"
find "$(srcdir)" \
-name "*.py[co]" -delete -o \
-name "__pycache__" -delete -o \
-name "*.egg-info" -exec rm -rf {} +
# take list of all Python source files and copy them into distdir
# SOURCES.txt does not contain directories so we need to create those
dist-hook: $(top_builddir)/ipasetup.py
$(PYTHON) "$(srcdir)/setup.py" egg_info
PYTHON_SOURCES=$$(cat "$(srcdir)/$(pkgname).egg-info/SOURCES.txt") || exit $$?; \
for FILEN in $${PYTHON_SOURCES}; \
do \
if test -x "$(srcdir)/$${FILEN}"; then MODE=755; else MODE=644; fi; \
$(INSTALL) -D -m $${MODE} "$(srcdir)/$${FILEN}" "$(distdir)/$${FILEN}" || exit $$?; \
done
.PHONY: bdist_wheel
bdist_wheel: $(top_builddir)/ipasetup.py
rm -rf $(WHEELDISTDIR)/$(pkgname)-*.whl
$(PYTHON) "$(srcdir)/setup.py" \
build \
--build-base "$(abs_builddir)/build" \
bdist_wheel \
--dist-dir=$(WHEELDISTDIR)
all-local: version.py
dist-hook: version.py
install-exec-local: version.py
bdist_wheel: version.py
.DEFAULT_PLUGINS: $(top_srcdir)/API.txt
$(AM_V_GEN)awk '$$1 == "default:" { print $$2 }' $< >$@
version.py: version.py.in .DEFAULT_PLUGINS $(top_builddir)/$(CONFIG_STATUS)
$(AM_V_GEN)sed \
-e 's|@API_VERSION[@]|$(API_VERSION)|g' \
-e 's|@NUM_VERSION[@]|$(NUM_VERSION)|g' \
-e 's|@VERSION[@]|$(VERSION)|g' \
-e 's|@VENDOR_SUFFIX[@]|$(VENDOR_SUFFIX)|g' \
-e '/@DEFAULT_PLUGINS[@]/r .DEFAULT_PLUGINS' \
-e '/@DEFAULT_PLUGINS[@]/d' \
$< > $@
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:

BIN
ipapython/__init__.pyc Normal file

Binary file not shown.

View File

@@ -22,31 +22,27 @@
Handles common operations like option parsing and logging
"""
import logging
import sys
import os
import traceback
from optparse import OptionGroup # pylint: disable=deprecated-module
from optparse import OptionGroup
from ipapython import version
from ipapython import config
from ipapython.ipa_log_manager import standard_logging_setup
logger = logging.getLogger(__name__)
from ipapython import ipa_log_manager
class ScriptError(Exception):
class ScriptError(StandardError):
"""An exception that records an error message and a return value
"""
def __init__(self, msg='', rval=1):
if msg is None:
msg = ''
super(ScriptError, self).__init__(msg)
self.msg = msg
self.rval = rval
@property
def msg(self):
return str(self)
def __str__(self):
return self.msg
class AdminTool(object):
@@ -90,6 +86,7 @@ class AdminTool(object):
usage = None
description = None
log = None
_option_parsers = dict()
@classmethod
@@ -172,7 +169,7 @@ class AdminTool(object):
self.ask_for_options()
self.setup_logging()
return_value = self.run()
except BaseException as exception:
except BaseException, exception:
traceback = sys.exc_info()[2]
error_message, return_value = self.handle_error(exception)
if return_value:
@@ -227,18 +224,11 @@ class AdminTool(object):
- a plain print for things that should not be log (for example,
interactive prompting)
To log, use a module-level logger.
To log, use `self.log.info()`, `self.log.warning()`, etc.
Logging to file is only set up after option validation and prompting;
before that, all output will go to the console only.
"""
root_logger = logging.getLogger()
for handler in root_logger.handlers:
if (isinstance(handler, logging.StreamHandler) and
handler.stream is sys.stderr): # pylint: disable=no-member
root_logger.removeHandler(handler)
break
self._setup_logging(log_file_mode=log_file_mode)
def _setup_logging(self, log_file_mode='w', no_file=False):
@@ -259,13 +249,14 @@ class AdminTool(object):
verbose = False
else:
verbose = True
standard_logging_setup(
ipa_log_manager.standard_logging_setup(
log_file_name, console_format=console_format,
filemode=log_file_mode, debug=debug, verbose=verbose)
self.log = ipa_log_manager.log_mgr.get_logger(self)
if log_file_name:
logger.debug('Logging to %s', log_file_name)
self.log.debug('Logging to %s' % log_file_name)
elif not no_file:
logger.debug('Not logging to a file')
self.log.debug('Not logging to a file')
def handle_error(self, exception):
@@ -290,20 +281,16 @@ class AdminTool(object):
assumed to have run successfully, and the return value is used as the
SystemExit code.
"""
logger.debug('%s was invoked with arguments %s and options: %s',
self.command_name, self.args, self.safe_options)
logger.debug('IPA version %s', version.VENDOR_VERSION)
self.log.debug('%s was invoked with arguments %s and options: %s',
self.command_name, self.args, self.safe_options)
self.log.debug('IPA version %s' % version.VENDOR_VERSION)
def log_failure(self, error_message, return_value, exception, backtrace):
logger.debug('%s', ''.join(traceback.format_tb(backtrace)))
logger.debug('The %s command failed, exception: %s: %s',
self.command_name, type(exception).__name__, exception)
self.log.debug(''.join(traceback.format_tb(backtrace)))
self.log.debug('The %s command failed, exception: %s: %s',
self.command_name, type(exception).__name__, exception)
if error_message:
logger.error('%s', error_message)
message = "The %s command failed." % self.command_name
if self.log_file_name:
message += " See %s for more information" % self.log_file_name
logger.error('%s', message)
self.log.error(error_message)
def log_success(self):
logger.info('The %s command was successful', self.command_name)
self.log.info('The %s command was successful', self.command_name)

View File

@@ -16,715 +16,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
import collections
import logging
import os
import io
import pwd
import grp
import re
import tempfile
from tempfile import NamedTemporaryFile
import shutil
import cryptography.x509
from ipaplatform.paths import paths
from ipapython.dn import DN
from ipapython.kerberos import Principal
from ipapython import ipautil
from ipalib import x509 # pylint: disable=ipa-forbidden-import
logger = logging.getLogger(__name__)
CA_NICKNAME_FMT = "%s IPA CA"
NSS_FILES = ("cert8.db", "key3.db", "secmod.db", "pwdfile.txt")
TrustFlags = collections.namedtuple('TrustFlags', 'has_key trusted ca usages')
EMPTY_TRUST_FLAGS = TrustFlags(False, None, None, None)
IPA_CA_TRUST_FLAGS = TrustFlags(
False, True, True, frozenset({
x509.EKU_SERVER_AUTH,
x509.EKU_CLIENT_AUTH,
x509.EKU_CODE_SIGNING,
x509.EKU_EMAIL_PROTECTION,
x509.EKU_PKINIT_CLIENT_AUTH,
x509.EKU_PKINIT_KDC,
}),
)
EXTERNAL_CA_TRUST_FLAGS = TrustFlags(
False, True, True, frozenset({x509.EKU_SERVER_AUTH}),
)
TRUSTED_PEER_TRUST_FLAGS = TrustFlags(
False, True, False, frozenset({x509.EKU_SERVER_AUTH}),
)
def get_ca_nickname(realm, format=CA_NICKNAME_FMT):
    """Return the NSS nickname of the IPA CA certificate for ``realm``.

    :param realm: realm name interpolated into the template
    :param format: printf-style template with a single ``%s`` slot
    """
    nickname = format % realm
    return nickname
def find_cert_from_txt(cert, start=0):
    """Extract the FIRST PEM certificate found in a blob of text.

    The input may contain arbitrary leading and trailing text around
    the PEM markers; only the first BEGIN/END CERTIFICATE span is
    parsed.

    :param cert: str that may contain a PEM certificate
    :param start: offset at which to begin scanning
    :returns: tuple (IPACertificate, position just past the END marker)
    :raises RuntimeError: when no certificate markers are found
    """
    begin_marker = '-----BEGIN CERTIFICATE-----'
    end_marker = '-----END CERTIFICATE-----'
    begin = cert.find(begin_marker, start)
    end = cert.find(end_marker, begin)
    if end > 0:
        # Include the END marker itself (25 characters) in the span.
        end += len(end_marker)
    if begin < 0 or end < 0:
        raise RuntimeError("Unable to find certificate")
    parsed = x509.load_pem_x509_certificate(cert[begin:end].encode('utf-8'))
    return (parsed, end)
def parse_trust_flags(trust_flags):
    """Convert a certutil trust-flag string into a TrustFlags object.

    :param trust_flags: flag string as printed by ``certutil -L``,
        e.g. ``CTu,Cu,Cu`` (SSL server, email, code-signing columns)
    :returns: TrustFlags namedtuple (has_key, trusted, ca, usages)
    :raises ValueError: when the flags are self-contradictory
    """
    has_key = 'u' in trust_flags
    if 'p' in trust_flags:
        if 'C' in trust_flags or 'P' in trust_flags or 'T' in trust_flags:
            raise ValueError("cannot be both trusted and not trusted")
        # Fix: return a 4-field TrustFlags namedtuple like every other
        # exit path; the previous bare 3-tuple broke callers that unpack
        # four fields (e.g. unparse_trust_flags).
        return TrustFlags(has_key, False, None, None)
    elif 'C' in trust_flags or 'T' in trust_flags:
        if 'P' in trust_flags:
            raise ValueError("cannot be both CA and not CA")
        ca = True
    elif 'P' in trust_flags:
        ca = False
    else:
        # No explicit trust recorded either way.
        return TrustFlags(has_key, None, None, frozenset())
    trust_flags = trust_flags.split(',')
    ext_key_usage = set()
    # Column order defined by certutil: SSL server, email, code signing.
    for i, kp in enumerate((x509.EKU_SERVER_AUTH,
                            x509.EKU_EMAIL_PROTECTION,
                            x509.EKU_CODE_SIGNING)):
        if 'C' in trust_flags[i] or 'P' in trust_flags[i]:
            ext_key_usage.add(kp)
    if 'T' in trust_flags[0]:
        # 'T' in the SSL column marks a CA trusted to issue client certs.
        ext_key_usage.add(x509.EKU_CLIENT_AUTH)
    return TrustFlags(has_key, True, ca, frozenset(ext_key_usage))
def unparse_trust_flags(trust_flags):
    """Render a TrustFlags object as a certutil trust-flag string.

    Inverse of parse_trust_flags(): produces the three comma-separated
    columns (SSL server, email, code signing) that certutil expects.
    """
    has_key, trusted, ca, ext_key_usage = trust_flags
    own = 'u' if has_key else ''
    if trusted is False:
        # Explicitly distrusted in all three columns.
        return ','.join(['p' + own] * 3)
    if trusted is None or ca is None:
        # No explicit trust recorded.
        return ','.join([own] * 3)
    if ext_key_usage is None:
        # Trusted for every usage.
        if ca:
            return 'CTu,Cu,Cu' if has_key else 'CT,C,C'
        return 'Pu,Pu,Pu' if has_key else 'P,P,P'
    # Build the per-usage columns: CA certs get 'C', peer certs get 'P'.
    columns = []
    for kp in (x509.EKU_SERVER_AUTH,
               x509.EKU_EMAIL_PROTECTION,
               x509.EKU_CODE_SIGNING):
        columns.append(('C' if ca else 'P') if kp in ext_key_usage else '')
    if ca and x509.EKU_CLIENT_AUTH in ext_key_usage:
        # Client-auth trust is expressed as 'T' in the SSL column.
        columns[0] += 'T'
    if has_key:
        columns = [col + 'u' for col in columns]
    return ','.join(columns)
def verify_kdc_cert_validity(kdc_cert, ca_certs, realm):
"""Check that ``kdc_cert`` is a valid PKINIT KDC certificate for ``realm``.

Raises ValueError when the chain does not verify against ``ca_certs``,
when the cert lacks the PKINIT KDC extended key usage, or when no
KRB5PrincipalName SAN matches the realm's krbtgt principal.
"""
# Stage 1: chain validation via `openssl verify` against the CA bundle,
# using throwaway PEM files for the KDC cert and the CA list.
with NamedTemporaryFile() as kdc_file, NamedTemporaryFile() as ca_file:
kdc_file.write(kdc_cert.public_bytes(x509.Encoding.PEM))
kdc_file.flush()
x509.write_certificate_list(ca_certs, ca_file.name)
ca_file.flush()
try:
ipautil.run(
[paths.OPENSSL, 'verify', '-CAfile', ca_file.name,
kdc_file.name],
capture_output=True)
except ipautil.CalledProcessError as e:
# Surface openssl's own diagnostic to the caller.
raise ValueError(e.output)
# Stage 2: the EKU extension must include the PKINIT KDC OID;
# .index() raising ValueError means the OID is absent.
try:
eku = kdc_cert.extensions.get_extension_for_class(
cryptography.x509.ExtendedKeyUsage)
list(eku.value).index(
cryptography.x509.ObjectIdentifier(x509.EKU_PKINIT_KDC))
except (cryptography.x509.ExtensionNotFound,
ValueError):
raise ValueError("invalid for a KDC")
# Stage 3: a KRB5PrincipalName SAN must name krbtgt/<realm>@<realm>.
principal = str(Principal(['krbtgt', realm], realm))
gns = x509.process_othernames(kdc_cert.san_general_names)
for gn in gns:
if isinstance(gn, x509.KRB5PrincipalName) and gn.name == principal:
break
else:
raise ValueError("invalid for realm %s" % realm)
class NSSDatabase(object):
"""A general-purpose wrapper around a NSS cert database
For permanent NSS databases, pass the cert DB directory to __init__
For temporary databases, do not pass nssdir, and call close() when done
to remove the DB. Alternatively, a NSSDatabase can be used as a
context manager that calls close() automatically.
"""
# Traditionally, we used CertDB for our NSS DB operations, but that class
# got too tied to IPA server details, killing reusability.
# BaseCertDB is a class that knows nothing about IPA.
# Generic NSS DB code should be moved here.
def __init__(self, nssdir=None):
if nssdir is None:
self.secdir = tempfile.mkdtemp()
self._is_temporary = True
else:
self.secdir = nssdir
self._is_temporary = False
self.pwd_file = os.path.join(self.secdir, 'pwdfile.txt')
def close(self):
if self._is_temporary:
shutil.rmtree(self.secdir)
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
def run_certutil(self, args, stdin=None, **kwargs):
new_args = [paths.CERTUTIL, "-d", self.secdir]
new_args = new_args + args
new_args.extend(['-f', self.pwd_file])
return ipautil.run(new_args, stdin, **kwargs)
def create_db(self, user=None, group=None, mode=None, backup=False):
"""Create cert DB
:param user: User owner the secdir
:param group: Group owner of the secdir
:param mode: Mode of the secdir
:param backup: Backup the secdir files
"""
dirmode = 0o750
filemode = 0o640
pwdfilemode = 0o640
if mode is not None:
dirmode = mode
filemode = mode & 0o666
pwdfilemode = mode & 0o660
uid = -1
gid = -1
if user is not None:
uid = pwd.getpwnam(user).pw_uid
if group is not None:
gid = grp.getgrnam(group).gr_gid
if backup:
for filename in NSS_FILES:
path = os.path.join(self.secdir, filename)
ipautil.backup_file(path)
if not os.path.exists(self.secdir):
os.makedirs(self.secdir, dirmode)
if not os.path.exists(self.pwd_file):
# Create the password file for this db
with io.open(os.open(self.pwd_file,
os.O_CREAT | os.O_WRONLY,
pwdfilemode), 'w', closefd=True) as f:
f.write(ipautil.ipa_generate_password())
f.flush()
self.run_certutil(["-N", "-f", self.pwd_file])
# Finally fix up perms
os.chown(self.secdir, uid, gid)
os.chmod(self.secdir, dirmode)
for filename in NSS_FILES:
path = os.path.join(self.secdir, filename)
if os.path.exists(path):
os.chown(path, uid, gid)
if path == self.pwd_file:
new_mode = pwdfilemode
else:
new_mode = filemode
os.chmod(path, new_mode)
def restore(self):
for filename in NSS_FILES:
path = os.path.join(self.secdir, filename)
backup_path = path + '.orig'
save_path = path + '.ipasave'
try:
if os.path.exists(path):
os.rename(path, save_path)
if os.path.exists(backup_path):
os.rename(backup_path, path)
except OSError as e:
logger.debug('%s', e)
def list_certs(self):
"""Return nicknames and cert flags for all certs in the database
:return: List of (name, trust_flags) tuples
"""
result = self.run_certutil(["-L"], capture_output=True)
certs = result.output.splitlines()
# FIXME, this relies on NSS never changing the formatting of certutil
certlist = []
for cert in certs:
match = re.match(r'^(.+?)\s+(\w*,\w*,\w*)\s*$', cert)
if match:
nickname = match.group(1)
trust_flags = parse_trust_flags(match.group(2))
certlist.append((nickname, trust_flags))
return tuple(certlist)
def find_server_certs(self):
"""Return nicknames and cert flags for server certs in the database
Server certs have an "u" character in the trust flags.
:return: List of (name, trust_flags) tuples
"""
server_certs = []
for name, flags in self.list_certs():
if flags.has_key:
server_certs.append((name, flags))
return server_certs
def get_trust_chain(self, nickname):
"""Return names of certs in a given cert's trust chain
:param nickname: Name of the cert
:return: List of certificate names
"""
root_nicknames = []
result = self.run_certutil(["-O", "-n", nickname], capture_output=True)
chain = result.output.splitlines()
for c in chain:
m = re.match('\s*"(.*)" \[.*', c)
if m:
root_nicknames.append(m.groups()[0])
return root_nicknames
    def export_pkcs12(self, nickname, pkcs12_filename, pkcs12_passwd=None):
        """Export a cert (and its key) to a PKCS#12 file via pk12util.

        :param nickname: nickname of the cert to export
        :param pkcs12_filename: output file name
        :param pkcs12_passwd: optional password protecting the PKCS#12
            file; when None, pk12util is run without ``-w``
        :raises RuntimeError: when pk12util fails; return codes 17 and
            10 get specific messages (per the branches below), anything
            else a generic one
        """
        args = [paths.PK12UTIL, "-d", self.secdir,
                "-o", pkcs12_filename,
                "-n", nickname,
                "-k", self.pwd_file]
        pkcs12_password_file = None
        if pkcs12_passwd is not None:
            # pk12util reads the export password from a file (-w)
            pkcs12_password_file = ipautil.write_tmp_file(pkcs12_passwd + '\n')
            args = args + ["-w", pkcs12_password_file.name]
        try:
            ipautil.run(args)
        except ipautil.CalledProcessError as e:
            if e.returncode == 17:
                raise RuntimeError("incorrect password for pkcs#12 file %s" %
                                   pkcs12_filename)
            elif e.returncode == 10:
                raise RuntimeError("Failed to open %s" % pkcs12_filename)
            else:
                raise RuntimeError("unknown error exporting pkcs#12 file %s" %
                                   pkcs12_filename)
        finally:
            # the temp password file is deleted on close
            if pkcs12_password_file is not None:
                pkcs12_password_file.close()
    def import_pkcs12(self, pkcs12_filename, pkcs12_passwd=None):
        """Import certs and keys from a PKCS#12 file via pk12util.

        :param pkcs12_filename: file to import
        :param pkcs12_passwd: optional password protecting the PKCS#12
            file; when None, pk12util is run without ``-w``
        :raises RuntimeError: when pk12util fails; return codes 17 and
            10 get specific messages, anything else a generic one
        """
        args = [paths.PK12UTIL, "-d", self.secdir,
                "-i", pkcs12_filename,
                "-k", self.pwd_file, '-v']
        pkcs12_password_file = None
        if pkcs12_passwd is not None:
            # pk12util reads the import password from a file (-w)
            pkcs12_password_file = ipautil.write_tmp_file(pkcs12_passwd + '\n')
            args = args + ["-w", pkcs12_password_file.name]
        try:
            ipautil.run(args)
        except ipautil.CalledProcessError as e:
            if e.returncode == 17:
                raise RuntimeError("incorrect password for pkcs#12 file %s" %
                                   pkcs12_filename)
            elif e.returncode == 10:
                raise RuntimeError("Failed to open %s" % pkcs12_filename)
            else:
                raise RuntimeError("unknown error import pkcs#12 file %s" %
                                   pkcs12_filename)
        finally:
            # the temp password file is deleted on close
            if pkcs12_password_file is not None:
                pkcs12_password_file.close()
    def import_files(self, files, import_keys=False, key_password=None,
                     key_nickname=None):
        """
        Import certificates and a single private key from multiple files

        The files may be in PEM and DER certificate, PKCS#7 certificate chain,
        PKCS#8 and raw private key and PKCS#12 formats.

        At most one private key may be imported across all the files.
        Extracted certs are added with empty trust flags; an extracted
        key is re-imported together with its matching cert through a
        freshly generated, password-protected PKCS#12 bundle.

        :param files: Names of files to import
        :param import_keys: Whether to import private keys
        :param key_password: Password to decrypt private keys
        :param key_nickname: Nickname of the private key to import from PKCS#12
            files
        """
        key_file = None
        extracted_key = None
        extracted_certs = []
        for filename in files:
            try:
                with open(filename, 'rb') as f:
                    data = f.read()
            except IOError as e:
                raise RuntimeError(
                    "Failed to open %s: %s" % (filename, e.strerror))
            # Try to parse the file as PEM file
            matches = list(
                re.finditer(
                    br'-----BEGIN (.+?)-----(.*?)-----END \1-----',
                    data, re.DOTALL
                )
            )
            if matches:
                loaded = False
                for match in matches:
                    body = match.group()
                    label = match.group(1)
                    # 1-based line number of the PEM header, for logs
                    line = len(data[:match.start() + 1].splitlines())
                    if label in (b'CERTIFICATE', b'X509 CERTIFICATE',
                                 b'X.509 CERTIFICATE'):
                        try:
                            cert = x509.load_pem_x509_certificate(body)
                        except ValueError as e:
                            # a bare CERTIFICATE block that fails to
                            # parse falls through to the PKCS#7 branch
                            if label != b'CERTIFICATE':
                                logger.warning(
                                    "Skipping certificate in %s at line %s: "
                                    "%s",
                                    filename, line, e)
                                continue
                        else:
                            extracted_certs.append(cert)
                            loaded = True
                            continue
                    if label in (b'PKCS7', b'PKCS #7 SIGNED DATA',
                                 b'CERTIFICATE'):
                        try:
                            certs = x509.pkcs7_to_certs(body)
                        except ipautil.CalledProcessError as e:
                            if label == b'CERTIFICATE':
                                logger.warning(
                                    "Skipping certificate in %s at line %s: "
                                    "%s",
                                    filename, line, e)
                            else:
                                logger.warning(
                                    "Skipping PKCS#7 in %s at line %s: %s",
                                    filename, line, e)
                            continue
                        else:
                            extracted_certs.extend(certs)
                            loaded = True
                            continue
                    if label in (b'PRIVATE KEY', b'ENCRYPTED PRIVATE KEY',
                                 b'RSA PRIVATE KEY', b'DSA PRIVATE KEY',
                                 b'EC PRIVATE KEY'):
                        if not import_keys:
                            continue
                        if key_file:
                            raise RuntimeError(
                                "Can't load private key from both %s and %s" %
                                (key_file, filename))
                        # the args -v2 aes256 -v2prf hmacWithSHA256 are needed
                        # on OpenSSL 1.0.2 (fips mode). As soon as FreeIPA
                        # requires OpenSSL 1.1.0 we'll be able to drop them
                        args = [
                            paths.OPENSSL, 'pkcs8',
                            '-topk8',
                            '-v2', 'aes256', '-v2prf', 'hmacWithSHA256',
                            '-passout', 'file:' + self.pwd_file,
                        ]
                        if ((label != b'PRIVATE KEY' and key_password) or
                                label == b'ENCRYPTED PRIVATE KEY'):
                            key_pwdfile = ipautil.write_tmp_file(key_password)
                            args += [
                                '-passin', 'file:' + key_pwdfile.name,
                            ]
                        try:
                            result = ipautil.run(
                                args, stdin=body, capture_output=True)
                        except ipautil.CalledProcessError as e:
                            logger.warning(
                                "Skipping private key in %s at line %s: %s",
                                filename, line, e)
                            continue
                        else:
                            # PKCS#8 DER, encrypted with our DB password
                            extracted_key = result.raw_output
                            key_file = filename
                            loaded = True
                            continue
                if loaded:
                    continue
                raise RuntimeError("Failed to load %s" % filename)
            # Try to load the file as DER certificate
            try:
                cert = x509.load_der_x509_certificate(data)
            except ValueError:
                pass
            else:
                extracted_certs.append(cert)
                continue
            # Try to import the file as PKCS#12 file
            if import_keys:
                try:
                    self.import_pkcs12(filename, key_password)
                except RuntimeError:
                    pass
                else:
                    if key_file:
                        raise RuntimeError(
                            "Can't load private key from both %s and %s" %
                            (key_file, filename))
                    key_file = filename
                    server_certs = self.find_server_certs()
                    if key_nickname:
                        # for/else: raise only when no nickname matched
                        for nickname, _trust_flags in server_certs:
                            if nickname == key_nickname:
                                break
                        else:
                            raise RuntimeError(
                                "Server certificate \"%s\" not found in %s" %
                                (key_nickname, filename))
                    else:
                        if len(server_certs) > 1:
                            raise RuntimeError(
                                "%s server certificates found in %s, "
                                "expecting only one" %
                                (len(server_certs), filename))
                    continue
            raise RuntimeError("Failed to load %s" % filename)
        if import_keys and not key_file:
            raise RuntimeError(
                "No server certificates found in %s" % (', '.join(files)))
        for cert in extracted_certs:
            nickname = str(DN(cert.subject))
            self.add_cert(cert, nickname, EMPTY_TRUST_FLAGS)
        if extracted_key:
            # Bundle the key together with all extracted certs into a
            # temporary PKCS#12 file and import that, so NSS pairs the
            # key with its matching certificate.
            with tempfile.NamedTemporaryFile() as in_file, \
                    tempfile.NamedTemporaryFile() as out_file:
                for cert in extracted_certs:
                    in_file.write(cert.public_bytes(x509.Encoding.PEM))
                in_file.write(extracted_key)
                in_file.flush()
                out_password = ipautil.ipa_generate_password()
                out_pwdfile = ipautil.write_tmp_file(out_password)
                args = [
                    paths.OPENSSL, 'pkcs12',
                    '-export',
                    '-in', in_file.name,
                    '-out', out_file.name,
                    '-passin', 'file:' + self.pwd_file,
                    '-passout', 'file:' + out_pwdfile.name,
                ]
                try:
                    ipautil.run(args)
                except ipautil.CalledProcessError as e:
                    raise RuntimeError(
                        "No matching certificate found for private key from "
                        "%s" % key_file)
                self.import_pkcs12(out_file.name, out_password)
def trust_root_cert(self, root_nickname, trust_flags):
if root_nickname[:7] == "Builtin":
logger.debug(
"No need to add trust for built-in root CAs, skipping %s",
root_nickname)
else:
trust_flags = unparse_trust_flags(trust_flags)
try:
self.run_certutil(["-M", "-n", root_nickname,
"-t", trust_flags])
except ipautil.CalledProcessError:
raise RuntimeError(
"Setting trust on %s failed" % root_nickname)
    def get_cert(self, nickname):
        """Fetch a certificate from the NSS database by nickname.

        Runs ``certutil -L -n <nickname> -a`` (ASCII/PEM output) and
        parses the first certificate out of it.

        :param nickname: nickname of the certificate in the NSS database
        :returns: the parsed certificate object as produced by
            find_cert_from_txt (callers such as export_pem_cert use its
            .public_bytes() method)
        :raises RuntimeError: if certutil fails (e.g. unknown nickname)
        """
        args = ['-L', '-n', nickname, '-a']
        try:
            result = self.run_certutil(args, capture_output=True)
        except ipautil.CalledProcessError:
            raise RuntimeError("Failed to get %s" % nickname)
        cert, _start = find_cert_from_txt(result.output, start=0)
        return cert
def has_nickname(self, nickname):
try:
self.get_cert(nickname)
except RuntimeError:
# This might be error other than "nickname not found". Beware.
return False
else:
return True
    def export_pem_cert(self, nickname, location):
        """Export the given cert to PEM file in the given location

        The file is made world-readable (0444).
        """
        cert = self.get_cert(nickname)
        with open(location, "wb") as fd:
            fd.write(cert.public_bytes(x509.Encoding.PEM))
        os.chmod(location, 0o444)
def import_pem_cert(self, nickname, flags, location):
"""Import a cert form the given PEM file.
The file must contain exactly one certificate.
"""
try:
with open(location) as fd:
certs = fd.read()
except IOError as e:
raise RuntimeError(
"Failed to open %s: %s" % (location, e.strerror)
)
cert, st = find_cert_from_txt(certs)
self.add_cert(cert, nickname, flags)
try:
find_cert_from_txt(certs, st)
except RuntimeError:
pass
else:
raise ValueError('%s contains more than one certificate' %
location)
def add_cert(self, cert, nick, flags):
flags = unparse_trust_flags(flags)
args = ["-A", "-n", nick, "-t", flags, '-a']
self.run_certutil(args, stdin=cert.public_bytes(x509.Encoding.PEM))
def delete_cert(self, nick):
self.run_certutil(["-D", "-n", nick])
    def verify_server_cert_validity(self, nickname, hostname):
        """Verify a certificate is valid for a SSL server with given hostname

        Raises a ValueError if the certificate is invalid.

        Two checks: certutil validates the cert for the 'V' (SSL
        server) usage, then the cert is matched against the hostname.
        """
        cert = self.get_cert(nickname)
        try:
            self.run_certutil(['-V', '-n', nickname, '-u', 'V'],
                              capture_output=True)
        except ipautil.CalledProcessError as e:
            # certutil output in case of error is
            # 'certutil: certificate is invalid: <ERROR_STRING>\n'
            raise ValueError(e.output)
        try:
            cert.match_hostname(hostname)
        except ValueError:
            raise ValueError('invalid for server %s' % hostname)
    def verify_ca_cert_validity(self, nickname):
        """Verify a cert in the database is a usable CA certificate.

        Checks, in order: non-empty subject, BasicConstraints present
        with CA=TRUE, SubjectKeyIdentifier present, and certutil
        validation for the 'L' usage.

        :raises ValueError: describing the first failed check
        """
        cert = self.get_cert(nickname)
        if not cert.subject:
            raise ValueError("has empty subject")
        try:
            bc = cert.extensions.get_extension_for_class(
                    cryptography.x509.BasicConstraints)
        except cryptography.x509.ExtensionNotFound:
            raise ValueError("missing basic constraints")
        if not bc.value.ca:
            raise ValueError("not a CA certificate")
        try:
            cert.extensions.get_extension_for_class(
                    cryptography.x509.SubjectKeyIdentifier)
        except cryptography.x509.ExtensionNotFound:
            raise ValueError("missing subject key identifier extension")
        try:
            self.run_certutil(['-V', '-n', nickname, '-u', 'L'],
                              capture_output=True)
        except ipautil.CalledProcessError as e:
            # certutil output in case of error is
            # 'certutil: certificate is invalid: <ERROR_STRING>\n'
            raise ValueError(e.output)
    def verify_kdc_cert_validity(self, nickname, realm):
        """Verify the KDC cert for the given realm.

        Builds the cert's chain from the database and delegates to the
        module-level verify_kdc_cert_validity() function (the bare name
        resolves to the module function, not this method).  The last
        cert in the chain is the KDC cert, the rest is the CA chain.
        """
        # NOTE(review): the comprehension variable shadows the
        # ``nickname`` parameter; harmless here, but easy to misread.
        nicknames = self.get_trust_chain(nickname)
        certs = [self.get_cert(nickname) for nickname in nicknames]
        verify_kdc_cert_validity(certs[-1], certs[:-1], realm)

472
ipapython/certmonger.py Normal file
View File

@@ -0,0 +1,472 @@
# Authors: Rob Crittenden <rcritten@redhat.com>
# David Kupka <dkupka@redhat.com>
#
# Copyright (C) 2010 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Some certmonger functions, mostly around updating the request file.
# This is used so we can add tracking to the Apache and 389-ds
# server certificates created during the IPA server installation.
import os
import sys
import time
import dbus
import shlex
from ipapython import ipautil
from ipapython import dogtag
from ipaplatform.paths import paths
from ipaplatform import services
from ipapython.ipa_log_manager import root_logger
# Filesystem locations of certmonger's request and CA definition files.
REQUEST_DIR = paths.CERTMONGER_REQUESTS_DIR
CA_DIR = paths.CERTMONGER_CAS_DIR
# DBus object path / interface names exported by the certmonger daemon.
DBUS_CM_PATH = '/org/fedorahosted/certmonger'
DBUS_CM_IF = 'org.fedorahosted.certmonger'
DBUS_CM_REQUEST_IF = 'org.fedorahosted.certmonger.request'
DBUS_CM_CA_IF = 'org.fedorahosted.certmonger.ca'
# Standard DBus properties interface, used to read/write request props.
DBUS_PROPERTY_IF = 'org.freedesktop.DBus.Properties'
class _cm_dbus_object(object):
    """
    Auxiliary class for convenient DBus object handling.

    Exposes the proxy object as self.obj, its method interface as
    self.obj_if and (optionally) the org.freedesktop.DBus.Properties
    interface as self.prop_if.
    """
    def __init__(self, bus, object_path, object_dbus_interface,
                 parent_dbus_interface=None, property_interface=False):
        """
        bus - DBus bus object, result of dbus.SystemBus() or dbus.SessionBus()
        Object is accessible over this DBus bus instance.
        object_path - path to requested object on DBus bus
        object_dbus_interface - interface used for method calls
            (available as self.obj_if)
        parent_dbus_interface - bus name used to obtain the proxy
            object; defaults to object_dbus_interface
        property_interface - create DBus property interface? True or False
        """
        if bus is None or object_path is None or object_dbus_interface is None:
            raise RuntimeError(
                "bus, object_path and dbus_interface must not be None.")
        if parent_dbus_interface is None:
            parent_dbus_interface = object_dbus_interface
        self.bus = bus
        self.path = object_path
        self.obj_dbus_if = object_dbus_interface
        self.parent_dbus_if = parent_dbus_interface
        self.obj = bus.get_object(parent_dbus_interface, object_path)
        self.obj_if = dbus.Interface(self.obj, object_dbus_interface)
        if property_interface:
            self.prop_if = dbus.Interface(self.obj, DBUS_PROPERTY_IF)
def _start_certmonger():
    """
    Start certmonger daemon. If it's already running systemctl just ignores
    the command.

    Failures are logged and then re-raised to the caller.
    (Python 2 only: uses the 'except Exception, e' syntax.)
    """
    if not services.knownservices.certmonger.is_running():
        try:
            services.knownservices.certmonger.start()
        except Exception, e:
            root_logger.error('Failed to start certmonger: %s' % e)
            raise
def _connect_to_certmonger():
    """
    Start certmonger daemon and connect to it via DBus.

    :return: _cm_dbus_object wrapping the daemon's main DBus interface
    :raises: whatever service start or DBus connection raised, after
        logging the error
    """
    try:
        _start_certmonger()
    except (KeyboardInterrupt, OSError), e:
        root_logger.error('Failed to start certmonger: %s' % e)
        raise
    try:
        bus = dbus.SystemBus()
        cm = _cm_dbus_object(bus, DBUS_CM_PATH, DBUS_CM_IF)
    except dbus.DBusException, e:
        root_logger.error("Failed to access certmonger over DBus: %s", e)
        raise
    return cm
def _get_requests(criteria=None):
    """
    Get all requests that matches the provided criteria.

    :param criteria: dict of request properties to match; the keys
        'nickname' (unique request id) and 'ca-name' (nickname of the
        CA backing the request) are handled specially, any other key is
        compared against the DBus property of the same name
    :return: list of _cm_dbus_object request wrappers
    :raises TypeError: when criteria is not a dict
    """
    # None instead of a mutable default argument (criteria=dict())
    # avoids the shared-default-object pitfall.
    if criteria is None:
        criteria = {}
    if not isinstance(criteria, dict):
        raise TypeError('"criteria" must be dict.')

    cm = _connect_to_certmonger()
    requests = []
    requests_paths = []
    if 'nickname' in criteria:
        # nickname uniquely identifies a single request
        request_path = cm.obj_if.find_request_by_nickname(criteria['nickname'])
        if request_path:
            requests_paths = [request_path]
    else:
        requests_paths = cm.obj_if.get_requests()

    for request_path in requests_paths:
        request = _cm_dbus_object(cm.bus, request_path, DBUS_CM_REQUEST_IF,
                                  DBUS_CM_IF, True)
        for criterion in criteria:
            if criterion == 'ca-name':
                # compare against the nickname of the request's CA
                ca_path = request.obj_if.get_ca()
                ca = _cm_dbus_object(cm.bus, ca_path, DBUS_CM_CA_IF,
                                     DBUS_CM_IF)
                value = ca.obj_if.get_nickname()
            else:
                value = request.prop_if.Get(DBUS_CM_REQUEST_IF, criterion)
            if value != criteria[criterion]:
                break
        else:
            # for/else: all criteria matched
            requests.append(request)
    return requests
def _get_request(criteria):
    """
    Find the single request that matches the given criteria.

    If 'nickname' is specified other criteria are ignored because
    'nickname' uniquely identifies a single request.

    :return: the matching request, or None when nothing matches
    :raises RuntimeError: when more than one request matches
    """
    matches = _get_requests(criteria)
    count = len(matches)
    if count > 1:
        raise RuntimeError("Criteria expected to be met by 1 request, got %s."
                           % count)
    if count == 1:
        return matches[0]
    return None
def get_request_value(request_id, directive):
    """
    Get property of request.

    :param request_id: certmonger request nickname (unique id)
    :param directive: property to read; 'ca-name' is mapped to the
        nickname of the CA the request uses, anything else is read as
        a DBus property of the request
    :return: the value, or None when the request does not exist
    """
    try:
        request = _get_request(dict(nickname=request_id))
    except RuntimeError, e:
        root_logger.error('Failed to get request: %s' % e)
        raise
    if request:
        if directive == 'ca-name':
            ca_path = request.obj_if.get_ca()
            ca = _cm_dbus_object(request.bus, ca_path, DBUS_CM_CA_IF,
                                 DBUS_CM_IF)
            return ca.obj_if.get_nickname()
        else:
            return request.prop_if.Get(DBUS_CM_REQUEST_IF, directive)
    else:
        return None
def get_request_id(criteria):
    """
    If you don't know the certmonger request_id then try to find it by looking
    through all the requests.

    criteria is a dict of key/value pairs to search for (note:
    _get_request() requires a dict, despite older docs saying tuple).
    The more specific the better. An error is raised if multiple
    request_ids are returned for the same criteria.

    None is returned if none of the criteria match.
    """
    try:
        request = _get_request(criteria)
    except RuntimeError, e:
        root_logger.error('Failed to get request: %s' % e)
        raise
    if request:
        return request.prop_if.Get(DBUS_CM_REQUEST_IF, 'nickname')
    else:
        return None
def get_requests_for_dir(dir):
    """
    Return a list containing the request ids (nicknames) of all
    certmonger requests tracking certs in the given NSS database
    directory.
    """
    criteria = {
        'cert-storage': 'NSSDB',
        'key-storage': 'NSSDB',
        'cert-database': dir,
        'key-database': dir,
    }
    return [request.prop_if.Get(DBUS_CM_REQUEST_IF, 'nickname')
            for request in _get_requests(criteria)]
def add_request_value(request_id, directive, value):
    """
    Add a new directive to a certmonger request file.

    Silently does nothing when the request does not exist; re-raises
    (after logging) when the lookup itself fails.
    """
    try:
        request = _get_request({'nickname': request_id})
    except RuntimeError, e:
        root_logger.error('Failed to get request: %s' % e)
        raise
    if request:
        request.obj_if.modify({directive: value})
def add_principal(request_id, principal):
    """
    Set the template principal on an existing certmonger request.

    A request needs a principal to be renewable; a certificate added
    via start-tracking won't have one yet.
    """
    directive = 'template-principal'
    add_request_value(request_id, directive, [principal])
def add_subject(request_id, subject):
    """
    Set the subject template on an existing certmonger request.

    A request needs the subject set to be renewable; a certificate
    added via start-tracking won't have a subject template yet.
    """
    directive = 'template-subject'
    add_request_value(request_id, directive, subject)
def request_cert(nssdb, nickname, subject, principal, passwd_fname=None):
    """
    Execute certmonger to request a server certificate.

    :param nssdb: NSS database directory for cert/key storage
    :param nickname: nickname to store the cert under
    :param subject: subject DN of the request
    :param principal: Kerberos principal for the cert
    :param passwd_fname: optional file holding the NSS DB PIN
    :return: the nickname (request id) of the new certmonger request
    """
    cm = _connect_to_certmonger()
    request_parameters = dict(KEY_STORAGE='NSSDB', CERT_STORAGE='NSSDB',
                              CERT_LOCATION=nssdb, CERT_NICKNAME=nickname,
                              SUBJECT=subject, PRINCIPAL=principal,)
    if passwd_fname:
        request_parameters['KEY_PIN_FILE'] = passwd_fname
    result = cm.obj_if.add_request(request_parameters)
    try:
        if result[0]:
            request = _cm_dbus_object(cm.bus, result[1], DBUS_CM_REQUEST_IF,
                                      DBUS_CM_IF, True)
    except TypeError:
        root_logger.error('Failed to get create new request.')
        raise
    # NOTE(review): if result[0] is falsy without raising TypeError,
    # 'request' is never bound and the line below raises NameError --
    # confirm whether certmonger can return such a result.
    return request.obj_if.get_nickname()
def start_tracking(nickname, secdir, password_file=None, command=None):
    """
    Tell certmonger to track the given certificate nickname in NSS
    database in secdir protected by optional password file password_file.

    command is an optional parameter which specifies a command for
    certmonger to run when it renews a certificate. This command must
    reside in /usr/lib/ipa/certmonger to work with SELinux.

    Returns certificate nickname.

    :raises RuntimeError: when no certmonger CA named 'IPA' exists
    """
    cm = _connect_to_certmonger()
    params = {'TRACK': True}
    params['cert-nickname'] = nickname
    params['cert-database'] = os.path.abspath(secdir)
    params['cert-storage'] = 'NSSDB'
    params['key-nickname'] = nickname
    params['key-database'] = os.path.abspath(secdir)
    params['key-storage'] = 'NSSDB'
    ca_path = cm.obj_if.find_ca_by_nickname('IPA')
    if not ca_path:
        raise RuntimeError('IPA CA not found')
    params['ca'] = ca_path
    if command:
        params['cert-postsave-command'] = command
    if password_file:
        params['KEY_PIN_FILE'] = os.path.abspath(password_file)
    result = cm.obj_if.add_request(params)
    try:
        if result[0]:
            request = _cm_dbus_object(cm.bus, result[1], DBUS_CM_REQUEST_IF,
                                      DBUS_CM_IF, True)
    except TypeError, e:
        root_logger.error('Failed to add new request.')
        raise
    # NOTE(review): as in request_cert(), 'request' may be unbound when
    # result[0] is falsy, which would raise NameError here -- confirm.
    return request.prop_if.Get(DBUS_CM_REQUEST_IF, 'nickname')
def stop_tracking(secdir, request_id=None, nickname=None):
    """
    Stop tracking the current request using either the request_id or nickname.

    Always returns None (older docs claimed True/False); raises
    RuntimeError when neither request_id nor nickname is given, and
    re-raises (after logging) when the request lookup fails.
    """
    if request_id is None and nickname is None:
        raise RuntimeError('Both request_id and nickname are missing.')
    criteria = {'cert-database': secdir}
    if request_id:
        criteria['nickname'] = request_id
    if nickname:
        criteria['cert-nickname'] = nickname
    try:
        request = _get_request(criteria)
    except RuntimeError, e:
        root_logger.error('Failed to get request: %s' % e)
        raise
    if request:
        cm = _connect_to_certmonger()
        cm.obj_if.remove_request(request.path)
def _find_IPA_ca():
    """
    Return a DBus wrapper (with property interface) for the certmonger
    CA entry whose nickname is 'IPA'.
    """
    cm = _connect_to_certmonger()
    path = cm.obj_if.find_ca_by_nickname('IPA')
    return _cm_dbus_object(cm.bus, path, DBUS_CM_CA_IF, DBUS_CM_IF, True)
def add_principal_to_cas(principal):
    """
    Append '-k <principal>' to the IPA CA external helper command,
    unless a -k option is already present.

    Needed when the hostname passed to ipa-client-install differs from
    gethostname(); undone on uninstall by remove_principal_from_cas().
    """
    ca = _find_IPA_ca()
    if not ca:
        return
    helper = ca.prop_if.Get(DBUS_CM_CA_IF, 'external-helper')
    if helper and '-k' not in shlex.split(helper):
        helper = '%s -k %s' % (helper.strip(), principal)
        ca.prop_if.Set(DBUS_CM_CA_IF, 'external-helper', helper)
def remove_principal_from_cas():
    """
    Remove any -k principal options from the ipa_submit helper,
    keeping only the helper executable itself.
    """
    ca = _find_IPA_ca()
    if not ca:
        return
    helper = ca.prop_if.Get(DBUS_CM_CA_IF, 'external-helper')
    if helper and '-k' in shlex.split(helper):
        stripped = shlex.split(helper)[0]
        ca.prop_if.Set(DBUS_CM_CA_IF, 'external-helper', stripped)
def get_pin(token, dogtag_constants=None):
    """
    Dogtag stores its NSS pin in a file formatted as token=PIN
    (one entry per line; older docs said "token:PIN" but the code
    splits on '=').

    :param token: token name to look up
    :param dogtag_constants: object providing PASSWORD_CONF_PATH; when
        None, dogtag.configured_constants() is used
    :return: the PIN for the token, or None when not found
    The caller is expected to handle any exceptions raised (e.g. when
    the password file cannot be read).
    """
    if dogtag_constants is None:
        dogtag_constants = dogtag.configured_constants()
    with open(dogtag_constants.PASSWORD_CONF_PATH, 'r') as f:
        for line in f:
            # Skip blank/malformed lines instead of crashing on the
            # tuple unpacking below.
            if '=' not in line:
                continue
            (tok, pin) = line.split('=', 1)
            if token == tok:
                return pin.strip()
    return None
def dogtag_start_tracking(ca, nickname, pin, pinfile, secdir, pre_command,
                          post_command, profile=None):
    """
    Tell certmonger to start tracking a dogtag CA certificate. These
    are handled differently because their renewal must be done directly
    and not through IPA.

    This uses the generic certmonger command getcert so we can specify
    a different helper.

    pre_command is the script to execute before a renewal is done.
    post_command is the script to execute after a renewal is done.
    Both commands can be None; relative command names are expanded via
    CERTMONGER_COMMAND_TEMPLATE using the multilib directory.
    """
    cm = _connect_to_certmonger()
    certmonger_cmd_template = paths.CERTMONGER_COMMAND_TEMPLATE

    # Pick the multilib directory once instead of duplicating the check
    # for pre and post commands.  Note: the old '2**32L' long literal
    # was unnecessary on Python 2 (ints promote automatically) and is a
    # syntax error on Python 3.
    if sys.maxsize > 2**32:
        libpath = 'lib64'
    else:
        libpath = 'lib'

    params = {'TRACK': True}
    params['cert-nickname'] = nickname
    params['cert-database'] = os.path.abspath(secdir)
    params['cert-storage'] = 'NSSDB'
    params['key-nickname'] = nickname
    params['key-database'] = os.path.abspath(secdir)
    params['key-storage'] = 'NSSDB'
    ca_path = cm.obj_if.find_ca_by_nickname(ca)
    if ca_path:
        params['ca'] = ca_path
    if pin:
        params['KEY_PIN'] = pin
    if pinfile:
        params['KEY_PIN_FILE'] = os.path.abspath(pinfile)
    if pre_command:
        if not os.path.isabs(pre_command):
            pre_command = certmonger_cmd_template % (libpath, pre_command)
        params['cert-presave-command'] = pre_command
    if post_command:
        if not os.path.isabs(post_command):
            post_command = certmonger_cmd_template % (libpath, post_command)
        params['cert-postsave-command'] = post_command
    if profile:
        params['ca-profile'] = profile

    cm.obj_if.add_request(params)
def check_state(dirs):
    """
    Given a set of directories and nicknames verify that we are no longer
    tracking certificates.

    dirs is a list of NSS database directories to test.  Returns a list
    of request ids for any tracked certificates found (empty when
    nothing is tracked).  This can only check for NSS-based certs.
    """
    found = []
    for directory in dirs:
        found.extend(get_requests_for_dir(directory))
    return found
if __name__ == '__main__':
    # Smoke test: request a cert for a dummy host, dump the CSR, then
    # stop tracking it.  (Python 2 only: uses the print statement.)
    request_id = request_cert(paths.HTTPD_ALIAS_DIR, "Test",
                              "cn=tiger.example.com,O=IPA",
                              "HTTP/tiger.example.com@EXAMPLE.COM")
    csr = get_request_value(request_id, 'csr')
    print csr
    # NOTE(review): stop_tracking's first parameter is secdir; passing
    # request_id positionally looks wrong -- confirm intended call is
    # stop_tracking(paths.HTTPD_ALIAS_DIR, request_id=request_id).
    stop_tracking(request_id)

View File

@@ -16,28 +16,19 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
# pylint: disable=deprecated-module
from optparse import (
Option, Values, OptionParser, IndentedHelpFormatter, OptionValueError)
# pylint: enable=deprecated-module
import ConfigParser
from optparse import Option, Values, OptionParser, IndentedHelpFormatter, OptionValueError
from copy import copy
import socket
import functools
from dns import resolver, rdatatype
from dns.exception import DNSException
import dns.name
# pylint: disable=import-error
from six.moves.configparser import SafeConfigParser
from six.moves.urllib.parse import urlsplit
# pylint: enable=import-error
from ipaplatform.paths import paths
from ipapython.dn import DN
from ipapython.ipautil import CheckedIPAddress, CheckedIPAddressLoopback
from ipaplatform.paths import paths
import dns.name
import socket
import re
import urlparse
class IPAConfigError(Exception):
def __init__(self, msg=''):
@@ -60,47 +51,32 @@ class IPAFormatter(IndentedHelpFormatter):
ret += "%s %s\n" % (spacing, line)
return ret
def check_ip_option(option, opt, value):
from ipapython.ipautil import CheckedIPAddress
def check_ip_option(option, opt, value, allow_loopback=False):
ip_local = option.ip_local is True
ip_netmask = option.ip_netmask is True
try:
if allow_loopback:
return CheckedIPAddressLoopback(value)
else:
return CheckedIPAddress(value)
return CheckedIPAddress(value, parse_netmask=ip_netmask, match_local=ip_local)
except Exception as e:
raise OptionValueError("option {}: invalid IP address {}: {}"
.format(opt, value, e))
raise OptionValueError("option %s: invalid IP address %s: %s" % (opt, value, e))
def check_dn_option(option, opt, value):
try:
return DN(value)
except Exception as e:
except Exception, e:
raise OptionValueError("option %s: invalid DN: %s" % (opt, e))
def check_constructor(option, opt, value):
con = option.constructor
assert con is not None, "Oops! Developer forgot to set 'constructor' kwarg"
try:
return con(value)
except Exception as e:
raise OptionValueError("option {} invalid: {}".format(opt, e))
class IPAOption(Option):
"""
optparse.Option subclass with support of options labeled as
security-sensitive such as passwords.
"""
ATTRS = Option.ATTRS + ["sensitive", "constructor"]
TYPES = Option.TYPES + ("ip", "dn", "constructor", "ip_with_loopback")
ATTRS = Option.ATTRS + ["sensitive", "ip_local", "ip_netmask"]
TYPES = Option.TYPES + ("ip", "dn")
TYPE_CHECKER = copy(Option.TYPE_CHECKER)
TYPE_CHECKER["ip"] = check_ip_option
TYPE_CHECKER["ip_with_loopback"] = functools.partial(check_ip_option,
allow_loopback=True)
TYPE_CHECKER["dn"] = check_dn_option
TYPE_CHECKER["constructor"] = check_constructor
class IPAOptionParser(OptionParser):
"""
@@ -126,13 +102,10 @@ class IPAOptionParser(OptionParser):
Returns all options except those with sensitive=True in the same
fashion as parse_args would
"""
all_opts_dict = {
o.dest: o for o in self._get_all_options()
if hasattr(o, 'sensitive')
}
all_opts_dict = dict([ (o.dest, o) for o in self._get_all_options() if hasattr(o, 'sensitive') ])
safe_opts_dict = {}
for option, value in opts.__dict__.items():
for option, value in opts.__dict__.iteritems():
if all_opts_dict[option].sensitive != True:
safe_opts_dict[option] = value
@@ -151,8 +124,7 @@ def verify_args(parser, args, needed_args = None):
elif len_have < len_need:
parser.error("no %s specified" % needed_list[len_have])
class IPAConfig(object):
class IPAConfig:
def __init__(self):
self.default_realm = None
self.default_server = []
@@ -180,30 +152,41 @@ class IPAConfig(object):
config = IPAConfig()
def __parse_config(discover_server = True):
p = SafeConfigParser()
p = ConfigParser.SafeConfigParser()
p.read(paths.IPA_DEFAULT_CONF)
try:
if not config.default_realm:
config.default_realm = p.get("global", "realm")
except Exception:
except:
pass
if discover_server:
try:
s = p.get("global", "xmlrpc_uri")
server = urlsplit(s)
server = urlparse.urlsplit(s)
config.default_server.append(server.netloc)
except Exception:
except:
pass
try:
if not config.default_domain:
config.default_domain = p.get("global", "domain")
except Exception:
except:
pass
def __discover_config(discover_server = True):
servers = []
try:
if not config.default_realm:
try:
# only import krbV when we need it
import krbV
krbctx = krbV.default_context()
config.default_realm = krbctx.default_realm
except ImportError:
pass
if not config.default_realm:
return False
if not config.default_domain:
# try once with REALM -> domain
domain = str(config.default_realm).lower()
@@ -244,13 +227,12 @@ def __discover_config(discover_server = True):
hostname = str(server.target).rstrip(".")
config.default_server.append(hostname)
except Exception:
except:
pass
def add_standard_options(parser):
parser.add_option("--realm", dest="realm", help="Override default IPA realm")
parser.add_option("--server", dest="server",
help="Override default FQDN of IPA server")
parser.add_option("--server", dest="server", help="Override default IPA server")
parser.add_option("--domain", dest="domain", help="Override default IPA DNS domain")
def init_config(options=None):

BIN
ipapython/config.pyc Normal file

Binary file not shown.

View File

@@ -18,14 +18,12 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import time
import datetime
import email.utils
from urllib2 import urlparse
from calendar import timegm
import six
# pylint: disable=import-error
from six.moves.urllib.parse import urlparse
# pylint: enable=import-error
from ipapython.ipa_log_manager import log_mgr
'''
Core Python has two cookie libraries, Cookie.py targeted to server
@@ -48,7 +46,7 @@ the relevant RFC's as well as actual practice in the field. However
cookielib.py is tighly integrated with urllib2 and it's not possible
to use most of the features of cookielib without simultaneously using
urllib2. Unfortunataely we only use httplib because of our dependency
on xmlrpc.client. Without urllib2 cookielib is a non-starter.
on xmlrpclib. Without urllib2 cookielib is a non-starter.
This module is a minimal implementation of Netscape cookies which
works equally well on either the client or server side. It's API is
@@ -110,7 +108,7 @@ class Cookie(object):
cookie = Cookie('session', session_id,
domain=my_domain, path=mypath,
httponly=True, secure=True, expires=expiration)
httpOnly=True, secure=True, expires=expiration)
headers.append(('Set-Cookie', str(cookie)))
@@ -187,7 +185,7 @@ class Cookie(object):
try:
dt = datetime.datetime(*email.utils.parsedate(s)[0:6])
except Exception as e:
except Exception, e:
raise ValueError("unable to parse expires datetime '%s': %s" % (s, e))
return dt
@@ -272,9 +270,8 @@ class Cookie(object):
if match:
key = match.group(1)
value = match.group(2)
# Double quoted value?
if value and value[0] == '"':
if value[0] == '"':
if value[-1] == '"':
value = value[1:-1]
else:
@@ -320,8 +317,7 @@ class Cookie(object):
return cookies
@classmethod
def get_named_cookie_from_string(cls, cookie_string, cookie_name,
request_url=None, timestamp=None):
def get_named_cookie_from_string(cls, cookie_string, cookie_name, request_url=None):
'''
A cookie string may contain multiple cookies, parse the cookie
string and return the last cookie in the string matching the
@@ -343,8 +339,6 @@ class Cookie(object):
if cookie.key == cookie_name:
target_cookie = cookie
if timestamp is not None:
target_cookie.timestamp = timestamp
if request_url is not None:
target_cookie.normalize(request_url)
return target_cookie
@@ -352,6 +346,9 @@ class Cookie(object):
def __init__(self, key, value, domain=None, path=None, max_age=None, expires=None,
secure=None, httponly=None, timestamp=None):
log_mgr.get_logger(self, True)
self.key = key
self.value = value
self.domain = domain
@@ -391,9 +388,9 @@ class Cookie(object):
self._timestamp = None
elif isinstance(value, datetime.datetime):
self._timestamp = value
elif isinstance(value, (six.integer_types, float)):
elif isinstance(value, (int, long, float)):
self._timestamp = datetime.datetime.utcfromtimestamp(value)
elif isinstance(value, six.string_types):
elif isinstance(value, basestring):
self._timestamp = Cookie.parse_datetime(value)
else:
raise TypeError('value must be datetime, int, long, float, basestring or None, not %s' % \
@@ -417,9 +414,9 @@ class Cookie(object):
self._expires = None
elif isinstance(value, datetime.datetime):
self._expires = value
elif isinstance(value, (six.integer_types, float)):
elif isinstance(value, (int, long, float)):
self._expires = datetime.datetime.utcfromtimestamp(value)
elif isinstance(value, six.string_types):
elif isinstance(value, basestring):
self._expires = Cookie.parse_datetime(value)
else:
raise TypeError('value must be datetime, int, long, float, basestring or None, not %s' % \
@@ -540,7 +537,7 @@ class Cookie(object):
received from.
'''
_scheme, domain, path, _params, _query, _fragment = urlparse(url)
scheme, domain, path, params, query, fragment = urlparse.urlparse(url)
if self.domain is None:
self.domain = domain.lower()
@@ -594,12 +591,10 @@ class Cookie(object):
# FIXME: At the moment we can't import from ipalib at the
# module level because of a dependency loop (cycle) in the
# import. Our module layout needs to be refactored.
# pylint: disable=ipa-forbidden-import
from ipalib.util import validate_domain_name
# pylint: enable=ipa-forbidden-import
try:
validate_domain_name(url_domain)
except Exception:
except Exception, e:
return False
if cookie_domain is None:
@@ -644,10 +639,7 @@ class Cookie(object):
cookie_name = self.key
(
url_scheme, url_domain, url_path,
_url_params, _url_query, _url_fragment
) = urlparse(url)
url_scheme, url_domain, url_path, url_params, url_query, url_fragment = urlparse.urlparse(url)
cookie_expiration = self.get_expiration()
if cookie_expiration is not None:

BIN
ipapython/cookie.pyc Normal file

Binary file not shown.

File diff suppressed because it is too large Load Diff

BIN
ipapython/dn.pyc Normal file

Binary file not shown.

View File

@@ -17,52 +17,35 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
import dns.name
import dns.exception
import dns.resolver
import copy
import six
from ipapython.ipautil import UnsafeIPAddress
if six.PY3:
unicode = str
logger = logging.getLogger(__name__)
@six.python_2_unicode_compatible
class DNSName(dns.name.Name):
labels = None # make pylint happy
@classmethod
def from_text(cls, labels, origin=None):
return cls(dns.name.from_text(labels, origin))
def __init__(self, labels, origin=None):
if isinstance(labels, str):
#pylint: disable=E1101
labels = dns.name.from_text(labels, origin).labels
elif isinstance(labels, unicode):
#pylint: disable=E1101
labels = dns.name.from_unicode(labels, origin).labels
elif isinstance(labels, dns.name.Name):
labels = labels.labels
try:
if isinstance(labels, six.string_types):
#pylint: disable=E1101
labels = dns.name.from_text(unicode(labels), origin).labels
elif isinstance(labels, dns.name.Name):
labels = labels.labels
super(DNSName, self).__init__(labels)
except UnicodeError as e:
except UnicodeError, e:
# dnspython bug, an invalid domain name returns the UnicodeError
# instead of a dns.exception
raise dns.exception.SyntaxError(e)
def __bool__(self):
def __nonzero__(self):
#dns.name.from_text('@') is represented like empty tuple
#we need to acting '@' as nonzero value
return True
__nonzero__ = __bool__ # for Python 2
def __copy__(self):
return DNSName(self.labels)
@@ -70,19 +53,14 @@ class DNSName(dns.name.Name):
return DNSName(copy.deepcopy(self.labels, memo))
def __str__(self):
return self.to_text()
def __unicode__(self):
return self.to_unicode()
# method ToASCII named by RFC 3490 and python standard library
if six.PY2:
def ToASCII(self):
# must be unicode string in Py2
return self.to_text().decode('ascii')
else:
def ToASCII(self):
return self.to_text()
def canonicalize(self):
return DNSName(super(DNSName, self).canonicalize())
def ToASCII(self):
#method named by RFC 3490 and python standard library
return str(self).decode('ascii') # must be unicode string
def concatenate(self, other):
return DNSName(super(DNSName, self).concatenate(other))
@@ -121,255 +99,3 @@ DNSName.root = DNSName(dns.name.root) # '.'
DNSName.empty = DNSName(dns.name.empty) # '@'
DNSName.ip4_rev_zone = DNSName(('in-addr', 'arpa', ''))
DNSName.ip6_rev_zone = DNSName(('ip6', 'arpa', ''))
# Empty zones are defined in various RFCs. BIND is by default serving them.
# This constat should contain everything listed in
# IANA registry "Locally-Served DNS Zones"
# URL: http://www.iana.org/assignments/locally-served-dns-zones
# + AS112 zone defined in RFC 7534. It is not in the registry for some
# reason but BIND 9.10 is serving it as automatic empty zones.
EMPTY_ZONES = [DNSName(aez).make_absolute() for aez in [
# RFC 1918
"10.IN-ADDR.ARPA", "16.172.IN-ADDR.ARPA", "17.172.IN-ADDR.ARPA",
"18.172.IN-ADDR.ARPA", "19.172.IN-ADDR.ARPA", "20.172.IN-ADDR.ARPA",
"21.172.IN-ADDR.ARPA", "22.172.IN-ADDR.ARPA", "23.172.IN-ADDR.ARPA",
"24.172.IN-ADDR.ARPA", "25.172.IN-ADDR.ARPA", "26.172.IN-ADDR.ARPA",
"27.172.IN-ADDR.ARPA", "28.172.IN-ADDR.ARPA", "29.172.IN-ADDR.ARPA",
"30.172.IN-ADDR.ARPA", "31.172.IN-ADDR.ARPA", "168.192.IN-ADDR.ARPA",
# RFC 6598
"64.100.IN-ADDR.ARPA", "65.100.IN-ADDR.ARPA", "66.100.IN-ADDR.ARPA",
"67.100.IN-ADDR.ARPA", "68.100.IN-ADDR.ARPA", "69.100.IN-ADDR.ARPA",
"70.100.IN-ADDR.ARPA", "71.100.IN-ADDR.ARPA", "72.100.IN-ADDR.ARPA",
"73.100.IN-ADDR.ARPA", "74.100.IN-ADDR.ARPA", "75.100.IN-ADDR.ARPA",
"76.100.IN-ADDR.ARPA", "77.100.IN-ADDR.ARPA", "78.100.IN-ADDR.ARPA",
"79.100.IN-ADDR.ARPA", "80.100.IN-ADDR.ARPA", "81.100.IN-ADDR.ARPA",
"82.100.IN-ADDR.ARPA", "83.100.IN-ADDR.ARPA", "84.100.IN-ADDR.ARPA",
"85.100.IN-ADDR.ARPA", "86.100.IN-ADDR.ARPA", "87.100.IN-ADDR.ARPA",
"88.100.IN-ADDR.ARPA", "89.100.IN-ADDR.ARPA", "90.100.IN-ADDR.ARPA",
"91.100.IN-ADDR.ARPA", "92.100.IN-ADDR.ARPA", "93.100.IN-ADDR.ARPA",
"94.100.IN-ADDR.ARPA", "95.100.IN-ADDR.ARPA", "96.100.IN-ADDR.ARPA",
"97.100.IN-ADDR.ARPA", "98.100.IN-ADDR.ARPA", "99.100.IN-ADDR.ARPA",
"100.100.IN-ADDR.ARPA", "101.100.IN-ADDR.ARPA",
"102.100.IN-ADDR.ARPA", "103.100.IN-ADDR.ARPA",
"104.100.IN-ADDR.ARPA", "105.100.IN-ADDR.ARPA",
"106.100.IN-ADDR.ARPA", "107.100.IN-ADDR.ARPA",
"108.100.IN-ADDR.ARPA", "109.100.IN-ADDR.ARPA",
"110.100.IN-ADDR.ARPA", "111.100.IN-ADDR.ARPA",
"112.100.IN-ADDR.ARPA", "113.100.IN-ADDR.ARPA",
"114.100.IN-ADDR.ARPA", "115.100.IN-ADDR.ARPA",
"116.100.IN-ADDR.ARPA", "117.100.IN-ADDR.ARPA",
"118.100.IN-ADDR.ARPA", "119.100.IN-ADDR.ARPA",
"120.100.IN-ADDR.ARPA", "121.100.IN-ADDR.ARPA",
"122.100.IN-ADDR.ARPA", "123.100.IN-ADDR.ARPA",
"124.100.IN-ADDR.ARPA", "125.100.IN-ADDR.ARPA",
"126.100.IN-ADDR.ARPA", "127.100.IN-ADDR.ARPA",
# RFC 5735 and RFC 5737
"0.IN-ADDR.ARPA", "127.IN-ADDR.ARPA", "254.169.IN-ADDR.ARPA",
"2.0.192.IN-ADDR.ARPA", "100.51.198.IN-ADDR.ARPA",
"113.0.203.IN-ADDR.ARPA", "255.255.255.255.IN-ADDR.ARPA",
# Local IPv6 Unicast Addresses
"0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.IP6.ARPA",
"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.IP6.ARPA",
# LOCALLY ASSIGNED LOCAL ADDRESS SCOPE
"D.F.IP6.ARPA", "8.E.F.IP6.ARPA", "9.E.F.IP6.ARPA", "A.E.F.IP6.ARPA",
"B.E.F.IP6.ARPA",
# Example Prefix, RFC 3849.
"8.B.D.0.1.0.0.2.IP6.ARPA",
# RFC 7534
"EMPTY.AS112.ARPA",
]]
def assert_absolute_dnsname(name):
"""Raise AssertionError if name is not DNSName or is not absolute.
>>> assert_absolute_dnsname(DNSName('absolute.name.example.'))
>>> assert_absolute_dnsname(DNSName('relative.name.example'))
Traceback (most recent call last):
...
AssertionError: name must be absolute, ...
>>> assert_absolute_dnsname('absolute.string.example.')
Traceback (most recent call last):
...
AssertionError: name must be DNSName instance, ...
"""
assert isinstance(name, DNSName), ("name must be DNSName instance, "
"got '%s'" % type(name))
assert name.is_absolute(), "name must be absolute, got '%s'" % name
def is_auto_empty_zone(zone):
"""True if specified zone name exactly matches an automatic empty zone.
>>> is_auto_empty_zone(DNSName('in-addr.arpa.'))
False
>>> is_auto_empty_zone(DNSName('10.in-addr.arpa.'))
True
>>> is_auto_empty_zone(DNSName('1.10.in-addr.arpa.'))
False
>>> is_auto_empty_zone(DNSName('10.in-addr.arpa'))
Traceback (most recent call last):
...
AssertionError: ...
"""
assert_absolute_dnsname(zone)
return zone in EMPTY_ZONES
def inside_auto_empty_zone(name):
"""True if specified absolute name is a subdomain of an automatic empty
zone.
DNS domain is a subdomain of itself so this function
returns True for zone apexes, too.
>>> inside_auto_empty_zone(DNSName('in-addr.arpa.'))
False
>>> inside_auto_empty_zone(DNSName('10.in-addr.arpa.'))
True
>>> inside_auto_empty_zone(DNSName('1.10.in-addr.arpa.'))
True
>>> inside_auto_empty_zone(DNSName('1.10.in-addr.arpa'))
Traceback (most recent call last):
...
AssertionError: ...
"""
assert_absolute_dnsname(name)
for aez in EMPTY_ZONES:
if name.is_subdomain(aez):
return True
return False
def related_to_auto_empty_zone(name):
"""True if specified absolute name is a sub/superdomain of an automatic
empty zone.
DNS domain is a subdomain of itself so this function
returns True for zone apexes, too.
>>> related_to_auto_empty_zone(DNSName('.'))
True
>>> related_to_auto_empty_zone(DNSName('in-addr.arpa.'))
True
>>> related_to_auto_empty_zone(DNSName('10.in-addr.arpa.'))
True
>>> related_to_auto_empty_zone(DNSName('1.10.in-addr.arpa.'))
True
>>> related_to_auto_empty_zone(DNSName('unrelated.example.'))
False
>>> related_to_auto_empty_zone(DNSName('1.10.in-addr.arpa'))
Traceback (most recent call last):
...
AssertionError: ...
"""
assert_absolute_dnsname(name)
relations = {dns.name.NAMERELN_SUBDOMAIN,
dns.name.NAMERELN_EQUAL,
dns.name.NAMERELN_SUPERDOMAIN}
return any(name.fullcompare(aez)[0] in relations
for aez in EMPTY_ZONES)
def has_empty_zone_addresses(hostname):
"""Detect if given host is using IP address belonging to
an automatic empty zone.
Information from --ip-address option used in installed is lost by
the time when upgrade is run. Use IP addresses from DNS as best
approximation.
This is brain-dead and duplicates logic from DNS installer
but I did not find other way around.
"""
ip_addresses = resolve_ip_addresses(hostname)
return any(
inside_auto_empty_zone(DNSName(ip.reverse_dns))
for ip in ip_addresses
)
def resolve_rrsets(fqdn, rdtypes):
"""
Get Resource Record sets for given FQDN.
CNAME chain is followed during resolution
but CNAMEs are not returned in the resulting rrset.
:returns:
set of dns.rrset.RRset objects, can be empty
if the FQDN does not exist or if none of rrtypes exist
"""
# empty set of rdtypes would always return empty set of rrsets
assert rdtypes, "rdtypes must not be empty"
if not isinstance(fqdn, DNSName):
fqdn = DNSName(fqdn)
fqdn = fqdn.make_absolute()
rrsets = []
for rdtype in rdtypes:
try:
answer = dns.resolver.query(fqdn, rdtype)
logger.debug('found %d %s records for %s: %s',
len(answer),
rdtype,
fqdn,
' '.join(str(rr) for rr in answer))
rrsets.append(answer.rrset)
except dns.resolver.NXDOMAIN as ex:
logger.debug('%s', ex)
break # no such FQDN, do not iterate
except dns.resolver.NoAnswer as ex:
logger.debug('%s', ex) # record type does not exist for given FQDN
except dns.exception.DNSException as ex:
logger.error('DNS query for %s %s failed: %s', fqdn, rdtype, ex)
raise
return rrsets
def resolve_ip_addresses(fqdn):
"""Get IP addresses from DNS A/AAAA records for given host (using DNS).
:returns:
list of IP addresses as UnsafeIPAddress objects
"""
rrsets = resolve_rrsets(fqdn, ['A', 'AAAA'])
ip_addresses = set()
for rrset in rrsets:
ip_addresses.update({UnsafeIPAddress(ip) for ip in rrset})
return ip_addresses
def check_zone_overlap(zone, raise_on_error=True):
logger.info("Checking DNS domain %s, please wait ...", zone)
if not isinstance(zone, DNSName):
zone = DNSName(zone).make_absolute()
# automatic empty zones always exist so checking them is pointless,
# do not report them to avoid meaningless error messages
if is_auto_empty_zone(zone):
return
try:
containing_zone = dns.resolver.zone_for_name(zone)
except dns.exception.DNSException as e:
msg = ("DNS check for domain %s failed: %s." % (zone, e))
if raise_on_error:
raise ValueError(msg)
else:
logger.warning('%s', msg)
return
if containing_zone == zone:
try:
ns = [ans.to_text() for ans in dns.resolver.query(zone, 'NS')]
except dns.exception.DNSException as e:
logger.debug("Failed to resolve nameserver(s) for domain %s: %s",
zone, e)
ns = []
msg = u"DNS zone {0} already exists in DNS".format(zone)
if ns:
msg += u" and is handled by server(s): {0}".format(', '.join(ns))
raise ValueError(msg)

BIN
ipapython/dnsutil.pyc Normal file

Binary file not shown.

View File

@@ -17,48 +17,132 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import collections
import logging
import os
import httplib
import xml.dom.minidom
import ConfigParser
from urllib import urlencode
import six
# pylint: disable=import-error
from six.moves.urllib.parse import urlencode
# pylint: enable=import-error
import nss.nss as nss
from nss.error import NSPRError
# pylint: disable=ipa-forbidden-import
from ipalib import api, errors
from ipalib.util import create_https_connection
from ipalib.errors import NetworkError
from ipalib.errors import NetworkError, CertificateOperationError
from ipalib.text import _
# pylint: enable=ipa-forbidden-import
from ipapython import ipautil
from ipapython import nsslib, ipautil
from ipaplatform.paths import paths
from ipapython.ipa_log_manager import *
# Python 3 rename. The package is available in "six.moves.http_client", but
# pylint cannot handle classes from that alias
try:
import httplib
except ImportError:
# pylint: disable=import-error
import http.client as httplib
# IPA can use either Dogtag version 9 or 10.
#
# Install tools should use the constants from install_constants, so that they
# install with version 10 if it is available, and with 9 if not.
# After IPA installation, the Dogtag version used is stored in the
# "dogtag_version" config option. (If that is missing, version 9 is assumed.)
# The configured_constants() function below provides constants relevant to
# the configured version.
if six.PY3:
unicode = str
class Dogtag10Constants(object):
DOGTAG_VERSION = 10
UNSECURE_PORT = 8080
AGENT_SECURE_PORT = 8443
EE_SECURE_PORT = 8443
AJP_PORT = 8009
DS_PORT = 389
DS_SECURE_PORT = 636
logger = logging.getLogger(__name__)
SPAWN_BINARY = paths.PKISPAWN
DESTROY_BINARY = paths.PKIDESTROY
Profile = collections.namedtuple('Profile', ['profile_id', 'description', 'store_issued'])
SERVER_ROOT = paths.VAR_LIB_PKI_DIR
PKI_INSTANCE_NAME = 'pki-tomcat'
PKI_ROOT = '%s/%s' % (SERVER_ROOT, PKI_INSTANCE_NAME)
CRL_PUBLISH_PATH = paths.PKI_CA_PUBLISH_DIR
CS_CFG_PATH = '%s/conf/ca/CS.cfg' % PKI_ROOT
PASSWORD_CONF_PATH = '%s/conf/password.conf' % PKI_ROOT
SERVICE_PROFILE_DIR = '%s/ca/profiles/ca' % PKI_ROOT
ALIAS_DIR = paths.PKI_TOMCAT_ALIAS_DIR.rstrip('/')
INCLUDED_PROFILES = {
Profile(u'caIPAserviceCert', u'Standard profile for network services', True),
Profile(u'IECUserRoles', u'User profile that includes IECUserRoles extension from request', True),
Profile(u'KDCs_PKINIT_Certs',
u'Profile for PKINIT support by KDCs',
False),
}
SERVICE_NAME = 'pki_tomcatd'
DEFAULT_PROFILE = u'caIPAserviceCert'
KDC_PROFILE = u'KDCs_PKINIT_Certs'
RACERT_LINE_SEP = '\n'
IPA_SERVICE_PROFILE = '%s/caIPAserviceCert.cfg' % SERVICE_PROFILE_DIR
SIGN_PROFILE = '%s/caJarSigningCert.cfg' % SERVICE_PROFILE_DIR
SHARED_DB = True
DS_USER = "dirsrv"
DS_NAME = "dirsrv"
class Dogtag9Constants(object):
DOGTAG_VERSION = 9
UNSECURE_PORT = 9180
AGENT_SECURE_PORT = 9443
EE_SECURE_PORT = 9444
AJP_PORT = 9447
DS_PORT = 7389
DS_SECURE_PORT = 7636
SPAWN_BINARY = paths.PKICREATE
DESTROY_BINARY = paths.PKISILENT
SERVER_ROOT = paths.VAR_LIB
PKI_INSTANCE_NAME = 'pki-ca'
PKI_ROOT = '%s/%s' % (SERVER_ROOT, PKI_INSTANCE_NAME)
CRL_PUBLISH_PATH = paths.PKI_CA_PUBLISH_DIR
CS_CFG_PATH = '%s/conf/CS.cfg' % PKI_ROOT
PASSWORD_CONF_PATH = '%s/conf/password.conf' % PKI_ROOT
SERVICE_PROFILE_DIR = '%s/profiles/ca' % PKI_ROOT
ALIAS_DIR = '%s/alias' % PKI_ROOT
SERVICE_NAME = 'pki-cad'
RACERT_LINE_SEP = '\r\n'
ADMIN_SECURE_PORT = 9445
EE_CLIENT_AUTH_PORT = 9446
TOMCAT_SERVER_PORT = 9701
IPA_SERVICE_PROFILE = '%s/caIPAserviceCert.cfg' % SERVICE_PROFILE_DIR
SIGN_PROFILE = '%s/caJarSigningCert.cfg' % SERVICE_PROFILE_DIR
SHARED_DB = False
DS_USER = "pkisrv"
DS_NAME = "PKI-IPA"
if os.path.exists(paths.PKISPAWN):
install_constants = Dogtag10Constants
else:
install_constants = Dogtag9Constants
def _get_configured_version(api):
"""Get the version of Dogtag IPA is configured to use
If an API is given, use information in its environment.
Otherwise, use information from the global config file.
"""
if api:
return int(api.env.dogtag_version)
else:
p = ConfigParser.SafeConfigParser()
p.read(paths.IPA_DEFAULT_CONF)
try:
version = p.get('global', 'dogtag_version')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
return 9
else:
return int(version)
def configured_constants(api=None):
"""Get the name of the Dogtag CA instance
See get_configured_version
"""
if _get_configured_version(api) >= 10:
return Dogtag10Constants
else:
return Dogtag9Constants
def error_from_xml(doc, message_template):
@@ -66,20 +150,21 @@ def error_from_xml(doc, message_template):
item_node = doc.getElementsByTagName("Error")
reason = item_node[0].childNodes[0].data
return errors.RemoteRetrieveError(reason=reason)
except Exception as e:
except Exception, e:
return errors.RemoteRetrieveError(reason=message_template % e)
def get_ca_certchain(ca_host=None):
def get_ca_certchain(ca_host=None, dogtag_constants=None):
"""
Retrieve the CA Certificate chain from the configured Dogtag server.
"""
if ca_host is None:
ca_host = api.env.ca_host
if dogtag_constants is None:
dogtag_constants = configured_constants()
chain = None
conn = httplib.HTTPConnection(
ca_host,
api.env.ca_install_port or 8080)
conn = httplib.HTTPConnection(ca_host,
api.env.ca_install_port or dogtag_constants.UNSECURE_PORT)
conn.request("GET", "/ca/ee/ca/getCertChain")
res = conn.getresponse()
doc = None
@@ -104,17 +189,7 @@ def get_ca_certchain(ca_host=None):
return chain
def _parse_ca_status(body):
doc = xml.dom.minidom.parseString(body)
try:
item_node = doc.getElementsByTagName("XMLResponse")[0]
item_node = item_node.getElementsByTagName("Status")[0]
return item_node.childNodes[0].data
except IndexError:
raise error_from_xml(doc, _("Retrieving CA status failed: %s"))
def ca_status(ca_host=None):
def ca_status(ca_host=None, use_proxy=True):
"""Return the status of the CA, and the httpd proxy in front of it
The returned status can be:
@@ -124,112 +199,113 @@ def ca_status(ca_host=None):
"""
if ca_host is None:
ca_host = api.env.ca_host
status, _headers, body = http_request(
ca_host, 8080, '/ca/admin/ca/getStatus',
# timeout: CA sometimes forgot to answer, we have to try again
timeout=api.env.http_timeout)
if use_proxy:
# Use port 443 to test the proxy as well
ca_port = 443
else:
ca_port = 8443
status, reason, headers, body = unauthenticated_https_request(
ca_host, ca_port, '/ca/admin/ca/getStatus')
if status == 503:
# Service temporarily unavailable
return status
return reason
elif status != 200:
raise errors.RemoteRetrieveError(
reason=_("Retrieving CA status failed with status %d") % status)
return _parse_ca_status(body)
reason=_("Retrieving CA status failed: %s") % reason)
doc = xml.dom.minidom.parseString(body)
try:
item_node = doc.getElementsByTagName("XMLResponse")[0]
item_node = item_node.getElementsByTagName("Status")[0]
return item_node.childNodes[0].data
except IndexError:
raise error_from_xml(doc, _("Retrieving CA status failed: %s"))
def https_request(
host, port, url, cafile, client_certfile, client_keyfile,
method='POST', headers=None, body=None, **kw):
def https_request(host, port, url, secdir, password, nickname, **kw):
"""
:param method: HTTP request method (defalut: 'POST')
:param url: The path (not complete URL!) to post to.
:param body: The request body (encodes kw if None)
:param kw: Keyword arguments to encode into POST body.
:return: (http_status, http_headers, http_body)
as (integer, dict, str)
:return: (http_status, http_reason_phrase, http_headers, http_body)
as (integer, unicode, dict, str)
Perform a client authenticated HTTPS request
"""
def connection_factory(host, port):
return create_https_connection(
host, port,
cafile=cafile,
client_certfile=client_certfile,
client_keyfile=client_keyfile,
tls_version_min=api.env.tls_version_min,
tls_version_max=api.env.tls_version_max)
conn = nsslib.NSSConnection(host, port, dbdir=secdir)
conn.set_debuglevel(0)
conn.connect()
conn.sock.set_client_auth_data_callback(
nsslib.client_auth_data_callback,
nickname, password, nss.get_default_certdb())
return conn
if body is None:
body = urlencode(kw)
body = urlencode(kw)
return _httplib_request(
'https', host, port, url, connection_factory, body,
method=method, headers=headers)
'https', host, port, url, connection_factory, body)
def http_request(host, port, url, timeout=None, **kw):
def http_request(host, port, url, **kw):
"""
:param url: The path (not complete URL!) to post to.
:param timeout: Timeout in seconds for waiting for reply.
:param kw: Keyword arguments to encode into POST body.
:return: (http_status, http_headers, http_body)
as (integer, dict, str)
:return: (http_status, http_reason_phrase, http_headers, http_body)
as (integer, unicode, dict, str)
Perform an HTTP request.
"""
body = urlencode(kw)
if timeout is None:
conn_opt = {}
else:
conn_opt = {"timeout": timeout}
return _httplib_request(
'http', host, port, url, httplib.HTTPConnection, body,
connection_options=conn_opt)
'http', host, port, url, httplib.HTTPConnection, body)
def unauthenticated_https_request(host, port, url, **kw):
"""
:param url: The path (not complete URL!) to post to.
:param kw: Keyword arguments to encode into POST body.
:return: (http_status, http_reason_phrase, http_headers, http_body)
as (integer, unicode, dict, str)
Perform an unauthenticated HTTPS request.
"""
body = urlencode(kw)
return _httplib_request(
'https', host, port, url, httplib.HTTPSConnection, body)
def _httplib_request(
protocol, host, port, path, connection_factory, request_body,
method='POST', headers=None, connection_options=None):
protocol, host, port, path, connection_factory, request_body):
"""
:param request_body: Request body
:param connection_factory: Connection class to use. Will be called
with the host and port arguments.
:param method: HTTP request method (default: 'POST')
:param connection_options: a dictionary that will be passed to
connection_factory as keyword arguments.
Perform a HTTP(s) request.
"""
if connection_options is None:
connection_options = {}
uri = u'%s://%s%s' % (protocol, ipautil.format_netloc(host, port), path)
logger.debug('request %s %s', method, uri)
logger.debug('request body %r', request_body)
headers = headers or {}
if (
method == 'POST'
and 'content-type' not in (str(k).lower() for k in headers)
):
headers['content-type'] = 'application/x-www-form-urlencoded'
if isinstance(host, unicode):
host = host.encode('utf-8')
uri = '%s://%s%s' % (protocol, ipautil.format_netloc(host, port), path)
root_logger.debug('request %r', uri)
root_logger.debug('request body %r', request_body)
try:
conn = connection_factory(host, port, **connection_options)
conn.request(method, uri, body=request_body, headers=headers)
conn = connection_factory(host, port)
conn.request('POST', uri,
body=request_body,
headers={'Content-type': 'application/x-www-form-urlencoded'},
)
res = conn.getresponse()
http_status = res.status
http_headers = res.msg
http_reason_phrase = unicode(res.reason, 'utf-8')
http_headers = res.msg.dict
http_body = res.read()
conn.close()
except Exception as e:
logger.debug("httplib request failed:", exc_info=True)
except Exception, e:
raise NetworkError(uri=uri, error=str(e))
logger.debug('response status %d', http_status)
logger.debug('response headers %s', http_headers)
logger.debug('response body %r', http_body)
root_logger.debug('request status %d', http_status)
root_logger.debug('request reason_phrase %r', http_reason_phrase)
root_logger.debug('request headers %s', http_headers)
root_logger.debug('request body %r', http_body)
return http_status, http_headers, http_body
return http_status, http_reason_phrase, http_headers, http_body

BIN
ipapython/dogtag.pyc Normal file

Binary file not shown.

View File

@@ -1,47 +0,0 @@
# Authors: Petr Viktorin <pviktori@redhat.com>
#
# Copyright (C) 2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
class SetseboolError(Exception):
"""Raised when setting a SELinux boolean fails
:param failed: Dictionary mapping boolean names to intended values
to their intended values, for booleans that cound not be set
:param command: Command the user can run to set the booleans
The initializer arguments are copied to attributes of the same name.
"""
def __init__(self, failed, command):
message = "Could not set SELinux booleans: %s" % ' '.join(
'%s=%s' % (name, value) for name, value in failed.items())
super(SetseboolError, self).__init__(message)
self.failed = failed
self.command = command
def format_service_warning(self, service_name):
"""Format warning for display when this is raised from service install
"""
return '\n'.join([
'WARNING: %(err)s',
'',
'The %(service)s may not function correctly until ',
'the booleans are successfully changed with the command:',
' %(cmd)s',
'Try updating the policycoreutils and selinux-policy packages.'
]) % {'err': self, 'service': service_name, 'cmd': self.command}

View File

@@ -1,88 +0,0 @@
#
# Copyright (C) 2015-2017 FreeIPA Contributors see COPYING for license
#
from collections import deque
class Graph(object):
"""
Simple oriented graph structure
G = (V, E) where G is graph, V set of vertices and E list of edges.
E = (tail, head) where tail and head are vertices
"""
def __init__(self):
self.vertices = set()
self.edges = []
self._adj = dict()
def add_vertex(self, vertex):
self.vertices.add(vertex)
self._adj[vertex] = []
def add_edge(self, tail, head):
if tail not in self.vertices:
raise ValueError("tail is not a vertex")
if head not in self.vertices:
raise ValueError("head is not a vertex")
self.edges.append((tail, head))
self._adj[tail].append(head)
def remove_edge(self, tail, head):
try:
self.edges.remove((tail, head))
except KeyError:
raise ValueError(
"graph does not contain edge: ({0}, {1})".format(tail, head)
)
self._adj[tail].remove(head)
def remove_vertex(self, vertex):
try:
self.vertices.remove(vertex)
except KeyError:
raise ValueError(
"graph does not contain vertex: {0}".format(vertex)
)
# delete _adjacencies
del self._adj[vertex]
for adj in self._adj.values():
adj[:] = [v for v in adj if v != vertex]
# delete edges
self.edges = [
e for e in self.edges if e[0] != vertex and e[1] != vertex
]
def get_tails(self, head):
"""
Get list of vertices where a vertex is on the right side of an edge
"""
return [e[0] for e in self.edges if e[1] == head]
def get_heads(self, tail):
"""
Get list of vertices where a vertex is on the left side of an edge
"""
return [e[1] for e in self.edges if e[0] == tail]
def bfs(self, start=None):
"""
Breadth-first search traversal of the graph from `start` vertex.
Return a set of all visited vertices
"""
if not start:
start = next(iter(self.vertices))
visited = set()
queue = deque([start])
while queue:
vertex = queue.popleft()
if vertex not in visited:
visited.add(vertex)
queue.extend(set(self._adj.get(vertex, [])) - visited)
return visited

View File

@@ -1,7 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Installer framework.
"""

View File

@@ -1,359 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Command line support.
"""
import collections
import enum
import logging
import optparse # pylint: disable=deprecated-module
import signal
import six
from ipapython import admintool
from ipapython.ipa_log_manager import standard_logging_setup
from ipapython.ipautil import (CheckedIPAddress, CheckedIPAddressLoopback,
private_ccache)
from . import core, common
__all__ = ['install_tool', 'uninstall_tool']
if six.PY3:
long = int
NoneType = type(None)
logger = logging.getLogger(__name__)
def _get_usage(configurable_class):
usage = '%prog [options]'
for owner_cls, name in configurable_class.knobs():
knob_cls = getattr(owner_cls, name)
if knob_cls.is_cli_positional():
if knob_cls.cli_metavar is not None:
metavar = knob_cls.cli_metavar
elif knob_cls.cli_names:
metavar = knob_cls.cli_names[0].upper()
else:
metavar = name.replace('_', '-').upper()
try:
knob_cls.default
except AttributeError:
fmt = ' {}'
else:
fmt = ' [{}]'
usage += fmt.format(metavar)
return usage
def install_tool(configurable_class, command_name, log_file_name,
debug_option=False, verbose=False, console_format=None,
use_private_ccache=True, uninstall_log_file_name=None):
if uninstall_log_file_name is not None:
uninstall_kwargs = dict(
configurable_class=configurable_class,
command_name=command_name,
log_file_name=uninstall_log_file_name,
debug_option=debug_option,
verbose=verbose,
console_format=console_format,
)
else:
uninstall_kwargs = None
return type(
'install_tool({0})'.format(configurable_class.__name__),
(InstallTool,),
dict(
configurable_class=configurable_class,
command_name=command_name,
log_file_name=log_file_name,
usage=_get_usage(configurable_class),
debug_option=debug_option,
verbose=verbose,
console_format=console_format,
uninstall_kwargs=uninstall_kwargs,
use_private_ccache=use_private_ccache,
)
)
def uninstall_tool(configurable_class, command_name, log_file_name,
debug_option=False, verbose=False, console_format=None):
return type(
'uninstall_tool({0})'.format(configurable_class.__name__),
(UninstallTool,),
dict(
configurable_class=configurable_class,
command_name=command_name,
log_file_name=log_file_name,
usage=_get_usage(configurable_class),
debug_option=debug_option,
verbose=verbose,
console_format=console_format,
)
)
class ConfigureTool(admintool.AdminTool):
configurable_class = None
debug_option = False
verbose = False
console_format = None
use_private_ccache = True
@staticmethod
def _transform(configurable_class):
raise NotImplementedError
@classmethod
def add_options(cls, parser, positional=False):
transformed_cls = cls._transform(cls.configurable_class)
if issubclass(transformed_cls, common.Interactive):
parser.add_option(
'-U', '--unattended',
dest='unattended',
default=False,
action='store_true',
help="unattended (un)installation never prompts the user",
)
groups = collections.OrderedDict()
# if no group is defined, add the option to the parser top level
groups[None] = parser
for owner_cls, name in transformed_cls.knobs():
knob_cls = getattr(owner_cls, name)
if knob_cls.is_cli_positional() is not positional:
continue
group_cls = knob_cls.group()
try:
opt_group = groups[group_cls]
except KeyError:
opt_group = groups[group_cls] = optparse.OptionGroup(
parser, "{0} options".format(group_cls.description))
parser.add_option_group(opt_group)
knob_type = knob_cls.type
if issubclass(knob_type, list):
try:
# typing.List[X].__parameters__ == (X,)
knob_scalar_type = knob_type.__parameters__[0]
except AttributeError:
knob_scalar_type = str
else:
knob_scalar_type = knob_type
kwargs = dict()
if knob_scalar_type is NoneType:
kwargs['type'] = None
kwargs['const'] = True
kwargs['default'] = False
elif knob_scalar_type is str:
kwargs['type'] = 'string'
elif knob_scalar_type is int:
kwargs['type'] = 'int'
elif knob_scalar_type is long:
kwargs['type'] = 'long'
elif knob_scalar_type is CheckedIPAddressLoopback:
kwargs['type'] = 'ip_with_loopback'
elif knob_scalar_type is CheckedIPAddress:
kwargs['type'] = 'ip'
elif issubclass(knob_scalar_type, enum.Enum):
kwargs['type'] = 'choice'
kwargs['choices'] = [i.value for i in knob_scalar_type]
kwargs['metavar'] = "{{{0}}}".format(
",".join(kwargs['choices']))
else:
kwargs['type'] = 'constructor'
kwargs['constructor'] = knob_scalar_type
kwargs['dest'] = name
if issubclass(knob_type, list):
if kwargs['type'] is None:
kwargs['action'] = 'append_const'
else:
kwargs['action'] = 'append'
else:
if kwargs['type'] is None:
kwargs['action'] = 'store_const'
else:
kwargs['action'] = 'store'
if knob_cls.sensitive:
kwargs['sensitive'] = True
if knob_cls.cli_metavar:
kwargs['metavar'] = knob_cls.cli_metavar
if not positional:
cli_info = (
(knob_cls.deprecated, knob_cls.cli_names),
(True, knob_cls.cli_deprecated_names),
)
else:
cli_info = (
(knob_cls.deprecated, (None,)),
)
for hidden, cli_names in cli_info:
opt_strs = []
for cli_name in cli_names:
if cli_name is None:
cli_name = '--{}'.format(name.replace('_', '-'))
opt_strs.append(cli_name)
if not opt_strs:
continue
if not hidden:
help = knob_cls.description
else:
help = optparse.SUPPRESS_HELP
opt_group.add_option(
*opt_strs,
help=help,
**kwargs
)
super(ConfigureTool, cls).add_options(parser,
debug_option=cls.debug_option)
def __init__(self, options, args):
    """
    Parse positional arguments out of ``args`` into ``options``.

    optparse has no native positional-argument support, so a throwaway
    OptionParser is built (via add_options(..., positional=True)) and its
    Option objects are used to type-convert and store each positional
    argument as if it had been an option.
    """
    super(ConfigureTool, self).__init__(options, args)
    self.transformed_cls = self._transform(self.configurable_class)
    # Names of knobs that are exposed as positional CLI arguments,
    # in knob declaration order.
    self.positional_arguments = []
    for owner_cls, name in self.transformed_cls.knobs():
        knob_cls = getattr(owner_cls, name)
        if knob_cls.is_cli_positional():
            self.positional_arguments.append(name)
    # fake option parser to parse positional arguments
    # (because optparse does not support positional argument parsing)
    fake_option_parser = optparse.OptionParser()
    self.add_options(fake_option_parser, True)
    # Map each knob name (option dest) to its fake Option object.
    fake_option_map = {option.dest: option
                       for group in fake_option_parser.option_groups
                       for option in group.option_list}
    for index, name in enumerate(self.positional_arguments):
        try:
            # Consume positional arguments left-to-right; missing
            # trailing arguments are simply left unset.
            value = self.args.pop(0)
        except IndexError:
            break
        fake_option = fake_option_map[name]
        # Option.process() converts the value and stores it on
        # self.options; errors are reported as "argument N".
        fake_option.process('argument {}'.format(index + 1),
                            value,
                            self.options,
                            self.option_parser)
def validate_options(self, needs_root=True):
    """Validate options; positional arguments were already consumed in
    __init__, so anything left in self.args is an error."""
    super(ConfigureTool, self).validate_options(needs_root=needs_root)
    if len(self.args) > 0:
        self.option_parser.error("Too many arguments provided")
def _setup_logging(self, log_file_mode='w', no_file=False):
    """
    Configure logging for the tool.

    :param log_file_mode: mode used to open the log file ('w' or 'a')
    :param no_file: when True, log to the console only
    """
    if no_file:
        log_file_name = None
    elif self.options.log_file:
        log_file_name = self.options.log_file
    else:
        log_file_name = self.log_file_name
    standard_logging_setup(
        log_file_name,
        verbose=self.verbose,
        debug=self.options.verbose,
        # Fix: log_file_mode was accepted but never forwarded, so callers
        # requesting append mode ('a') silently got the default 'w'.
        filemode=log_file_mode,
        console_format=self.console_format)
    if log_file_name:
        logger.debug('Logging to %s', log_file_name)
    elif not no_file:
        logger.debug('Not logging to a file')
def run(self):
    """
    Instantiate the configurable from the parsed options and run it.

    Knob validation errors raised by the constructor are reported
    through the option parser, attributed to the offending CLI option
    or positional argument.
    """
    kwargs = {}
    transformed_cls = self._transform(self.configurable_class)
    knob_classes = {n: getattr(c, n) for c, n in transformed_cls.knobs()}
    # Pass through only knobs the user actually set; unset options
    # (None) fall back to knob defaults.
    for name in knob_classes:
        value = getattr(self.options, name, None)
        if value is not None:
            kwargs[name] = value
    if (issubclass(self.configurable_class, common.Interactive) and
            not self.options.unattended):
        kwargs['interactive'] = True
    try:
        cfgr = transformed_cls(**kwargs)
    except core.KnobValueError as e:
        # Report the failure under the knob's CLI spelling: positional
        # arguments by position, options by their first CLI name.
        knob_cls = knob_classes[e.name]
        try:
            index = self.positional_arguments.index(e.name)
        except ValueError:
            cli_name = knob_cls.cli_names[0] or e.name.replace('_', '-')
            desc = "option {0}".format(cli_name)
        else:
            desc = "argument {0}".format(index + 1)
        self.option_parser.error("{0}: {1}".format(desc, e))
    except RuntimeError as e:
        self.option_parser.error(str(e))
    # Turn SIGTERM into KeyboardInterrupt (see __signal_handler).
    signal.signal(signal.SIGTERM, self.__signal_handler)
    if self.use_private_ccache:
        # Run inside a private Kerberos credential cache.
        with private_ccache():
            super(ConfigureTool, self).run()
            cfgr.run()
    else:
        super(ConfigureTool, self).run()
        cfgr.run()
@staticmethod
def __signal_handler(signum, frame):
    """Translate an incoming signal (installed for SIGTERM in run())
    into a KeyboardInterrupt."""
    raise KeyboardInterrupt()
class InstallTool(ConfigureTool):
    """ConfigureTool running the install transformation; optionally
    dispatches to a generated uninstaller command via --uninstall."""

    # Keyword arguments for uninstall_tool(); None disables --uninstall.
    uninstall_kwargs = None

    _transform = staticmethod(common.installer)

    @classmethod
    def add_options(cls, parser, positional=False):
        """Add the common options, plus --uninstall when supported."""
        super(InstallTool, cls).add_options(parser, positional)
        if cls.uninstall_kwargs is None:
            return
        parser.add_option(
            '--uninstall',
            dest='uninstall',
            default=False,
            action='store_true',
            help=("uninstall an existing installation. The uninstall can "
                  "be run with --unattended option"),
        )

    @classmethod
    def get_command_class(cls, options, args):
        """Return the uninstaller command class when --uninstall was
        given and is supported, otherwise defer to the base class."""
        if cls.uninstall_kwargs is None or not options.uninstall:
            return super(InstallTool, cls).get_command_class(options, args)
        uninstall_cls = uninstall_tool(**cls.uninstall_kwargs)
        uninstall_cls.option_parser = cls.option_parser
        return uninstall_cls
class UninstallTool(ConfigureTool):
    """ConfigureTool variant that applies the uninstall transformation."""
    _transform = staticmethod(common.uninstaller)

View File

@@ -1,118 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Common stuff.
"""
import logging
import traceback
from . import core
from .util import from_
__all__ = ['step', 'Installable', 'Interactive', 'Continuous', 'installer',
'uninstaller']
logger = logging.getLogger(__name__)
def step():
    """Return a decorator that wraps a generator function as a Step
    component, installing the function as the step's installer."""
    def decorator(func):
        component = core.Component(Step)
        component._installer = staticmethod(func)
        return component
    return decorator
class Installable(core.Configurable):
    """
    Configurable which does install or uninstall.
    """
    # Direction flag: False -> install, True -> uninstall.
    uninstalling = core.Property(False)

    def _get_components(self):
        """Yield child components, in reverse order when uninstalling."""
        components = super(Installable, self)._get_components()
        if not self.uninstalling:
            return components
        return reversed(list(components))

    def _configure(self):
        """Dispatch to the install or uninstall pipeline."""
        if self.uninstalling:
            return self._uninstall()
        return self._install()

    def _install(self):
        # Terminal implementation: delegate to the generic configure chain.
        assert not hasattr(super(Installable, self), '_install')
        return super(Installable, self)._configure()

    def _uninstall(self):
        # Terminal implementation: delegate to the generic configure chain.
        assert not hasattr(super(Installable, self), '_uninstall')
        return super(Installable, self)._configure()
class Step(Installable):
    """
    Installable whose install/uninstall logic is supplied by generator
    functions (see the ``step()`` decorator and ``uninstaller()``).
    """
    @property
    def parent(self):
        # A standalone Step has no parent; when bound as a component,
        # ComponentBase.parent takes precedence in the MRO.
        raise AttributeError('parent')

    def _install(self):
        # At each yield point of the installer generator, delegate one
        # round to the inherited configure machinery.
        for _nothing in self._installer(self.parent):
            yield from_(super(Step, self)._install())

    @staticmethod
    def _installer(obj):
        # Default no-op installer; replaced via the step() decorator.
        yield

    def _uninstall(self):
        # Mirror of _install() for the uninstall direction.
        for _nothing in self._uninstaller(self.parent):
            yield from_(super(Step, self)._uninstall())

    @staticmethod
    def _uninstaller(obj):
        # Default no-op uninstaller; replaced via uninstaller().
        yield

    @classmethod
    def uninstaller(cls, func):
        """Class decorator registering *func* as this step's uninstaller."""
        cls._uninstaller = staticmethod(func)
        return cls
class Interactive(core.Configurable):
    """Mixin for configurables that may prompt the user interactively."""
    # True when running interactively (set by the CLI when --unattended
    # is not given).
    interactive = core.Property(False)
class Continuous(core.Configurable):
    """Configurable that logs execution failures instead of aborting,
    so remaining steps keep running (used for uninstallers)."""

    def _handle_execute_exception(self, exc_info):
        try:
            super(Continuous, self)._handle_execute_exception(exc_info)
        except Exception as e:
            # Ordinary errors: full traceback at debug, summary at error.
            logger.debug("%s", traceback.format_exc())
            logger.error("%s", e)
        except BaseException:
            # Non-Exception BaseExceptions are swallowed too, but only
            # logged at debug level.
            logger.debug("%s", traceback.format_exc())
def installer(cls):
    """
    Return a subclass of *cls* fixed to install mode.

    The generated class mixes in ``Installable`` and always passes
    ``uninstalling=False`` to ``__init__``.
    """
    class Installer(cls, Installable):
        def __init__(self, **kwargs):
            super(Installer, self).__init__(uninstalling=False,
                                            **kwargs)
    # Rename for readable reprs/log output.
    Installer.__name__ = 'installer({0})'.format(cls.__name__)
    return Installer
def uninstaller(cls):
    """
    Return a subclass of *cls* fixed to uninstall mode.

    ``Continuous`` is mixed in first so that failures during execution
    are logged rather than aborting the uninstall.
    """
    class Uninstaller(Continuous, cls, Installable):
        def __init__(self, **kwargs):
            super(Uninstaller, self).__init__(uninstalling=True,
                                              **kwargs)
    # Rename for readable reprs/log output.
    Uninstaller.__name__ = 'uninstaller({0})'.format(cls.__name__)
    return Uninstaller

View File

@@ -1,663 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
The framework core.
"""
import abc
import collections
import functools
import itertools
import sys
import six
from . import util
from .util import from_
__all__ = ['InvalidStateError', 'KnobValueError', 'Property', 'knob',
'Configurable', 'group', 'Component', 'Composite']
# Alias for the type of None; used as the knob type for flag-style knobs.
NoneType = type(None)
# Keep a reference to the builtin ``type`` because some functions below
# (e.g. _knob) use ``type`` as a parameter name.
builtin_type = type

# Configurable states
_VALIDATE_PENDING = 'VALIDATE_PENDING'
_VALIDATE_RUNNING = 'VALIDATE_RUNNING'
_EXECUTE_PENDING = 'EXECUTE_PENDING'
_EXECUTE_RUNNING = 'EXECUTE_RUNNING'
_STOPPED = 'STOPPED'
_FAILED = 'FAILED'
_CLOSED = 'CLOSED'

# Sentinel distinguishing "argument not given" from an explicit None.
_missing = object()
# Monotonic counter stamping knobs/components with declaration order.
_counter = itertools.count()
@functools.cmp_to_key
def _class_key(a, b):
    """Comparator (wrapped as a sort key) ordering classes before their
    bases; classes with no subclass relationship compare equal."""
    if a is b:
        return 0
    if issubclass(a, b):
        return -1
    if issubclass(b, a):
        return 1
    return 0
class InvalidStateError(Exception):
    """Raised when a state-machine transition is attempted from the
    wrong state (see Configurable.__transition)."""
class KnobValueError(ValueError):
    """ValueError that additionally records the name of the knob whose
    value was rejected."""

    def __init__(self, name, message):
        super(KnobValueError, self).__init__(message)
        # Name of the offending knob, used by the CLI for error reporting.
        self.name = name
class PropertyBase(six.with_metaclass(util.InnerClassMeta, object)):
    """
    Data-descriptor inner class implementing a configurable property.

    Values are stored in the owning object's ``__dict__`` under the name
    assigned by ``InnerClassMeta``; reads walk the ``_get_fallback()``
    chain (e.g. component -> parent composite) and finally fall back to
    ``default``.
    """
    # shut up pylint
    __outer_class__ = None
    __outer_name__ = None
    # Declaration-order stamp; set via _counter by knob()/Component().
    _order = None

    @property
    def default(self):
        # No default unless a subclass provides one.
        raise AttributeError('default')

    def __init__(self, outer):
        pass

    def __get__(self, obj, obj_type):
        # Look for an explicitly stored value on the object, then along
        # its fallback chain; fall back to the class default last.
        while obj is not None:
            try:
                return obj.__dict__[self.__outer_name__]
            except KeyError:
                pass
            obj = obj._get_fallback()
        try:
            return self.default
        except AttributeError:
            # Report the property name, not 'default'.
            raise AttributeError(self.__outer_name__)

    def __set__(self, obj, value):
        try:
            obj.__dict__[self.__outer_name__] = value
        except KeyError:
            raise AttributeError(self.__outer_name__)

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.__outer_name__]
        except KeyError:
            # Nothing stored: deleting an unset property is an error.
            raise AttributeError(self.__outer_name__)
def Property(default=_missing):
    """Create a new property class; ``default`` (if given) becomes the
    class-level default value."""
    attrs = {}
    if default is not _missing:
        attrs['default'] = default
    return util.InnerClassMeta('Property', (PropertyBase,), attrs)
class KnobBase(PropertyBase):
    """
    Base class of all knobs: user-settable configuration properties.

    Class attributes describe how the knob is exposed on the CLI.
    """
    # Type of the knob's value (set by _knob(); NoneType means a flag).
    type = None
    # True if the value must not be echoed/logged.
    sensitive = False
    # True if the knob is hidden from help output.
    deprecated = False
    # Help text for the CLI option.
    description = None
    # CLI option strings; a None entry means "derive --name from the knob".
    cli_names = (None,)
    # Old option spellings that still parse but are hidden from help.
    cli_deprecated_names = ()
    # Metavar shown in help output.
    cli_metavar = None

    def __init__(self, outer):
        # The configurable instance this knob is bound to.
        self.outer = outer

    def validate(self, value):
        # No validation by default; extended via the validator() decorator.
        pass

    @classmethod
    def group(cls):
        # A knob belongs to the option group of its outer configurable.
        return cls.__outer_class__.group()

    @classmethod
    def is_cli_positional(cls):
        # Positional iff every CLI name is given and none looks like an
        # option string.
        return all(n is not None and not n.startswith('-')
                   for n in cls.cli_names)

    @classmethod
    def default_getter(cls, func):
        """Decorator installing *func* as the knob's dynamic default."""
        @property
        def default(self):
            return func(self.outer)
        cls.default = default
        return cls

    @classmethod
    def validator(cls, func):
        """Decorator chaining *func* in front of the inherited validation."""
        def validate(self, value):
            func(self.outer, value)
            super(cls, self).validate(value)
        cls.validate = validate
        return cls
def _knob(type=_missing, default=_missing, bases=_missing, _order=_missing,
          sensitive=_missing, deprecated=_missing, description=_missing,
          group=_missing, cli_names=_missing, cli_deprecated_names=_missing,
          cli_metavar=_missing):
    """Build a knob class from KnobBase (or the given bases), setting
    only the attributes that were explicitly supplied."""
    # Normalize arguments: None type means "flag" (NoneType); a single
    # base or CLI name may be given bare instead of as a tuple.
    if type is None:
        type = NoneType
    if bases is _missing:
        bases = (KnobBase,)
    elif isinstance(bases, builtin_type):
        bases = (bases,)
    if cli_names is None or isinstance(cli_names, str):
        cli_names = (cli_names,)
    elif cli_names is not _missing:
        cli_names = tuple(cli_names)
    if isinstance(cli_deprecated_names, str):
        cli_deprecated_names = (cli_deprecated_names,)
    elif cli_deprecated_names is not _missing:
        cli_deprecated_names = tuple(cli_deprecated_names)
    # Only explicitly supplied values become class attributes; everything
    # else is inherited from the bases.
    class_dict = {
        key: value
        for key, value in (
            ('type', type),
            ('default', default),
            ('_order', _order),
            ('sensitive', sensitive),
            ('deprecated', deprecated),
            ('description', description),
            ('group', group),
            ('cli_names', cli_names),
            ('cli_deprecated_names', cli_deprecated_names),
            ('cli_metavar', cli_metavar),
        )
        if value is not _missing
    }
    return util.InnerClassMeta('Knob', bases, class_dict)
def knob(type, default=_missing, **kwargs):
    """
    Define a new knob.

    The knob is stamped with the next declaration-order number so that
    properties() can yield knobs in source order.
    """
    order = next(_counter)
    return _knob(type, default, _order=order, **kwargs)
def extend_knob(base, default=_missing, bases=_missing, group=_missing,
                **kwargs):
    """
    Extend an existing knob.

    The new knob inherits from *base* (unless explicit bases are given)
    and keeps the base's group and declaration order.
    """
    if bases is _missing:
        bases = (base,)
    if group is _missing:
        group = staticmethod(base.group)
    return _knob(_missing, default, bases=bases, _order=_missing,
                 group=group, **kwargs)
class Configurable(six.with_metaclass(abc.ABCMeta, object)):
    """
    Base class of all configurables.

    A configurable is a two-phase state machine driven by the
    ``_configure`` coroutine: it starts in VALIDATE_PENDING, runs the
    validation phase up to the coroutine's internal hand-off, then runs
    the execution phase.
    FIXME: details of validate/execute, properties and knobs
    """
    @classmethod
    def properties(cls):
        """
        Iterate over properties defined for the configurable.

        Yields ``(owner_cls, name)`` pairs; within each class of the MRO
        properties come out in declaration (_order) order, and names seen
        earlier in the MRO shadow later ones.
        """
        assert not hasattr(super(Configurable, cls), 'properties')
        seen = set()
        for owner_cls in cls.__mro__:
            result = []
            for name, prop_cls in owner_cls.__dict__.items():
                if name in seen:
                    continue
                seen.add(name)
                if not isinstance(prop_cls, type):
                    continue
                if not issubclass(prop_cls, PropertyBase):
                    continue
                result.append((prop_cls._order, owner_cls, name))
            # Sort this class's properties by declaration order.
            result = sorted(result, key=lambda r: r[0])
            for _order, owner_cls, name in result:
                yield owner_cls, name

    @classmethod
    def knobs(cls):
        # Subset of properties() restricted to knob classes.
        for owner_cls, name in cls.properties():
            prop_cls = getattr(owner_cls, name)
            if issubclass(prop_cls, KnobBase):
                yield owner_cls, name

    @classmethod
    def group(cls):
        # No option group by default.
        assert not hasattr(super(Configurable, cls), 'group')
        return None

    def __init__(self, **kwargs):
        """
        Initialize the configurable.

        Keyword arguments matching property names are stored on the
        instance; knob values are then validated. Leftover keywords
        raise TypeError.
        """
        cls = self.__class__
        for owner_cls, name in cls.properties():
            if name.startswith('_'):
                continue
            prop_cls = getattr(owner_cls, name)
            if not isinstance(prop_cls, type):
                continue
            if not issubclass(prop_cls, PropertyBase):
                continue
            try:
                value = kwargs.pop(name)
            except KeyError:
                pass
            else:
                setattr(self, name, value)
        for owner_cls, name in cls.knobs():
            if name.startswith('_'):
                continue
            if not isinstance(self, owner_cls):
                continue
            value = getattr(self, name, None)
            if value is None:
                # Unset knobs are not validated.
                continue
            prop_cls = getattr(owner_cls, name)
            prop = prop_cls(self)
            try:
                prop.validate(value)
            except KnobValueError:
                raise
            except ValueError as e:
                # Normalize plain ValueErrors so callers can attribute
                # the failure to the offending knob.
                raise KnobValueError(name, str(e))
        if kwargs:
            extra = sorted(kwargs)
            raise TypeError(
                "{0}() got {1} unexpected keyword arguments: {2}".format(
                    type(self).__name__,
                    len(extra),
                    ', '.join(repr(name) for name in extra)))
        self._reset()

    def _reset(self):
        # Rewind the state machine and create a fresh coroutine.
        assert not hasattr(super(Configurable, self), '_reset')
        self.__state = _VALIDATE_PENDING
        self.__gen = util.run_generator_with_yield_from(self._configure())

    def _get_components(self):
        # Only Composite subclasses have components.
        assert not hasattr(super(Configurable, self), '_get_components')
        raise TypeError("{0} is not composite".format(self))

    def _get_fallback(self):
        # End of the property-lookup fallback chain.
        return None

    @abc.abstractmethod
    def _configure(self):
        """
        Coroutine which defines the logic of the configurable.

        This base implementation marks the hand-off point between the
        validation and execution phases.
        """
        assert not hasattr(super(Configurable, self), '_configure')
        self.__transition(_VALIDATE_RUNNING, _EXECUTE_PENDING)
        while self.__state != _EXECUTE_RUNNING:
            yield

    def run(self):
        """
        Run the configurable.
        """
        self.validate()
        if self.__state == _EXECUTE_PENDING:
            self.execute()

    def validate(self):
        """
        Run the validation part of the configurable.
        """
        for _nothing in self._validator():
            pass

    def _validator(self):
        """
        Coroutine which runs the validation part of the configurable.
        """
        return self.__runner(_VALIDATE_PENDING,
                             _VALIDATE_RUNNING,
                             self._handle_validate_exception)

    def execute(self):
        """
        Run the execution part of the configurable.
        """
        for _nothing in self._executor():
            pass

    def _executor(self):
        """
        Coroutine which runs the execution part of the configurable.
        """
        return self.__runner(_EXECUTE_PENDING,
                             _EXECUTE_RUNNING,
                             self._handle_execute_exception)

    def done(self):
        """
        Return True if the configurable has finished.
        """
        return self.__state in (_STOPPED, _FAILED, _CLOSED)

    def run_until_executing(self, gen):
        # Drain *gen* until this configurable enters the execute phase.
        while self.__state != _EXECUTE_RUNNING:
            try:
                yield next(gen)
            except StopIteration:
                break

    def __runner(self, pending_state, running_state, exc_handler):
        # Drive self.__gen one phase at a time, mapping generator
        # termination and errors onto state transitions. Exceptions
        # thrown into this coroutine are forwarded into the generator.
        self.__transition(pending_state, running_state)
        step = lambda: next(self.__gen)
        while True:
            try:
                step()
            except StopIteration:
                self.__transition(running_state, _STOPPED)
                break
            except GeneratorExit:
                self.__transition(running_state, _CLOSED)
                break
            except BaseException:
                exc_info = sys.exc_info()
                try:
                    exc_handler(exc_info)
                except BaseException:
                    # Handler re-raised: the configurable has failed.
                    self.__transition(running_state, _FAILED)
                    raise
            if self.__state != running_state:
                # The phase hand-off happened inside _configure.
                break
            try:
                yield
            except BaseException:
                # Forward exceptions from the consumer into the generator.
                exc_info = sys.exc_info()
                step = lambda: self.__gen.throw(*exc_info)
            else:
                step = lambda: next(self.__gen)

    def _handle_exception(self, exc_info):
        # Default handling: re-raise with the original traceback.
        assert not hasattr(super(Configurable, self), '_handle_exception')
        six.reraise(*exc_info)

    def _handle_validate_exception(self, exc_info):
        assert not hasattr(super(Configurable, self),
                           '_handle_validate_exception')
        self._handle_exception(exc_info)

    def _handle_execute_exception(self, exc_info):
        assert not hasattr(super(Configurable, self),
                           '_handle_execute_exception')
        self._handle_exception(exc_info)

    def __transition(self, from_state, to_state):
        # Enforce legal state-machine transitions.
        if self.__state != from_state:
            raise InvalidStateError(self.__state)
        self.__state = to_state
def group(cls):
    """Class decorator marking *cls* as the root of its own option
    group: its ``group()`` static method returns the class itself."""
    def group():
        return cls
    cls.group = staticmethod(group)
    return cls
class ComponentMeta(util.InnerClassMeta, abc.ABCMeta):
    """Metaclass combining inner-class binding with ABC support."""
    pass
class ComponentBase(six.with_metaclass(ComponentMeta, Configurable)):
    """
    Configurable bound as an inner class of a composite; the instance is
    created lazily on first attribute access and falls back to its
    parent for property lookups.
    """
    # shut up pylint
    __outer_class__ = None
    __outer_name__ = None
    # Declaration-order stamp assigned by Component().
    _order = None

    @classmethod
    def group(cls):
        # Prefer our own group; otherwise inherit the outer class's group.
        result = super(ComponentBase, cls).group()
        if result is not None:
            return result
        else:
            return cls.__outer_class__.group()

    def __init__(self, parent, **kwargs):
        # The composite that owns this component.
        self.__parent = parent
        super(ComponentBase, self).__init__(**kwargs)

    @property
    def parent(self):
        return self.__parent

    def __get__(self, obj, obj_type):
        # Cache the instance on the owner so it is created only once.
        obj.__dict__[self.__outer_name__] = self
        return self

    def _get_fallback(self):
        # Property lookups fall back to the parent composite.
        return self.__parent

    def _handle_exception(self, exc_info):
        # Give the parent a chance to handle (or swallow) our exceptions.
        try:
            super(ComponentBase, self)._handle_exception(exc_info)
        except BaseException:
            exc_info = sys.exc_info()
            self.__parent._handle_exception(exc_info)
def Component(cls):
    """Wrap *cls* as a component inner class, stamped with the next
    declaration-order number."""
    return ComponentMeta('Component', (ComponentBase, cls),
                         {'_order': next(_counter)})
class Composite(Configurable):
    """
    Configurable composed of any number of components.
    Provides knobs of all child components.
    """
    @classmethod
    def properties(cls):
        # Merge own properties with the knobs of every component. When
        # two components define the same knob name, the more derived
        # knob class wins; unrelated definitions are an error.
        name_dict = {}
        owner_dict = collections.OrderedDict()
        for owner_cls, name in super(Composite, cls).properties():
            name_dict[name] = owner_cls
            owner_dict.setdefault(owner_cls, []).append(name)
        for owner_cls, name in cls.components():
            comp_cls = getattr(cls, name)
            for owner_cls, name in comp_cls.knobs():
                if hasattr(cls, name):
                    # Shadowed by an attribute on the composite itself.
                    continue
                try:
                    last_owner_cls = name_dict[name]
                except KeyError:
                    # First definition of this knob name.
                    name_dict[name] = owner_cls
                    owner_dict.setdefault(owner_cls, []).append(name)
                else:
                    knob_cls = getattr(owner_cls, name)
                    last_knob_cls = getattr(last_owner_cls, name)
                    if issubclass(knob_cls, last_knob_cls):
                        # The new definition refines the previous one.
                        name_dict[name] = owner_cls
                        owner_dict[last_owner_cls].remove(name)
                        owner_dict.setdefault(owner_cls, [])
                        if name not in owner_dict[owner_cls]:
                            owner_dict[owner_cls].append(name)
                    elif not issubclass(last_knob_cls, knob_cls):
                        raise TypeError("{0}.knobs(): conflicting definitions "
                                        "of '{1}' in {2} and {3}".format(
                                            cls.__name__,
                                            name,
                                            last_owner_cls.__name__,
                                            owner_cls.__name__))
        # Yield owners sorted most-derived-first.
        for owner_cls in sorted(owner_dict, key=_class_key):
            for name in owner_dict[owner_cls]:
                yield owner_cls, name

    @classmethod
    def components(cls):
        # Analogue of Configurable.properties() for component inner
        # classes: declaration order within each class of the MRO.
        assert not hasattr(super(Composite, cls), 'components')
        seen = set()
        for owner_cls in cls.__mro__:
            result = []
            for name, comp_cls in owner_cls.__dict__.items():
                if name in seen:
                    continue
                seen.add(name)
                if not isinstance(comp_cls, type):
                    continue
                if not issubclass(comp_cls, ComponentBase):
                    continue
                result.append((comp_cls._order, owner_cls, name))
            result = sorted(result, key=lambda r: r[0])
            for _order, owner_cls, name in result:
                yield owner_cls, name

    def __getattr__(self, name):
        # Delegate unknown attributes to the component providing the
        # knob of that name.
        for owner_cls, knob_name in self.knobs():
            if knob_name == name:
                break
        else:
            raise AttributeError(name)
        for component in self.__components:
            if isinstance(component, owner_cls):
                break
        else:
            raise AttributeError(name)
        return getattr(component, name)

    def _reset(self):
        # Instantiate components before rewinding the state machine.
        self.__components = list(self._get_components())
        super(Composite, self)._reset()

    def _get_components(self):
        for _owner_cls, name in self.components():
            yield getattr(self, name)

    def _configure(self):
        # Round-robin all component validators until each finishes, then
        # hand off to the execution phase and round-robin the executors.
        validate = [(c, c._validator()) for c in self.__components]
        while True:
            new_validate = []
            for child, validator in validate:
                try:
                    next(validator)
                except StopIteration:
                    pass
                else:
                    new_validate.append((child, validator))
            if not new_validate:
                break
            validate = new_validate
            yield
        if not self.__components:
            return
        # Phase hand-off (VALIDATE -> EXECUTE) in the base class.
        yield from_(super(Composite, self)._configure())
        execute = [(c, c._executor()) for c in self.__components
                   if not c.done()]
        while True:
            new_execute = []
            for child, executor in execute:
                try:
                    next(executor)
                except StopIteration:
                    pass
                else:
                    new_execute.append((child, executor))
            if not new_execute:
                break
            execute = new_execute
            yield

View File

@@ -1,34 +0,0 @@
#
# Copyright (C) 2016 FreeIPA Contributors see COPYING for license
#
import weakref
import six
# Cache of parameterized List types keyed by the parameter type; weak
# values let unused parameterizations be garbage-collected.
_cache = weakref.WeakValueDictionary()


class ListMeta(type):
    """Metaclass providing ``List[X]`` subscription: each subscript
    produces (and caches) a parameterized variant of the class."""

    def __getitem__(cls, key):
        if not isinstance(key, type):
            raise TypeError("Parameters to generic types must be types. "
                            "Got {!r}.".format(key))
        # Bug fix: the cache was consulted but never populated
        # (``return _cache.get(key, t)``), so every subscription built a
        # brand-new type and ``List[X] is List[X]`` was always False.
        # NOTE(review): the cache is keyed by parameter type only, which
        # assumes a single generic class uses this metaclass.
        t = _cache.get(key)
        if t is None:
            t = ListMeta(
                cls.__name__,
                cls.__bases__,
                {
                    '__parameters__': (key,),
                    '__init__': cls.__init__,
                }
            )
            _cache[key] = t
        return t
class List(six.with_metaclass(ListMeta, list)):
    """Generic list marker type: ``List[X]`` (via ListMeta.__getitem__)
    produces a parameterized variant used only for knob type
    declarations; it is never instantiated."""
    # Parameter types of this variant; empty for the unparameterized base.
    __parameters__ = ()

    def __init__(self, *_args, **_kwargs):
        raise TypeError("Type List cannot be instantiated; use list() instead")

View File

@@ -1,165 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Utilities.
"""
import sys
import six
class from_(object):
    """
    Wrapper for delegating to a subgenerator.
    See `run_generator_with_yield_from`.
    """
    # Single slot keeps instances lightweight; 'obj' is the subgenerator.
    __slots__ = ('obj',)

    def __init__(self, obj):
        self.obj = obj
def run_generator_with_yield_from(gen):
    """
    Iterate over a generator object with subgenerator delegation.
    This implements Python 3's ``yield from`` expressions, using Python 2
    syntax:
    >>> def subgen():
    ...     yield 'B'
    ...     yield 'C'
    ...
    >>> def gen():
    ...     yield 'A'
    ...     yield from_(subgen())
    ...     yield 'D'
    ...
    >>> list(run_generator_with_yield_from(gen()))
    ['A', 'B', 'C', 'D']
    Returning value from a subgenerator is not supported.
    """
    exc_info = None
    value = None
    # Stack of nested generators; the top of the stack is running.
    stack = [gen]
    while stack:
        # Consume the value/exception produced last iteration exactly once.
        prev_exc_info, exc_info = exc_info, None
        prev_value, value = value, None
        gen = stack[-1]
        try:
            if prev_exc_info is None:
                value = gen.send(prev_value)
            else:
                value = gen.throw(*prev_exc_info)
        except StopIteration:
            # Subgenerator finished; resume its parent.
            stack.pop()
            continue
        except BaseException:
            # Propagate the exception into the parent generator.
            exc_info = sys.exc_info()
            stack.pop()
            continue
        else:
            if isinstance(value, from_):
                # Delegation request: push the subgenerator and start it.
                stack.append(value.obj)
                value = None
                continue
        try:
            value = (yield value)
        except BaseException:
            # Exception thrown into us by the consumer: forward it.
            exc_info = sys.exc_info()
    if exc_info is not None:
        # The outermost generator ended with a pending exception.
        six.reraise(*exc_info)
class InnerClassMeta(type):
    """
    Metaclass making a class usable as a data descriptor on its outer
    class: attribute access on the owner instantiates the inner class
    (bound to the instance) and delegates to the instance's own
    descriptor protocol if it defines one.
    """
    # pylint: disable=no-value-for-parameter
    def __new__(mcs, name, bases, class_dict):
        # Binding info is computed lazily in __bind; drop any inherited
        # values so each new class binds to its own outer class.
        class_dict.pop('__outer_class__', None)
        class_dict.pop('__outer_name__', None)
        return super(InnerClassMeta, mcs).__new__(mcs, name, bases, class_dict)

    def __get__(cls, obj, obj_type):
        outer_class, outer_name = cls.__bind(obj_type)
        if obj is None:
            # Class-level access returns the inner class itself.
            return cls
        assert isinstance(obj, outer_class)
        try:
            # A stored instance shadows the descriptor.
            return obj.__dict__[outer_name]
        except KeyError:
            inner = cls(obj)
            try:
                getter = inner.__get__
            except AttributeError:
                return inner
            else:
                # Delegate to the inner instance's own __get__.
                return getter(obj, obj_type)

    def __set__(cls, obj, value):
        outer_class, outer_name = cls.__bind(obj.__class__)
        assert isinstance(obj, outer_class)
        inner = cls(obj)
        try:
            setter = inner.__set__
        except AttributeError:
            try:
                inner.__delete__
            except AttributeError:
                # Plain attribute semantics when the inner instance
                # defines no descriptor protocol.
                obj.__dict__[outer_name] = value
            else:
                # Delete-only descriptors reject assignment.
                raise AttributeError('__set__')
        else:
            setter(obj, value)

    def __delete__(cls, obj):
        outer_class, outer_name = cls.__bind(obj.__class__)
        assert isinstance(obj, outer_class)
        inner = cls(obj)
        try:
            deleter = inner.__delete__
        except AttributeError:
            try:
                inner.__set__
            except AttributeError:
                # Plain attribute semantics.
                try:
                    del obj.__dict__[outer_name]
                except KeyError:
                    raise AttributeError(outer_name)
            else:
                # Set-only descriptors reject deletion.
                raise AttributeError('__delete__')
        else:
            deleter(obj)

    def __bind(cls, obj_type):
        # Locate (once) the outer class and attribute name under which
        # this inner class is defined, caching the result on the class.
        try:
            outer_class = cls.__dict__['__outer_class__']
            name = cls.__dict__['__outer_name__']
        except KeyError:
            outer_class, name, value = None, None, None
            for outer_class in obj_type.__mro__:
                for name, value in six.iteritems(outer_class.__dict__):
                    if value is cls:
                        break
                if value is cls:
                    break
            assert value is cls
            cls.__outer_class__ = outer_class
            cls.__outer_name__ = name
            # Qualify the class name for readable reprs.
            cls.__name__ = '.'.join((outer_class.__name__, name))
            cls.__qualname__ = cls.__name__
        return outer_class, name

3
ipapython/ipa.conf Normal file
View File

@@ -0,0 +1,3 @@
[defaults]
# realm = EXAMPLE.COM
# server = ipa.example.com

View File

@@ -16,20 +16,26 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import re
import time
import warnings
import sys
import six
#-------------------------------------------------------------------------------
# Module exports
__all__ = ['log_mgr', 'root_logger', 'standard_logging_setup',
'ISO8601_UTC_DATETIME_FMT',
'IPA_ROOT_LOGGER_NAME', 'ISO8601_UTC_DATETIME_FMT',
'LOGGING_FORMAT_STDERR', 'LOGGING_FORMAT_STDOUT', 'LOGGING_FORMAT_FILE']
#-------------------------------------------------------------------------------
import sys
import re
import copy
from log_manager import LogManager, parse_log_level
#-------------------------------------------------------------------------------
# Our root logger, all loggers will be descendents of this.
IPA_ROOT_LOGGER_NAME = 'ipa'
# Format string for time.strftime() to produce a ISO 8601 date time
# formatted string in the UTC time zone.
ISO8601_UTC_DATETIME_FMT = '%Y-%m-%dT%H:%M:%SZ'
@@ -56,151 +62,163 @@ LOGGING_FORMAT_STANDARD_CONSOLE = '%(name)-12s: %(levelname)-8s %(message)s'
# Used by standard_logging_setup() for file message
LOGGING_FORMAT_STANDARD_FILE = '%(asctime)s %(levelname)s %(message)s'
#-------------------------------------------------------------------------------
class _DeprecatedLogger(object):
    """Logger wrapper that emits a DeprecationWarning on every call
    before delegating to the wrapped stdlib logger."""
    def __init__(self, logger, name):
        # Wrapped logger and the dotted name reported in the warning.
        self._logger = logger
        self._name = name
class IPALogManager(LogManager):
'''
Subclass the LogManager to enforce some IPA specific logging
conventions.
def _warn(self):
    # Emit a DeprecationWarning pointing callers at module-level loggers.
    warnings.warn(
        "{} is deprecated, use a module-level logger".format(self._name),
        DeprecationWarning)
* Default to timestamps in UTC.
* Default to ISO 8601 timestamp format.
* Default the message format.
'''
def debug(self, *args, **kwargs):
    # Warn about deprecation, then delegate to the wrapped logger.
    self._warn()
    self._logger.debug(*args, **kwargs)
log_logger_level_config_re = re.compile(r'^log_logger_level_(debug|info|warn|warning|error|critical|\d+)$')
def info(self, *args, **kwargs):
    # Warn about deprecation, then delegate to the wrapped logger.
    self._warn()
    self._logger.info(*args, **kwargs)
def __init__(self, configure_state=None):
'''
:parameters:
configure_state
Used by clients of the log manager to track the
configuration state, may be any object.
'''
def warning(self, *args, **kwargs):
    # Warn about deprecation, then delegate to the wrapped logger.
    self._warn()
    self._logger.warning(*args, **kwargs)
super(IPALogManager, self).__init__(IPA_ROOT_LOGGER_NAME, configure_state)
def error(self, *args, **kwargs):
    # Warn about deprecation, then delegate to the wrapped logger.
    self._warn()
    self._logger.error(*args, **kwargs)
def configure_from_env(self, env, configure_state=None):
'''
Read the loggger configuration from the Env config. The
following items may be configured:
def critical(self, *args, **kwargs):
    # Warn about deprecation, then delegate to the wrapped logger.
    self._warn()
    self._logger.critical(*args, **kwargs)
Logger Levels
*log_logger_XXX = comma separated list of regexps*
def exception(self, *args, **kwargs):
    # Warn about deprecation, then delegate to the wrapped logger.
    self._warn()
    self._logger.exception(*args, **kwargs)
Logger levels can be explicitly specified for specific loggers as
opposed to a global logging level. Specific loggers are indicated
by a list of regular expressions bound to a level. If a logger's
name matches the regexp then it is assigned that level. The keys
in the Env config must begin with "log_logger_level\_" and then be
followed by a symbolic or numeric log level, for example::
log_logger_level_debug = ipapython\.dn\..*
log_logger_level_35 = ipalib\.plugins\.dogtag
def get_logger(who, bind_logger_names=False):
if isinstance(who, six.string_types):
warnings.warn(
"{}.log_mgr.get_logger is deprecated, use "
"logging.getLogger".format(__name__),
DeprecationWarning)
The first line says any logger belonging to the ipapython.dn module
will have its level configured to debug.
logger_name = who
else:
caller_globals = sys._getframe(1).f_globals
logger_name = caller_globals.get('__name__', '__main__')
if logger_name == '__main__':
logger_name = caller_globals.get('__file__', logger_name)
logger_name = os.path.basename(logger_name)
The second line says the ipalib.plugins.dogtag logger will be
configured to level 35.
logger = logging.getLogger(logger_name)
Note: logger names are a dot ('.') separated list forming a path
in the logger tree. The dot character is also a regular
expression metacharacter (matches any character) therefore you
will usually need to escape the dot in the logger names by
preceding it with a backslash.
if not isinstance(who, six.string_types):
obj_name = '%s.%s' % (who.__module__, who.__class__.__name__)
logger = _DeprecatedLogger(logger, obj_name)
The return value of this function is a dict with the following
format:
if bind_logger_names:
method = 'log'
if hasattr(who, method):
raise ValueError('%s is already bound to %s' % (method, repr(who)))
setattr(who, method, logger)
logger_regexps
List of (regexp, level) tuples
for method in ('debug',
'info',
'warning',
'error',
'exception',
'critical'):
if hasattr(who, method):
raise ValueError(
'%s is already bound to %s' % (method, repr(who)))
setattr(who, method, getattr(logger, method))
:parameters:
env
Env object configuration values are read from.
configure_state
If other than None update the log manger's configure_state
variable to this object. Clients of the log manager can
use configure_state to track the state of the log manager.
'''
logger_regexps = []
config = {'logger_regexps' : logger_regexps,
}
return logger
for attr in ('debug', 'verbose'):
value = getattr(env, attr, None)
if value is not None:
config[attr] = value
for attr in list(env):
# Get logger level configuration
match = IPALogManager.log_logger_level_config_re.search(attr)
if match:
value = match.group(1)
level = parse_log_level(value)
value = getattr(env, attr)
regexps = re.split('\s*,\s*', value)
# Add the regexp, it maps to the configured level
for regexp in regexps:
logger_regexps.append((regexp, level))
continue
class Filter(object):
def __init__(self, regexp, level):
self.regexp = re.compile(regexp)
self.level = level
self.configure(config, configure_state)
return config
def filter(self, record):
return (not self.regexp.match(record.name) or
record.levelno >= self.level)
def create_log_handlers(self, configs, logger=None, configure_state=None):
'Enforce some IPA specific configurations'
configs = copy.copy(configs)
for cfg in configs:
if not 'time_zone_converter' in cfg:
cfg['time_zone_converter'] = 'utc'
if not 'datefmt' in cfg:
cfg['datefmt'] = ISO8601_UTC_DATETIME_FMT
if not 'format' in cfg:
cfg['format'] = LOGGING_FORMAT_STDOUT
class Formatter(logging.Formatter):
    """logging.Formatter defaulting to the IPA stdout format with
    ISO 8601 timestamps rendered in UTC."""
    def __init__(
            self, fmt=LOGGING_FORMAT_STDOUT, datefmt=ISO8601_UTC_DATETIME_FMT):
        super(Formatter, self).__init__(fmt, datefmt)
        # Render %(asctime)s in UTC rather than local time.
        self.converter = time.gmtime
return super(IPALogManager, self).create_log_handlers(configs, logger, configure_state)
#-------------------------------------------------------------------------------
def standard_logging_setup(filename=None, verbose=False, debug=False,
filemode='w', console_format=None):
if console_format is None:
console_format = LOGGING_FORMAT_STANDARD_CONSOLE
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
filemode='w', console_format=LOGGING_FORMAT_STANDARD_CONSOLE):
handlers = []
# File output is always logged at debug level
if filename is not None:
umask = os.umask(0o177)
try:
file_handler = logging.FileHandler(filename, mode=filemode)
finally:
os.umask(umask)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(Formatter(LOGGING_FORMAT_STANDARD_FILE))
root_logger.addHandler(file_handler)
file_handler = dict(name='file',
filename=filename,
filemode=filemode,
permission=0600,
level='debug',
format=LOGGING_FORMAT_STANDARD_FILE)
handlers.append(file_handler)
level = logging.ERROR
if log_mgr.handlers.has_key('console'):
log_mgr.remove_handler('console')
level = 'error'
if verbose:
level = logging.INFO
level = 'info'
if debug:
level = logging.DEBUG
level = 'debug'
console_handler = logging.StreamHandler()
console_handler.setLevel(level)
console_handler.setFormatter(Formatter(console_format))
root_logger.addHandler(console_handler)
console_handler = dict(name='console',
stream=sys.stderr,
level=level,
format=console_format)
handlers.append(console_handler)
def convert_log_level(value):
    """
    Translate *value* into a numeric logging level.

    Accepts anything int() understands (returned as-is) or one of the
    symbolic names debug/info/warn/warning/error/critical (any case).
    Raises ValueError for unrecognized names.
    """
    names = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warn': logging.WARNING,
        'warning': logging.WARNING,
        'error': logging.ERROR,
        'critical': logging.CRITICAL,
    }
    try:
        return int(value)
    except ValueError:
        pass
    try:
        return names[value.lower()]
    except KeyError:
        raise ValueError('unknown log level (%s)' % value)
# default_level must be debug because we want the file handler to
# always log at the debug level.
log_mgr.configure(dict(default_level='debug',
handlers=handlers),
configure_state='standard')
return log_mgr.root_logger
#-------------------------------------------------------------------------------
# Single shared instance of log manager
log_mgr = sys.modules[__name__]
#
# By default always starts with stderr console handler at error level
# so messages generated before logging is fully configured have some
# place to got and won't get lost.
root_logger = _DeprecatedLogger(logging.getLogger(),
'{}.log_mgr.root_logger'.format(__name__))
log_mgr = IPALogManager()
log_mgr.configure(dict(default_level='error',
handlers=[dict(name='console',
stream=sys.stderr)]),
configure_state='default')
root_logger = log_mgr.root_logger

Binary file not shown.

File diff suppressed because it is too large Load Diff

BIN
ipapython/ipaldap.pyc Normal file

Binary file not shown.

View File

@@ -1,31 +0,0 @@
Metadata-Version: 1.2
Name: ipapython
Version: 4.6.2
Summary: FreeIPA python support library
Home-page: http://www.freeipa.org/
Author: FreeIPA Developers
Author-email: freeipa-devel@redhat.com
License: GPLv3
Download-URL: http://www.freeipa.org/page/Downloads
Description: FreeIPA python support library
Platform: Linux
Platform: Solaris
Platform: Unix
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Classifier: Programming Language :: C
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Operating System :: POSIX
Classifier: Operating System :: POSIX :: Linux
Classifier: Operating System :: Unix
Classifier: Topic :: Internet :: Name Service (DNS)
Classifier: Topic :: Security
Classifier: Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP
Requires-Python: >=2.7.5,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*

View File

@@ -1,34 +0,0 @@
README
__init__.py
admintool.py
certdb.py
config.py
cookie.py
dn.py
dnsutil.py
dogtag.py
errors.py
graph.py
ipa_log_manager.py
ipaldap.py
ipautil.py
ipavalidate.py
kerberos.py
kernel_keyring.py
nsslib.py
session_storage.py
setup.cfg
setup.py
ssh.py
version.py
install/__init__.py
install/cli.py
install/common.py
install/core.py
install/typing.py
install/util.py
ipapython.egg-info/PKG-INFO
ipapython.egg-info/SOURCES.txt
ipapython.egg-info/dependency_links.txt
ipapython.egg-info/requires.txt
ipapython.egg-info/top_level.txt

View File

@@ -1,15 +0,0 @@
cffi
cryptography>=1.6
dnspython>=1.15
gssapi>=1.2.0
ipaplatform==4.6.2
netaddr
netifaces>=0.10.4
python-ldap>=3.0.0b1
six
[:python_version<'3']
enum34
[install]
dbus-python

View File

@@ -1 +0,0 @@
ipapython

File diff suppressed because it is too large Load Diff

BIN
ipapython/ipautil.pyc Normal file

Binary file not shown.

BIN
ipapython/ipavalidate.pyc Normal file

Binary file not shown.

View File

@@ -1,204 +0,0 @@
#
# Copyright (C) 2016 FreeIPA Contributors see COPYING for license
#
"""
classes/utils for Kerberos principal name validation/manipulation
"""
import re
import six
from ipapython.ipautil import escape_seq, unescape_seq
if six.PY3:
unicode = str
# Split points: '@' / '/' characters that are NOT preceded by a backslash
# (i.e. not escaped inside a principal component).
REALM_SPLIT_RE = re.compile(r'(?<!\\)@')
COMPONENT_SPLIT_RE = re.compile(r'(?<!\\)/')


def parse_princ_name_and_realm(principal, realm=None):
    """
    Split a principal into its ``<principal_name>`` and ``<realm>`` parts.

    :param principal: unicode representation of the principal
    :param realm: fallback realm used when the input string carries none
    :returns: (principal_name, realm) tuple; the realm is the one parsed
        from the string when present, otherwise the `realm` argument
        (which defaults to None)
    :raises ValueError: if the string contains more than one unescaped '@'
    """
    parts = REALM_SPLIT_RE.split(principal)
    if len(parts) > 2:
        raise ValueError(
            "Principal is not in <name>@<realm> format")
    name = parts[0]
    if len(parts) == 2:
        # realm explicitly present in the input string
        return name, parts[1]
    # no unescaped '@' found; fall back to the caller-supplied realm
    return name, realm


def split_principal_name(principal_name):
    """
    Split a principal name (without realm) into its components.

    NOTE: operates on the following RFC 1510 types:
        * NT-PRINCIPAL
        * NT-SRV-INST
        * NT-SRV-HST

    Enterprise principals (NT-ENTERPRISE, see RFC 6806) are also handled.

    :param principal_name: unicode representation of the principal name
    :returns: tuple of components split at each unescaped '/'
    """
    components = COMPONENT_SPLIT_RE.split(principal_name)
    return tuple(components)
@six.python_2_unicode_compatible
class Principal(object):
    """
    Container for the principal name and realm according to RFC 1510.

    Instances are immutable value objects: equality, ordering and hashing
    are all defined in terms of the (components, realm) pair.
    """
    def __init__(self, components, realm=None):
        # Accept one of: text ("name@REALM"), another Principal (copy
        # constructor), or an iterable of name components.  Raw bytes are
        # rejected explicitly so callers must decode first.
        if isinstance(components, six.binary_type):
            raise TypeError(
                "Cannot create a principal object from bytes: {!r}".format(
                    components)
            )
        elif isinstance(components, six.string_types):
            # parse principal components from realm
            self.components, self.realm = self._parse_from_text(
                components, realm)
        elif isinstance(components, Principal):
            # copy constructor; an explicit `realm` overrides the realm of
            # the copied principal
            self.components = components.components
            self.realm = components.realm if realm is None else realm
        else:
            # iterable of ready-made components, stored as-is
            self.components = tuple(components)
            self.realm = realm

    def __eq__(self, other):
        # Equal iff both components and realm match; comparison against a
        # non-Principal object is always False (never NotImplemented).
        if not isinstance(other, Principal):
            return False
        return (self.components == other.components and
                self.realm == other.realm)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        # Ordering is defined on the escaped text form of the principal.
        return unicode(self) < unicode(other)

    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)

    def __gt__(self, other):
        return not self.__le__(other)

    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)

    def __hash__(self):
        # Consistent with __eq__: hash the components extended by realm.
        return hash(self.components + (self.realm,))

    def _parse_from_text(self, principal, realm=None):
        """
        Parse individual principal name components from the string
        representation of the principal. This is done in four steps:
        1.) split the string at the unescaped '@'
        2.) unescape any leftover '\\@' sequences
        3.) split the primary at the unescaped '/'
        4.) unescape leftover '\\/'
        :param principal: unicode representation of the principal name
        :param realm: if not None, this realm name will be used instead of the
        one parsed from `principal`
        :returns: tuple containing the principal name components and realm
        """
        principal_name, parsed_realm = parse_princ_name_and_realm(
            principal, realm=realm)
        (principal_name,) = unescape_seq(u'@', principal_name)
        if parsed_realm is not None:
            (parsed_realm,) = unescape_seq(u'@', parsed_realm)
        name_components = split_principal_name(principal_name)
        name_components = unescape_seq(u'/', *name_components)
        return name_components, parsed_realm

    @property
    def is_user(self):
        # a plain user principal has a single name component, e.g. "admin"
        return len(self.components) == 1

    @property
    def is_enterprise(self):
        # enterprise principals (RFC 6806) embed an '@' inside the single
        # name component, e.g. "user@example.com" @ REALM
        return self.is_user and u'@' in self.components[0]

    @property
    def is_service(self):
        # service principals have two or more components, e.g. "HTTP/host"
        return len(self.components) > 1

    @property
    def is_host(self):
        # host principals are the two-component service "host/<hostname>"
        return (self.is_service and len(self.components) == 2 and
                self.components[0] == u'host')

    @property
    def username(self):
        # primary name; only defined for user/enterprise principals
        if self.is_user:
            return self.components[0]
        else:
            raise ValueError(
                "User name is defined only for user and enterprise principals")

    @property
    def upn_suffix(self):
        # the part after the embedded '@' of an enterprise principal
        if not self.is_enterprise:
            raise ValueError("Only enterprise principals have UPN suffix")
        return self.components[0].split(u'@')[1]

    @property
    def hostname(self):
        # last component of a host/service principal is the host name
        if not (self.is_host or self.is_service):
            raise ValueError(
                "hostname is defined for host and service principals")
        return self.components[-1]

    @property
    def service_name(self):
        # all components except the trailing hostname, '/'-joined with
        # literal '/' characters re-escaped
        if not self.is_service:
            raise ValueError(
                "Only service principals have meaningful service name")
        return u'/'.join(c for c in escape_seq('/', *self.components[:-1]))

    def __str__(self):
        """
        Return the unicode representation of the principal; the inverse of
        the text parsing performed by _parse_from_text.
        """
        # escape '/' first, then '@' — mirrors the reverse unescape order
        # used when parsing
        name_components = escape_seq(u'/', *self.components)
        name_components = escape_seq(u'@', *name_components)
        principal_string = u'/'.join(name_components)
        if self.realm is not None:
            (realm,) = escape_seq(u'@', self.realm)
            principal_string = u'@'.join([principal_string, realm])
        return principal_string

    def __repr__(self):
        return "{0.__module__}.{0.__name__}('{1}')".format(
            self.__class__, self)

View File

@@ -18,7 +18,6 @@
#
import os
import six
from ipapython.ipautil import run
@@ -37,29 +36,24 @@ def dump_keys():
"""
Dump all keys
"""
result = run(['keyctl', 'list', KEYRING], raiseonerr=False,
capture_output=True)
return result.output
(stdout, stderr, rc) = run(['keyctl', 'list', KEYRING], raiseonerr=False)
return stdout
def get_real_key(key):
"""
One cannot request a key based on the description it was created with
so find the one we're looking for.
"""
assert isinstance(key, six.string_types)
result = run(['keyctl', 'search', KEYRING, KEYTYPE, key],
raiseonerr=False, capture_output=True)
if result.returncode:
(stdout, stderr, rc) = run(['keyctl', 'search', KEYRING, KEYTYPE, key], raiseonerr=False)
if rc:
raise ValueError('key %s not found' % key)
return result.raw_output.rstrip()
return stdout.rstrip()
def get_persistent_key(key):
assert isinstance(key, six.string_types)
result = run(['keyctl', 'get_persistent', KEYRING, key],
raiseonerr=False, capture_output=True)
if result.returncode:
(stdout, stderr, rc) = run(['keyctl', 'get_persistent', KEYRING, key], raiseonerr=False)
if rc:
raise ValueError('persistent key %s not found' % key)
return result.raw_output.rstrip()
return stdout.rstrip()
def is_persistent_keyring_supported():
uid = os.geteuid()
@@ -74,7 +68,6 @@ def has_key(key):
"""
Returns True/False whether the key exists in the keyring.
"""
assert isinstance(key, six.string_types)
try:
get_real_key(key)
return True
@@ -87,27 +80,22 @@ def read_key(key):
Use pipe instead of print here to ensure we always get the raw data.
"""
assert isinstance(key, six.string_types)
real_key = get_real_key(key)
result = run(['keyctl', 'pipe', real_key], raiseonerr=False,
capture_output=True)
if result.returncode:
raise ValueError('keyctl pipe failed: %s' % result.error_log)
(stdout, stderr, rc) = run(['keyctl', 'pipe', real_key], raiseonerr=False)
if rc:
raise ValueError('keyctl pipe failed: %s' % stderr)
return result.raw_output
return stdout
def update_key(key, value):
"""
Update the keyring data. If they key doesn't exist it is created.
"""
assert isinstance(key, six.string_types)
assert isinstance(value, bytes)
if has_key(key):
real_key = get_real_key(key)
result = run(['keyctl', 'pupdate', real_key], stdin=value,
raiseonerr=False)
if result.returncode:
raise ValueError('keyctl pupdate failed: %s' % result.error_log)
(stdout, stderr, rc) = run(['keyctl', 'pupdate', real_key], stdin=value, raiseonerr=False)
if rc:
raise ValueError('keyctl pupdate failed: %s' % stderr)
else:
add_key(key, value)
@@ -115,22 +103,17 @@ def add_key(key, value):
"""
Add a key to the kernel keyring.
"""
assert isinstance(key, six.string_types)
assert isinstance(value, bytes)
if has_key(key):
raise ValueError('key %s already exists' % key)
result = run(['keyctl', 'padd', KEYTYPE, key, KEYRING],
stdin=value, raiseonerr=False)
if result.returncode:
raise ValueError('keyctl padd failed: %s' % result.error_log)
(stdout, stderr, rc) = run(['keyctl', 'padd', KEYTYPE, key, KEYRING], stdin=value, raiseonerr=False)
if rc:
raise ValueError('keyctl padd failed: %s' % stderr)
def del_key(key):
"""
Remove a key from the keyring
"""
assert isinstance(key, six.string_types)
real_key = get_real_key(key)
result = run(['keyctl', 'unlink', real_key, KEYRING],
raiseonerr=False)
if result.returncode:
raise ValueError('keyctl unlink failed: %s' % result.error_log)
(stdout, stderr, rc) = run(['keyctl', 'unlink', real_key, KEYRING], raiseonerr=False)
if rc:
raise ValueError('keyctl unlink failed: %s' % stderr)

Binary file not shown.

1557
ipapython/log_manager.py Normal file

File diff suppressed because it is too large Load Diff

BIN
ipapython/log_manager.pyc Normal file

Binary file not shown.

View File

@@ -0,0 +1,337 @@
# Authors: Rob Crittenden <rcritten@redhat.com>
# John Dennis <jdennis@redhat.com>
#
# Copyright (C) 2009 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import httplib
import getpass
import socket
from ipapython.ipa_log_manager import *
from nss.error import NSPRError
import nss.io as io
import nss.nss as nss
import nss.ssl as ssl
import nss.error as error
from ipaplatform.paths import paths
def auth_certificate_callback(sock, check_sig, is_server, certdb):
    """NSS certificate authentication callback (Python 2 / python-nss).

    Verifies the peer certificate of `sock` against `certdb`; on the
    client side it additionally checks that the certificate matches the
    hostname set on the socket (man-in-the-middle defense).

    :param sock: the NSS SSL socket being authenticated
    :param check_sig: whether NSS should also verify signatures
    :param is_server: True when acting as server (validating a client cert)
    :param certdb: NSS certificate database used for verification
    :returns: True if the certificate is accepted, False otherwise
    """
    cert_is_valid = False
    cert = sock.get_peer_certificate()
    root_logger.debug("auth_certificate_callback: check_sig=%s is_server=%s\n%s",
                      check_sig, is_server, str(cert))
    pin_args = sock.get_pkcs11_pin_arg()
    if pin_args is None:
        pin_args = ()
    # Define how the cert is being used based upon the is_server flag. This may
    # seem backwards, but isn't. If we're a server we're trying to validate a
    # client cert. If we're a client we're trying to validate a server cert.
    if is_server:
        intended_usage = nss.certificateUsageSSLClient
    else:
        intended_usage = nss.certificateUsageSSLServer
    try:
        # If the cert fails validation it will raise an exception, the errno attribute
        # will be set to the error code matching the reason why the validation failed
        # and the strerror attribute will contain a string describing the reason.
        approved_usage = cert.verify_now(certdb, check_sig, intended_usage, *pin_args)
    except Exception, e:
        root_logger.error('cert validation failed for "%s" (%s)', cert.subject, e.strerror)
        cert_is_valid = False
        return cert_is_valid
    root_logger.debug("approved_usage = %s intended_usage = %s",
                      ', '.join(nss.cert_usage_flags(approved_usage)),
                      ', '.join(nss.cert_usage_flags(intended_usage)))
    # Is the intended usage a proper subset of the approved usage
    if approved_usage & intended_usage:
        cert_is_valid = True
    else:
        cert_is_valid = False
    # If this is a server, we're finished
    if is_server or not cert_is_valid:
        root_logger.debug('cert valid %s for "%s"', cert_is_valid, cert.subject)
        return cert_is_valid
    # Certificate is OK. Since this is the client side of an SSL
    # connection, we need to verify that the name field in the cert
    # matches the desired hostname. This is our defense against
    # man-in-the-middle attacks.
    hostname = sock.get_hostname()
    try:
        # If the cert fails validation it will raise an exception
        cert_is_valid = cert.verify_hostname(hostname)
    except Exception, e:
        root_logger.error('failed verifying socket hostname "%s" matches cert subject "%s" (%s)',
                          hostname, cert.subject, e.strerror)
        cert_is_valid = False
        return cert_is_valid
    root_logger.debug('cert valid %s for "%s"', cert_is_valid, cert.subject)
    return cert_is_valid
def client_auth_data_callback(ca_names, chosen_nickname, password, certdb):
    """NSS client-authentication callback: select a client cert and key.

    When `chosen_nickname` is given only that certificate is considered;
    otherwise the user certificates in `certdb` are scanned for the first
    one that is currently valid and signed by one of `ca_names`.

    :returns: (certificate, private_key) tuple on success, False on failure
    """
    cert = None
    if chosen_nickname:
        try:
            cert = nss.find_cert_from_nickname(chosen_nickname, password)
            priv_key = nss.find_key_by_any_cert(cert, password)
            return cert, priv_key
        except NSPRError:
            return False
    else:
        nicknames = nss.get_cert_nicknames(certdb, nss.SEC_CERT_NICKNAMES_USER)
        for nickname in nicknames:
            try:
                cert = nss.find_cert_from_nickname(nickname, password)
                # pick the first cert that is both time-valid and issued by
                # one of the CAs the server advertised
                if cert.check_valid_times():
                    if cert.has_signer_in_ca_names(ca_names):
                        priv_key = nss.find_key_by_any_cert(cert, password)
                        return cert, priv_key
            except NSPRError:
                return False
        return False
_af_dict = {
socket.AF_INET: io.PR_AF_INET,
socket.AF_INET6: io.PR_AF_INET6,
socket.AF_UNSPEC: io.PR_AF_UNSPEC
}
class NSSAddressFamilyFallback(object):
    """Mixin that resolves a host and connects, trying each resolved address.

    Translates a python `socket` address family constant into its NSPR
    equivalent and, on connect, walks all addresses returned by the
    resolver until one connection attempt succeeds.
    """
    def __init__(self, family):
        # family as given by the python socket module (AF_INET/AF_INET6/AF_UNSPEC)
        self.sock_family = family
        # equivalent NSPR family constant used for NSS socket creation
        self.family = self._get_nss_family(self.sock_family)

    def _get_nss_family(self, sock_family):
        """
        Translate a family from python socket module to nss family.
        """
        try:
            return _af_dict[sock_family]
        except KeyError:
            raise ValueError('Uknown socket family %d\n', sock_family)

    def _create_socket(self):
        # subclasses override this to create an SSL socket instead of a
        # plain NSPR socket
        self.sock = io.Socket(family=self.family)

    def connect_socket(self, host, port):
        """Resolve `host` and connect to the first reachable address."""
        try:
            addr_info = io.AddrInfo(host, family=self.family)
        except Exception:
            raise NSPRError(
                error_code=error.PR_ADDRESS_NOT_SUPPORTED_ERROR,
                error_message="Cannot resolve %s using family %s" % (host,
                              io.addr_family_name(self.family)))
        for net_addr in addr_info:
            root_logger.debug("Connecting: %s", net_addr)
            net_addr.port = port
            # remember the family that actually resolved so _create_socket
            # builds a matching socket
            self.family = net_addr.family
            try:
                self._create_socket()
                self.sock.connect(net_addr)
                return
            except Exception, e:
                root_logger.debug("Could not connect socket to %s, error: %s",
                                  net_addr, str(e))
                root_logger.debug("Try to continue with next family...")
                continue
        # every resolved address failed
        raise NSPRError(
            error_code=error.PR_ADDRESS_NOT_SUPPORTED_ERROR,
            error_message="Could not connect to %s using any address" % host)
class NSSConnection(httplib.HTTPConnection, NSSAddressFamilyFallback):
    """httplib connection that runs HTTP over an NSS SSL socket (Python 2)."""

    default_port = httplib.HTTPSConnection.default_port

    def __init__(self, host, port=None, strict=None,
                 dbdir=None, family=socket.AF_UNSPEC, no_init=False):
        """
        :param host: the server to connect to
        :param port: the port to use (default is set in HTTPConnection)
        :param dbdir: the NSS database directory
        :param family: network family to use (default AF_UNSPEC)
        :param no_init: do not initialize the NSS database. This requires
                        that the database has already been initialized or
                        the request will fail.
        """
        httplib.HTTPConnection.__init__(self, host, port, strict)
        NSSAddressFamilyFallback.__init__(self, family)
        if not dbdir:
            raise RuntimeError("dbdir is required")
        root_logger.debug('%s init %s', self.__class__.__name__, host)
        # NOTE(review): original indentation was lost in this dump; the
        # placement of the nss_init/policy/callback calls below is
        # reconstructed — confirm against upstream nsslib.py.
        if not no_init and nss.nss_is_initialized():
            # close any open NSS database and use the new one
            ssl.clear_session_cache()
            try:
                nss.nss_shutdown()
            except NSPRError, e:
                if e.errno != error.SEC_ERROR_NOT_INITIALIZED:
                    raise e
        nss.nss_init(dbdir)
        ssl.set_domestic_policy()
        nss.set_password_callback(self.password_callback)

    def _create_socket(self):
        # TODO: remove the try block once python-nss is guaranteed to contain
        # these values
        try:
            #pylint: disable=E1101
            ssl_enable_renegotiation = ssl.SSL_ENABLE_RENEGOTIATION
            ssl_require_safe_negotiation = ssl.SSL_REQUIRE_SAFE_NEGOTIATION
            ssl_renegotiate_requires_xtn = ssl.SSL_RENEGOTIATE_REQUIRES_XTN
        except:
            # fall back to the numeric constants used by older python-nss
            ssl_enable_renegotiation = 20
            ssl_require_safe_negotiation = 21
            ssl_renegotiate_requires_xtn = 2
        # Create the socket here so we can do things like let the caller
        # override the NSS callbacks
        self.sock = ssl.SSLSocket(family=self.family)
        self.sock.set_ssl_option(ssl.SSL_SECURITY, True)
        self.sock.set_ssl_option(ssl.SSL_HANDSHAKE_AS_CLIENT, True)
        self.sock.set_ssl_option(ssl_require_safe_negotiation, False)
        self.sock.set_ssl_option(ssl_enable_renegotiation, ssl_renegotiate_requires_xtn)
        # Provide a callback which notifies us when the SSL handshake is complete
        self.sock.set_handshake_callback(self.handshake_callback)
        # Provide a callback to verify the servers certificate
        self.sock.set_auth_certificate_callback(auth_certificate_callback,
                                                nss.get_default_certdb())
        self.sock.set_hostname(self.host)

    def password_callback(self, slot, retry, password):
        # reuse the supplied password on the first attempt, otherwise prompt
        if not retry and password: return password
        return getpass.getpass("Enter password for %s: " % slot.token_name);

    def handshake_callback(self, sock):
        """
        Verify callback. If we get here then the certificate is ok.
        """
        root_logger.debug("handshake complete, peer = %s", sock.get_peer_name())
        pass

    def connect(self):
        self.connect_socket(self.host, self.port)

    def close(self):
        """Close the connection to the HTTP server."""
        if self.sock:
            self.sock.close()   # close it manually... there may be other refs
            self.sock = None
        ssl.clear_session_cache()

    def endheaders(self, message=None):
        """
        Explicitly close the connection if an error is returned after the
        headers are sent. This will likely mean the initial SSL handshake
        failed. If this isn't done then the connection is never closed and
        subsequent NSS activities will fail with a BUSY error.
        """
        try:
            # FIXME: httplib uses old-style classes so super doesn't work
            # Python 2.7 changed the API for endheaders. This is an attempt
            # to work across versions
            (major, minor, micro, releaselevel, serial) = sys.version_info
            if major == 2 and minor < 7:
                httplib.HTTPConnection.endheaders(self)
            else:
                httplib.HTTPConnection.endheaders(self, message)
        except NSPRError, e:
            self.close()
            raise e
class NSSHTTPS(httplib.HTTP):
    """Legacy httplib.HTTP wrapper that uses NSSConnection for transport."""
    # We would like to use HTTP 1.1 not the older HTTP 1.0 but xmlrpclib
    # and httplib do not play well together. httplib when the protocol
    # is 1.1 will add a host header in the request. But xmlrpclib
    # always adds a host header regardless of the HTTP protocol
    # version. That means the request ends up with 2 host headers,
    # but Apache freaks out if it sees 2 host headers, a known Apache
    # issue. httplib has a mechanism to skip adding the host header
    # (i.e. skip_host in HTTPConnection.putrequest()) but xmlrpclib
    # doesn't use it. Oh well, back to 1.0 :-(
    #
    #_http_vsn = 11
    #_http_vsn_str = 'HTTP/1.1'

    _connection_class = NSSConnection

    def __init__(self, host='', port=None, strict=None, dbdir=None, no_init=False):
        # provide a default host, pass the X509 cert info
        # compensate for bad input: port 0 means "use the default port"
        if port == 0:
            port = None
        self._setup(self._connection_class(host, port, strict, dbdir=dbdir, no_init=no_init))

    def getreply(self):
        """
        Override so we can close duplicated file connection on non-200
        responses. This was causing nss_shutdown() to fail with a busy
        error.
        """
        (status, reason, msg) = httplib.HTTP.getreply(self)
        if status != 200:
            self.file.close()
        return (status, reason, msg)
#------------------------------------------------------------------------------
if __name__ == "__main__":
    # Ad-hoc smoke test (Python 2 only): fetch https://www.verisign.com/
    # using the NSSConnection and/or NSSHTTPS wrappers, logging to nsslib.log.
    standard_logging_setup('nsslib.log', debug=True, filemode='a')
    root_logger.info("Start")

    if False:
        # low-level NSSConnection variant, disabled by default
        conn = NSSConnection("www.verisign.com", 443, dbdir=paths.NSS_DB_DIR)
        conn.set_debuglevel(1)
        conn.connect()
        conn.request("GET", "/")
        response = conn.getresponse()
        print response.status
        #print response.msg
        print response.getheaders()
        data = response.read()
        #print data
        conn.close()

    if True:
        # NSSHTTPS (httplib.HTTP-style) variant
        h = NSSHTTPS("www.verisign.com", 443, dbdir=paths.NSS_DB_DIR)
        h.connect()
        h.putrequest('GET', '/')
        h.endheaders()
        http_status, http_reason, headers = h.getreply()
        print "status = %s %s" % (http_status, http_reason)
        print "headers:\n%s" % headers
        f = h.getfile()
        data = f.read() # Get the raw HTML
        f.close()
        #print data

BIN
ipapython/nsslib.pyc Normal file

Binary file not shown.

View File

@@ -0,0 +1,20 @@
# Python 2 site-packages directory (overridable: make PYTHONLIBDIR=...).
# Note: ?= creates a recursively-expanded variable, so the shell command
# runs each time the variable is referenced.
PYTHONLIBDIR ?= $(shell python2 -c "from distutils.sysconfig import *; print get_python_lib()")
# Install locations; currently unused in this Makefile — presumably kept
# for consistency with the sibling ipapython Makefile (TODO confirm).
PACKAGEDIR ?= $(DESTDIR)/$(PYTHONLIBDIR)/ipa
CONFIGDIR ?= $(DESTDIR)/etc/ipa

# None of these targets correspond to files; declare them phony so a stray
# file named e.g. "clean" cannot silently mask the target.
.PHONY: all install clean distclean maintainer-clean

all:
	python2 setup.py build

install:
	if [ "$(DESTDIR)" = "" ]; then \
		python2 setup.py install; \
	else \
		python2 setup.py install --root $(DESTDIR); \
	fi

clean:
	rm -rf build

distclean: clean

maintainer-clean: distclean

View File

@@ -0,0 +1,57 @@
/*
* Authors:
* John Dennis <jdennis@redhat.com>
*
* Copyright (C) 2009 Red Hat
* see file 'COPYING' for use and warranty information
*
* This program is free software you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <Python.h>
PyDoc_STRVAR(setdefaultencoding_doc,
"setdefaultencoding(encoding='utf-8')\n\
\n\
Set the current default string encoding used by the Unicode implementation.\n\
Defaults to utf-8."
);
/*
 * setdefaultencoding(encoding) -> None
 *
 * Switch the interpreter-wide default string encoding (Python 2 only:
 * PyUnicode_SetDefaultEncoding was removed in Python 3).
 *
 * Bug fix: the keyword list previously contained "utf-8" instead of the
 * parameter name, so the documented keyword form
 * setdefaultencoding(encoding=...) could never be used.  Note the "s"
 * format makes the argument required despite the docstring's
 * encoding='utf-8' default — TODO confirm intended default handling.
 */
static PyObject *
setdefaultencoding(PyObject *self, PyObject *args, PyObject *kwds)
{
    /* keyword names must match the documented parameter names */
    static char *kwlist[] = {"encoding", NULL};
    char *encoding;

    if (!PyArg_ParseTupleAndKeywords(args, kwds, "s:setdefaultencoding",
                                     kwlist, &encoding))
        return NULL;

    /* returns nonzero with an exception set on failure */
    if (PyUnicode_SetDefaultEncoding(encoding))
        return NULL;

    Py_RETURN_NONE;
}
/* Module method table: exposes setdefaultencoding() to Python. */
static PyMethodDef methods[] = {
    {"setdefaultencoding", (PyCFunction)setdefaultencoding, METH_VARARGS|METH_KEYWORDS, setdefaultencoding_doc},
    {NULL, NULL} /* sentinel */
};

/* Python 2 module initializer: merely importing the module switches the
 * interpreter-wide default string encoding to utf-8 before registering
 * the module's methods. */
PyMODINIT_FUNC
initdefault_encoding_utf8(void)
{
    PyUnicode_SetDefaultEncoding("utf-8");
    Py_InitModule3("default_encoding_utf8", methods, "Forces the default encoding to utf-8");
}

View File

@@ -0,0 +1,45 @@
# Authors:
# John Dennis <jdennis@redhat.com>
#
# Copyright (C) 2009 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from distutils.core import setup, Extension
from distutils.sysconfig import get_python_inc
import sys
import os
# Fail early with a clear message when the CPython development headers are
# missing; compiling the extension below would otherwise die with an
# opaque compiler error.
python_header = os.path.join(get_python_inc(plat_specific=0), 'Python.h')
if not os.path.exists(python_header):
    sys.exit("Cannot find Python development packages that provide Python.h")

# The C extension built from default_encoding_utf8.c
default_encoding_utf8 = Extension('default_encoding_utf8', ['default_encoding_utf8.c'])

setup(name = 'python-default-encoding',
      version = '0.1',
      description = 'Forces the default encoding in Python to be utf-8',
      long_description = 'Forces the default encoding in Python to be utf-8',
      author = 'John Dennis',
      author_email = 'jdennis@redhat.com',
      maintainer = 'John Dennis',
      maintainer_email = 'jdennis@redhat.com',
      license = 'GPLv3+',
      platforms = 'posix',
      url = '',
      download_url = '',
      ext_modules = [default_encoding_utf8],
)

View File

@@ -1,392 +0,0 @@
#
# Copyright (C) 2017 FreeIPA Contributors see COPYING for license
#
import ctypes
import sys
KRB5_CC_NOSUPP = -1765328137
if sys.platform == 'darwin':
LIBKRB5_FILENAME = 'libkrb5.dylib'
else:
LIBKRB5_FILENAME = 'libkrb5.so.3'
try:
LIBKRB5 = ctypes.CDLL(LIBKRB5_FILENAME)
except OSError as e: # pragma: no cover
raise ImportError(str(e))
krb5_int32 = ctypes.c_int32
krb5_error_code = krb5_int32
krb5_magic = krb5_error_code
krb5_enctype = krb5_int32
krb5_octet = ctypes.c_uint8
krb5_timestamp = krb5_int32
class _krb5_context(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_context"""
_fields_ = []
class _krb5_ccache(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_ccache"""
_fields_ = []
class _krb5_data(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_data"""
_fields_ = [
("magic", krb5_magic),
("length", ctypes.c_uint),
("data", ctypes.c_char_p),
]
class krb5_principal_data(ctypes.Structure): # noqa
"""krb5/krb5.h struct krb5_principal_data"""
_fields_ = []
class _krb5_keyblock(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_keyblock"""
_fields_ = [
("magic", krb5_magic),
("enctype", krb5_enctype),
("length", ctypes.c_uint),
("contents", ctypes.POINTER(krb5_octet))
]
class _krb5_ticket_times(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_ticket_times"""
_fields_ = [
("authtime", krb5_timestamp),
("starttime", krb5_timestamp),
("endtime", krb5_timestamp),
("renew_till", krb5_timestamp),
]
class _krb5_address(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_address"""
_fields_ = []
class _krb5_authdata(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_authdata"""
_fields_ = []
krb5_principal = ctypes.POINTER(krb5_principal_data)
krb5_keyblock = _krb5_keyblock
krb5_ticket_times = _krb5_ticket_times
krb5_boolean = ctypes.c_uint
krb5_flags = krb5_int32
krb5_data = _krb5_data
krb5_address_p = ctypes.POINTER(_krb5_address)
krb5_authdata_p = ctypes.POINTER(_krb5_authdata)
class _krb5_creds(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_creds"""
_fields_ = [
("magic", krb5_magic),
("client", krb5_principal),
("server", krb5_principal),
("keyblock", krb5_keyblock),
("times", krb5_ticket_times),
("is_skey", krb5_boolean),
("ticket_flags", krb5_flags),
("addresses", ctypes.POINTER(krb5_address_p)),
("ticket", krb5_data),
("second_ticket", krb5_data),
("authdata", ctypes.POINTER(krb5_authdata_p))
]
class KRB5Error(Exception):
    """Raised when a libkrb5 call returns a nonzero error code."""


def krb5_errcheck(result, func, arguments):
    """ctypes ``errcheck`` hook for libkrb5 functions.

    :param result: integer status code returned by the C function
    :param func: the ctypes function object that was called
    :param arguments: the argument tuple it was called with
    :raises KRB5Error: when the status code is nonzero
    """
    if result == 0:
        # success: return None so ctypes leaves the call result alone
        return None
    raise KRB5Error(result, func.__name__, arguments)
krb5_context = ctypes.POINTER(_krb5_context)
krb5_ccache = ctypes.POINTER(_krb5_ccache)
krb5_data_p = ctypes.POINTER(_krb5_data)
krb5_error = ctypes.c_int32
krb5_creds = _krb5_creds
krb5_pointer = ctypes.c_void_p
krb5_cc_cursor = krb5_pointer
krb5_init_context = LIBKRB5.krb5_init_context
krb5_init_context.argtypes = (ctypes.POINTER(krb5_context), )
krb5_init_context.restype = krb5_error
krb5_init_context.errcheck = krb5_errcheck
krb5_free_context = LIBKRB5.krb5_free_context
krb5_free_context.argtypes = (krb5_context, )
krb5_free_context.restype = None
krb5_free_principal = LIBKRB5.krb5_free_principal
krb5_free_principal.argtypes = (krb5_context, krb5_principal)
krb5_free_principal.restype = None
krb5_free_data_contents = LIBKRB5.krb5_free_data_contents
krb5_free_data_contents.argtypes = (krb5_context, krb5_data_p)
krb5_free_data_contents.restype = None
krb5_cc_default = LIBKRB5.krb5_cc_default
krb5_cc_default.argtypes = (krb5_context, ctypes.POINTER(krb5_ccache), )
krb5_cc_default.restype = krb5_error
krb5_cc_default.errcheck = krb5_errcheck
krb5_cc_close = LIBKRB5.krb5_cc_close
krb5_cc_close.argtypes = (krb5_context, krb5_ccache, )
krb5_cc_close.restype = krb5_error
krb5_cc_close.errcheck = krb5_errcheck
krb5_parse_name = LIBKRB5.krb5_parse_name
krb5_parse_name.argtypes = (krb5_context, ctypes.c_char_p,
ctypes.POINTER(krb5_principal), )
krb5_parse_name.restype = krb5_error
krb5_parse_name.errcheck = krb5_errcheck
krb5_cc_set_config = LIBKRB5.krb5_cc_set_config
krb5_cc_set_config.argtypes = (krb5_context, krb5_ccache, krb5_principal,
ctypes.c_char_p, krb5_data_p, )
krb5_cc_set_config.restype = krb5_error
krb5_cc_set_config.errcheck = krb5_errcheck
krb5_cc_get_principal = LIBKRB5.krb5_cc_get_principal
krb5_cc_get_principal.argtypes = (krb5_context, krb5_ccache,
ctypes.POINTER(krb5_principal), )
krb5_cc_get_principal.restype = krb5_error
krb5_cc_get_principal.errcheck = krb5_errcheck
# krb5_build_principal is a variadic function but that can't be expressed
# in a ctypes argtypes definition, so I explicitly listed the number of
# arguments we actually use through the code for type checking purposes
krb5_build_principal = LIBKRB5.krb5_build_principal
krb5_build_principal.argtypes = (krb5_context, ctypes.POINTER(krb5_principal),
ctypes.c_uint, ctypes.c_char_p,
ctypes.c_char_p, ctypes.c_char_p,
ctypes.c_char_p, ctypes.c_char_p, )
krb5_build_principal.restype = krb5_error
krb5_build_principal.errcheck = krb5_errcheck
krb5_cc_start_seq_get = LIBKRB5.krb5_cc_start_seq_get
krb5_cc_start_seq_get.argtypes = (krb5_context, krb5_ccache,
ctypes.POINTER(krb5_cc_cursor), )
krb5_cc_start_seq_get.restype = krb5_error
krb5_cc_start_seq_get.errcheck = krb5_errcheck
krb5_cc_next_cred = LIBKRB5.krb5_cc_next_cred
krb5_cc_next_cred.argtypes = (krb5_context, krb5_ccache,
ctypes.POINTER(krb5_cc_cursor),
ctypes.POINTER(krb5_creds), )
krb5_cc_next_cred.restype = krb5_error
krb5_cc_next_cred.errcheck = krb5_errcheck
krb5_cc_end_seq_get = LIBKRB5.krb5_cc_end_seq_get
krb5_cc_end_seq_get.argtypes = (krb5_context, krb5_ccache,
ctypes.POINTER(krb5_cc_cursor), )
krb5_cc_end_seq_get.restype = krb5_error
krb5_cc_end_seq_get.errcheck = krb5_errcheck
krb5_free_cred_contents = LIBKRB5.krb5_free_cred_contents
krb5_free_cred_contents.argtypes = (krb5_context, ctypes.POINTER(krb5_creds))
krb5_free_cred_contents.restype = krb5_error
krb5_free_cred_contents.errcheck = krb5_errcheck
krb5_principal_compare = LIBKRB5.krb5_principal_compare
krb5_principal_compare.argtypes = (krb5_context, krb5_principal,
krb5_principal, )
krb5_principal_compare.restype = krb5_boolean
krb5_unparse_name = LIBKRB5.krb5_unparse_name
krb5_unparse_name.argtypes = (krb5_context, krb5_principal,
ctypes.POINTER(ctypes.c_char_p), )
krb5_unparse_name.restype = krb5_error
krb5_unparse_name.errcheck = krb5_errcheck
krb5_free_unparsed_name = LIBKRB5.krb5_free_unparsed_name
krb5_free_unparsed_name.argtypes = (krb5_context, ctypes.c_char_p, )
krb5_free_unparsed_name.restype = None
CONF_REALM = b"X-CACHECONF:"
CONF_NAME = b"krb5_ccache_conf_data"
def store_data(princ_name, key, value):
    """
    Stores the session cookie in a hidden ccache entry.

    :param princ_name: client principal name (text or bytes)
    :param key: ccache config key the cookie is stored under (text or bytes)
    :param value: cookie value to store (text or bytes)

    The entry is written with krb5_cc_set_config() into the default
    credential cache.  FILE ccaches only ever append, so the current
    value is read back first and the write is skipped when the entry
    is already up to date.

    Raises KRB5Error (via the errcheck hooks) if a libkrb5 call fails.
    """
    if not isinstance(princ_name, bytes):
        princ_name = princ_name.encode('utf-8')
    if not isinstance(key, bytes):
        # encode with utf-8 for consistency with get_data()/remove_data();
        # previously 'ascii' was used here, which produces identical bytes
        # for ASCII keys but raised UnicodeEncodeError for keys the other
        # two functions would have accepted
        key = key.encode('utf-8')
    if not isinstance(value, bytes):
        value = value.encode('utf-8')
    # FILE ccaches grow every time an entry is stored, so we need
    # to avoid storing the same entry multiple times.
    oldvalue = get_data(princ_name, key)
    if oldvalue == value:
        return
    context = krb5_context()
    principal = krb5_principal()
    ccache = krb5_ccache()
    try:
        krb5_init_context(ctypes.byref(context))
        krb5_parse_name(context, ctypes.c_char_p(princ_name),
                        ctypes.byref(principal))
        krb5_cc_default(context, ctypes.byref(ccache))
        buf = ctypes.create_string_buffer(value)
        data = _krb5_data()
        data.data = buf.value
        # NOTE(review): len(buf) includes the NUL terminator appended by
        # create_string_buffer(), so the stored length is len(value) + 1;
        # kept as-is for compatibility with entries written by earlier
        # versions of this code
        data.length = len(buf)
        krb5_cc_set_config(context, ccache, principal, key,
                           ctypes.byref(data))
    finally:
        # release libkrb5 handles; each may still be unset (falsy) if an
        # earlier call failed, so test before freeing
        if principal:
            krb5_free_principal(context, principal)
        if ccache:
            krb5_cc_close(context, ccache)
        if context:
            krb5_free_context(context)
def get_data(princ_name, key):
    """
    Gets the session cookie in a hidden ccache entry.

    :param princ_name: client principal name (text or bytes)
    :param key: ccache config key the cookie was stored under
                (text or bytes)

    Returns the raw bytes of the stored value (the matching credential's
    ticket field), or None if no matching entry is found.

    Raises KRB5Error (via the errcheck hooks) if a libkrb5 call fails,
    e.g. when there is no default ccache or it has no principal.
    """
    if not isinstance(princ_name, bytes):
        princ_name = princ_name.encode('utf-8')
    if not isinstance(key, bytes):
        key = key.encode('utf-8')
    context = krb5_context()
    principal = krb5_principal()
    srv_princ = krb5_principal()
    ccache = krb5_ccache()
    pname_princ = krb5_principal()
    pname = ctypes.c_char_p()
    try:
        krb5_init_context(ctypes.byref(context))
        krb5_cc_default(context, ctypes.byref(ccache))
        krb5_cc_get_principal(context, ccache, ctypes.byref(principal))
        # We need to parse and then unparse the name in case the pric_name
        # passed in comes w/o a realm attached
        krb5_parse_name(context, ctypes.c_char_p(princ_name),
                        ctypes.byref(pname_princ))
        krb5_unparse_name(context, pname_princ, ctypes.byref(pname))
        # build the pseudo server principal the config entry is stored
        # under: key and normalized principal name become components of a
        # principal in the special CONF_REALM / CONF_NAME namespace
        krb5_build_principal(context, ctypes.byref(srv_princ),
                             len(CONF_REALM), ctypes.c_char_p(CONF_REALM),
                             ctypes.c_char_p(CONF_NAME), ctypes.c_char_p(key),
                             pname, ctypes.c_char_p(None))
        # Unfortunately we can't just use krb5_cc_get_config()
        # because of bugs in some ccache handling code in krb5
        # libraries that would always return the first entry
        # stored and not the last one, which is the one we want.
        cursor = krb5_cc_cursor()
        creds = krb5_creds()
        got_creds = False
        krb5_cc_start_seq_get(context, ccache, ctypes.byref(cursor))
        try:
            while True:
                checkcreds = krb5_creds()
                # the next function will throw an error and break out of the
                # while loop when we try to access past the last cred
                krb5_cc_next_cred(context, ccache, ctypes.byref(cursor),
                                  ctypes.byref(checkcreds))
                if (krb5_principal_compare(context, principal,
                                           checkcreds.client) == 1 and
                    krb5_principal_compare(context, srv_princ,
                                           checkcreds.server) == 1):
                    if got_creds:
                        # drop the previously matched entry; a later one
                        # supersedes it
                        krb5_free_cred_contents(context, ctypes.byref(creds))
                    creds = checkcreds
                    got_creds = True
                    # We do not stop here, as we want the LAST entry
                    # in the ccache for those ccaches that cannot delete
                    # but only always append, like FILE
                else:
                    krb5_free_cred_contents(context,
                                            ctypes.byref(checkcreds))
        except KRB5Error:
            # raised by krb5_cc_next_cred when the cursor moves past the
            # last credential -- this is the normal loop exit
            pass
        finally:
            krb5_cc_end_seq_get(context, ccache, ctypes.byref(cursor))
        if got_creds:
            data = creds.ticket.data
            krb5_free_cred_contents(context, ctypes.byref(creds))
            return data
    finally:
        # release libkrb5 handles; each may still be unset (falsy) if an
        # earlier call failed, so test before freeing
        if principal:
            krb5_free_principal(context, principal)
        if srv_princ:
            krb5_free_principal(context, srv_princ)
        if pname_princ:
            krb5_free_principal(context, pname_princ)
        if pname:
            krb5_free_unparsed_name(context, pname)
        if ccache:
            krb5_cc_close(context, ccache)
        if context:
            krb5_free_context(context)
def remove_data(princ_name, key):
    """
    Removes the hidden ccache entry with the session cookie.

    :param princ_name: client principal name (text or bytes)
    :param key: ccache config key the cookie was stored under
                (text or bytes)

    Removal is a no-op for ccache types that cannot delete entries
    (KRB5_CC_NOSUPP, e.g. FILE ccaches).  Any other KRB5Error is
    propagated to the caller.
    """
    if not isinstance(princ_name, bytes):
        princ_name = princ_name.encode('utf-8')
    if not isinstance(key, bytes):
        key = key.encode('utf-8')
    context = krb5_context()
    principal = krb5_principal()
    ccache = krb5_ccache()
    try:
        krb5_init_context(ctypes.byref(context))
        krb5_parse_name(context, ctypes.c_char_p(princ_name),
                        ctypes.byref(principal))
        krb5_cc_default(context, ctypes.byref(ccache))
        try:
            # a NULL data argument asks krb5_cc_set_config() to remove
            # the entry
            krb5_cc_set_config(context, ccache, principal, key, None)
        except KRB5Error as e:
            if e.args[0] == KRB5_CC_NOSUPP:
                # removal not supported with this CC type, just pass
                pass
            else:
                # bug fix: previously every KRB5Error was silently
                # swallowed here; unexpected failures now propagate
                raise
    finally:
        # release libkrb5 handles; each may still be unset (falsy) if an
        # earlier call failed, so test before freeing
        if principal:
            krb5_free_principal(context, principal)
        if ccache:
            krb5_cc_close(context, ccache)
        if context:
            krb5_free_context(context)

View File

@@ -1,5 +0,0 @@
[bdist_wheel]
universal = 1
[metadata]
license_file = ../COPYING

81
ipapython/setup.py Executable file → Normal file
View File

@@ -20,36 +20,57 @@
FreeIPA is a server for identity, policy, and audit.
"""
from os.path import abspath, dirname
DOCLINES = __doc__.split("\n")
import os
import sys
import distutils.sysconfig
CLASSIFIERS = """\
Development Status :: 4 - Beta
Intended Audience :: System Environment/Base
License :: GPL
Programming Language :: Python
Operating System :: POSIX
Operating System :: Unix
"""
# BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
# update it when the contents of directories change.
if os.path.exists('MANIFEST'): os.remove('MANIFEST')
def setup_package():
from distutils.core import setup
old_path = os.getcwd()
local_path = os.path.dirname(os.path.abspath(sys.argv[0]))
os.chdir(local_path)
sys.path.insert(0,local_path)
try:
setup(
name = "ipapython",
version = "4.0.5",
license = "GPL",
author = "Karl MacMillan, et.al.",
author_email = "kmacmill@redhat.com",
maintainer = "freeIPA Developers",
maintainer_email = "freeipa-devel@redhat.com",
url = "http://www.freeipa.org/",
description = DOCLINES[0],
long_description = "\n".join(DOCLINES[2:]),
download_url = "http://www.freeipa.org/page/Downloads",
classifiers=filter(None, CLASSIFIERS.split('\n')),
platforms = ["Linux", "Solaris", "Unix"],
package_dir = {'ipapython': ''},
packages = [ "ipapython" ],
)
finally:
del sys.path[0]
os.chdir(old_path)
return
if __name__ == '__main__':
# include ../ for ipasetup.py
sys.path.append(dirname(dirname(abspath(__file__))))
from ipasetup import ipasetup # noqa: E402
ipasetup(
name="ipapython",
doc=__doc__,
package_dir={'ipapython': ''},
packages=[
"ipapython",
"ipapython.install"
],
install_requires=[
"cffi",
"cryptography",
"dnspython",
"gssapi",
# "ipalib", # circular dependency
"ipaplatform",
"netaddr",
"netifaces",
"python-ldap",
"six",
],
extras_require={
":python_version<'3'": ["enum34"],
"install": ["dbus-python"], # for certmonger
},
)
setup_package()

76
ipapython/setup.py.in Normal file
View File

@@ -0,0 +1,76 @@
#!/usr/bin/python2
# Copyright (C) 2007 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""FreeIPA python support library
FreeIPA is a server for identity, policy, and audit.
"""
DOCLINES = __doc__.split("\n")
import os
import sys
import distutils.sysconfig
CLASSIFIERS = """\
Development Status :: 4 - Beta
Intended Audience :: System Environment/Base
License :: GPL
Programming Language :: Python
Operating System :: POSIX
Operating System :: Unix
"""
# BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
# update it when the contents of directories change.
if os.path.exists('MANIFEST'): os.remove('MANIFEST')
def setup_package():
from distutils.core import setup
old_path = os.getcwd()
local_path = os.path.dirname(os.path.abspath(sys.argv[0]))
os.chdir(local_path)
sys.path.insert(0,local_path)
try:
setup(
name = "ipapython",
version = "__VERSION__",
license = "GPL",
author = "Karl MacMillan, et.al.",
author_email = "kmacmill@redhat.com",
maintainer = "freeIPA Developers",
maintainer_email = "freeipa-devel@redhat.com",
url = "http://www.freeipa.org/",
description = DOCLINES[0],
long_description = "\n".join(DOCLINES[2:]),
download_url = "http://www.freeipa.org/page/Downloads",
classifiers=filter(None, CLASSIFIERS.split('\n')),
platforms = ["Linux", "Solaris", "Unix"],
package_dir = {'ipapython': ''},
packages = [ "ipapython" ],
)
finally:
del sys.path[0]
os.chdir(old_path)
return
if __name__ == '__main__':
setup_package()

View File

@@ -25,13 +25,8 @@ SSH utilities.
import base64
import re
import struct
from hashlib import sha1
from hashlib import sha256 # pylint: disable=E0611
import six
if six.PY3:
unicode = str
from hashlib import md5, sha1
from hashlib import sha256 #pylint: disable=E0611
__all__ = ['SSHPublicKey']
@@ -53,16 +48,15 @@ class SSHPublicKey(object):
self._options = key._options
return
if not isinstance(key, (bytes, unicode)):
raise TypeError("argument must be bytes or unicode, got %s" % type(key).__name__)
if not isinstance(key, (str, unicode)):
raise TypeError("argument must be str or unicode, got %s" % type(key).__name__)
# All valid public key blobs start with 3 null bytes (see RFC 4253
# section 6.6, RFC 4251 section 5 and RFC 4250 section 4.6)
if isinstance(key, bytes) and key[:3] != b'\0\0\0':
if isinstance(key, str) and key[:3] != '\0\0\0':
key = key.decode(encoding)
valid = self._parse_raw(key) or self._parse_base64(key) or self._parse_openssh(key)
if not valid:
raise ValueError("not a valid SSH public key")
@@ -72,7 +66,7 @@ class SSHPublicKey(object):
self._options = options
def _parse_raw(self, key):
if not isinstance(key, bytes):
if not isinstance(key, str):
return False
try:
@@ -101,7 +95,7 @@ class SSHPublicKey(object):
try:
key = base64.b64decode(key)
except (TypeError, ValueError):
except TypeError:
return False
return self._parse_raw(key)
@@ -169,8 +163,7 @@ class SSHPublicKey(object):
return bool(self._options)
def openssh(self):
key = base64.b64encode(self._key).decode('ascii')
out = u'%s %s' % (self._keytype, key)
out = u'%s %s' % (self._keytype, base64.b64encode(self._key))
if self._options:
options = []
@@ -190,10 +183,10 @@ class SSHPublicKey(object):
return out
def fingerprint_hex_sha256(self):
# OpenSSH trims the trailing '=' of base64 sha256 FP representation
fp = base64.b64encode(sha256(self._key).digest()).rstrip(b'=')
return u'SHA256:{fp}'.format(fp=fp.decode('utf-8'))
def fingerprint_hex_md5(self):
fp = md5(self._key).hexdigest().upper()
fp = u':'.join([fp[j:j+2] for j in range(0, len(fp), 2)])
return fp
def _fingerprint_dns(self, fpfunc, fptype):
if self._keytype == 'ssh-rsa':
@@ -202,8 +195,6 @@ class SSHPublicKey(object):
keytype = 2
elif self._keytype.startswith('ecdsa-sha2-') and '@' not in self._keytype:
keytype = 3
elif self._keytype == 'ssh-ed25519':
keytype = 4
else:
return
fp = fpfunc(self._key).hexdigest().upper()

BIN
ipapython/ssh.pyc Normal file

Binary file not shown.

424
ipapython/sysrestore.py Normal file
View File

@@ -0,0 +1,424 @@
# Authors: Mark McLoughlin <markmc@redhat.com>
#
# Copyright (C) 2007 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# This module provides a very simple API which allows
# ipa-xxx-install --uninstall to restore certain
# parts of the system configuration to the way it was
# before ipa-server-install was first run
import os
import os.path
import shutil
from ipapython.ipa_log_manager import *
import ConfigParser
import random
import string
from ipapython import ipautil
from ipaplatform.tasks import tasks
from ipaplatform.paths import paths
SYSRESTORE_PATH = paths.TMP
SYSRESTORE_INDEXFILE = "sysrestore.index"
SYSRESTORE_STATEFILE = "sysrestore.state"
class FileStore:
"""Class for handling backup and restore of files"""
def __init__(self, path = SYSRESTORE_PATH, index_file = SYSRESTORE_INDEXFILE):
"""Create a _StoreFiles object, that uses @path as the
base directory.
The file @path/sysrestore.index is used to store information
about the original location of the saved files.
"""
self._path = path
self._index = os.path.join(self._path, index_file)
self.random = random.Random()
self.files = {}
self._load()
def _load(self):
"""Load the file list from the index file. @files will
be an empty dictionary if the file doesn't exist.
"""
root_logger.debug("Loading Index file from '%s'", self._index)
self.files = {}
p = ConfigParser.SafeConfigParser()
p.read(self._index)
for section in p.sections():
if section == "files":
for (key, value) in p.items(section):
self.files[key] = value
def save(self):
"""Save the file list to @_index. If @files is an empty
dict, then @_index should be removed.
"""
root_logger.debug("Saving Index File to '%s'", self._index)
if len(self.files) == 0:
root_logger.debug(" -> no files, removing file")
if os.path.exists(self._index):
os.remove(self._index)
return
p = ConfigParser.SafeConfigParser()
p.add_section('files')
for (key, value) in self.files.items():
p.set('files', key, str(value))
f = file(self._index, "w")
p.write(f)
f.close()
def backup_file(self, path):
"""Create a copy of the file at @path - so long as a copy
does not already exist - which will be restored to its
original location by restore_files().
"""
root_logger.debug("Backing up system configuration file '%s'", path)
if not os.path.isabs(path):
raise ValueError("Absolute path required")
if not os.path.isfile(path):
root_logger.debug(" -> Not backing up - '%s' doesn't exist", path)
return
(reldir, backupfile) = os.path.split(path)
filename = ""
for i in range(8):
h = "%02x" % self.random.randint(0,255)
filename += h
filename += "-"+backupfile
backup_path = os.path.join(self._path, filename)
if os.path.exists(backup_path):
root_logger.debug(" -> Not backing up - already have a copy of '%s'", path)
return
shutil.copy2(path, backup_path)
stat = os.stat(path)
self.files[filename] = string.join([str(stat.st_mode),str(stat.st_uid),str(stat.st_gid),path], ',')
self.save()
def has_file(self, path):
"""Checks whether file at @path was added to the file store
Returns #True if the file exists in the file store, #False otherwise
"""
result = False
for (key, value) in self.files.items():
(mode,uid,gid,filepath) = string.split(value, ',', 3)
if (filepath == path):
result = True
break
return result
def restore_file(self, path, new_path = None):
"""Restore the copy of a file at @path to its original
location and delete the copy.
Takes optional parameter @new_path which specifies the
location where the file is to be restored.
Returns #True if the file was restored, #False if there
was no backup file to restore
"""
if new_path is None:
root_logger.debug("Restoring system configuration file '%s'", path)
else:
root_logger.debug("Restoring system configuration file '%s' to '%s'", path, new_path)
if not os.path.isabs(path):
raise ValueError("Absolute path required")
if new_path is not None and not os.path.isabs(new_path):
raise ValueError("Absolute new path required")
mode = None
uid = None
gid = None
filename = None
for (key, value) in self.files.items():
(mode,uid,gid,filepath) = string.split(value, ',', 3)
if (filepath == path):
filename = key
break
if not filename:
raise ValueError("No such file name in the index")
backup_path = os.path.join(self._path, filename)
if not os.path.exists(backup_path):
root_logger.debug(" -> Not restoring - '%s' doesn't exist", backup_path)
return False
if new_path is not None:
path = new_path
shutil.move(backup_path, path)
os.chown(path, int(uid), int(gid))
os.chmod(path, int(mode))
tasks.restore_context(path)
del self.files[filename]
self.save()
return True
def restore_all_files(self):
"""Restore the files in the inbdex to their original
location and delete the copy.
Returns #True if the file was restored, #False if there
was no backup file to restore
"""
if len(self.files) == 0:
return False
for (filename, value) in self.files.items():
(mode,uid,gid,path) = string.split(value, ',', 3)
backup_path = os.path.join(self._path, filename)
if not os.path.exists(backup_path):
root_logger.debug(" -> Not restoring - '%s' doesn't exist", backup_path)
continue
shutil.move(backup_path, path)
os.chown(path, int(uid), int(gid))
os.chmod(path, int(mode))
tasks.restore_context(path)
#force file to be deleted
self.files = {}
self.save()
return True
def has_files(self):
"""Return True or False if there are any files in the index
Can be used to determine if a program is configured.
"""
return len(self.files) > 0
def untrack_file(self, path):
"""Remove file at path @path from list of backed up files.
Does not remove any files from the filesystem.
Returns #True if the file was untracked, #False if there
was no backup file to restore
"""
root_logger.debug("Untracking system configuration file '%s'", path)
if not os.path.isabs(path):
raise ValueError("Absolute path required")
mode = None
uid = None
gid = None
filename = None
for (key, value) in self.files.items():
(mode,uid,gid,filepath) = string.split(value, ',', 3)
if (filepath == path):
filename = key
break
if not filename:
raise ValueError("No such file name in the index")
backup_path = os.path.join(self._path, filename)
if not os.path.exists(backup_path):
root_logger.debug(" -> Not restoring - '%s' doesn't exist", backup_path)
return False
try:
os.unlink(backup_path)
except Exception, e:
root_logger.error('Error removing %s: %s' % (backup_path, str(e)))
del self.files[filename]
self.save()
return True
class StateFile:
"""A metadata file for recording system state which can
be backed up and later restored. The format is something
like:
[httpd]
running=True
enabled=False
"""
def __init__(self, path = SYSRESTORE_PATH, state_file = SYSRESTORE_STATEFILE):
"""Create a StateFile object, loading from @path.
The dictionary @modules, a member of the returned object,
is where the state can be modified. @modules is indexed
using a module name to return another dictionary containing
key/value pairs with the saved state of that module.
The keys in these latter dictionaries are arbitrary strings
and the values may either be strings or booleans.
"""
self._path = os.path.join(path, state_file)
self.modules = {}
self._load()
def _load(self):
"""Load the modules from the file @_path. @modules will
be an empty dictionary if the file doesn't exist.
"""
root_logger.debug("Loading StateFile from '%s'", self._path)
self.modules = {}
p = ConfigParser.SafeConfigParser()
p.read(self._path)
for module in p.sections():
self.modules[module] = {}
for (key, value) in p.items(module):
if value == str(True):
value = True
elif value == str(False):
value = False
self.modules[module][key] = value
def save(self):
"""Save the modules to @_path. If @modules is an empty
dict, then @_path should be removed.
"""
root_logger.debug("Saving StateFile to '%s'", self._path)
for module in self.modules.keys():
if len(self.modules[module]) == 0:
del self.modules[module]
if len(self.modules) == 0:
root_logger.debug(" -> no modules, removing file")
if os.path.exists(self._path):
os.remove(self._path)
return
p = ConfigParser.SafeConfigParser()
for module in self.modules.keys():
p.add_section(module)
for (key, value) in self.modules[module].items():
p.set(module, key, str(value))
f = file(self._path, "w")
p.write(f)
f.close()
def backup_state(self, module, key, value):
"""Backup an item of system state from @module, identified
by the string @key and with the value @value. @value may be
a string or boolean.
"""
if not isinstance(value, (str, bool, unicode)):
raise ValueError("Only strings, booleans or unicode strings are supported")
if not self.modules.has_key(module):
self.modules[module] = {}
if not self.modules.has_key(key):
self.modules[module][key] = value
self.save()
def get_state(self, module, key):
"""Return the value of an item of system state from @module,
identified by the string @key.
If the item doesn't exist, #None will be returned, otherwise
the original string or boolean value is returned.
"""
if not self.modules.has_key(module):
return None
return self.modules[module].get(key, None)
def delete_state(self, module, key):
"""Delete system state from @module, identified by the string
@key.
If the item doesn't exist, no change is done.
"""
try:
del self.modules[module][key]
except KeyError:
pass
else:
self.save()
def restore_state(self, module, key):
"""Return the value of an item of system state from @module,
identified by the string @key, and remove it from the backed
up system state.
If the item doesn't exist, #None will be returned, otherwise
the original string or boolean value is returned.
"""
value = self.get_state(module, key)
if value is not None:
self.delete_state(module, key)
return value
def has_state(self, module):
"""Return True or False if there is any state stored for @module.
Can be used to determine if a service is configured.
"""
if self.modules.has_key(module):
return True
else:
return False

BIN
ipapython/sysrestore.pyc Normal file

Binary file not shown.

View File

@@ -18,10 +18,10 @@
#
# The full version including strings
VERSION="4.6.2"
VERSION="4.0.5"
# A fuller version including the vendor tag (e.g. 3.3.3-34.fc20)
VENDOR_VERSION="4.6.2"
VENDOR_VERSION="4.0.5"
# Just the numeric portion of the version so one can do direct numeric
@@ -41,579 +41,8 @@ VENDOR_VERSION="4.6.2"
# IPA 3.2.1: NUM_VERSION=30201
# IPA 3.2.99: NUM_VERSION=30299 (development version)
# IPA 3.3.0: NUM_VERSION=30300
NUM_VERSION=40602
NUM_VERSION=40005
# The version of the API.
API_VERSION=u'2.229'
DEFAULT_PLUGINS = frozenset(l.strip() for l in """
aci/1
aci_add/1
aci_del/1
aci_find/1
aci_mod/1
aci_rename/1
aci_show/1
adtrust_is_enabled/1
automember/1
automember_add/1
automember_add_condition/1
automember_default_group/1
automember_default_group_remove/1
automember_default_group_set/1
automember_default_group_show/1
automember_del/1
automember_find/1
automember_mod/1
automember_rebuild/1
automember_remove_condition/1
automember_show/1
automember_task/1
automountkey/1
automountkey_add/1
automountkey_del/1
automountkey_find/1
automountkey_mod/1
automountkey_show/1
automountlocation/1
automountlocation_add/1
automountlocation_del/1
automountlocation_find/1
automountlocation_show/1
automountlocation_tofiles/1
automountmap/1
automountmap_add/1
automountmap_add_indirect/1
automountmap_del/1
automountmap_find/1
automountmap_mod/1
automountmap_show/1
batch/1
ca/1
ca_add/1
ca_del/1
ca_disable/1
ca_enable/1
ca_find/1
ca_is_enabled/1
ca_mod/1
ca_show/1
caacl/1
caacl_add/1
caacl_add_ca/1
caacl_add_host/1
caacl_add_profile/1
caacl_add_service/1
caacl_add_user/1
caacl_del/1
caacl_disable/1
caacl_enable/1
caacl_find/1
caacl_mod/1
caacl_remove_ca/1
caacl_remove_host/1
caacl_remove_profile/1
caacl_remove_service/1
caacl_remove_user/1
caacl_show/1
cert/1
cert_find/1
cert_remove_hold/1
cert_request/1
cert_revoke/1
cert_show/1
cert_status/1
certmap/1
certmap_match/1
certmapconfig/1
certmapconfig_mod/1
certmapconfig_show/1
certmaprule/1
certmaprule_add/1
certmaprule_del/1
certmaprule_disable/1
certmaprule_enable/1
certmaprule_find/1
certmaprule_mod/1
certmaprule_show/1
certprofile/1
certprofile_del/1
certprofile_find/1
certprofile_import/1
certprofile_mod/1
certprofile_show/1
certreq/1
class/1
class_find/1
class_show/1
command/1
command_defaults/1
command_find/1
command_show/1
compat_is_enabled/1
config/1
config_mod/1
config_show/1
cosentry/1
cosentry_add/1
cosentry_del/1
cosentry_find/1
cosentry_mod/1
cosentry_show/1
delegation/1
delegation_add/1
delegation_del/1
delegation_find/1
delegation_mod/1
delegation_show/1
dns_is_enabled/1
dns_resolve/1
dns_system_records/1
dns_update_system_records/1
dnsa6record/1
dnsaaaarecord/1
dnsafsdbrecord/1
dnsaplrecord/1
dnsarecord/1
dnscertrecord/1
dnscnamerecord/1
dnsconfig/1
dnsconfig_mod/1
dnsconfig_show/1
dnsdhcidrecord/1
dnsdlvrecord/1
dnsdnamerecord/1
dnsdsrecord/1
dnsforwardzone/1
dnsforwardzone_add/1
dnsforwardzone_add_permission/1
dnsforwardzone_del/1
dnsforwardzone_disable/1
dnsforwardzone_enable/1
dnsforwardzone_find/1
dnsforwardzone_mod/1
dnsforwardzone_remove_permission/1
dnsforwardzone_show/1
dnshiprecord/1
dnsipseckeyrecord/1
dnskeyrecord/1
dnskxrecord/1
dnslocrecord/1
dnsmxrecord/1
dnsnaptrrecord/1
dnsnsecrecord/1
dnsnsrecord/1
dnsptrrecord/1
dnsrecord/1
dnsrecord_add/1
dnsrecord_del/1
dnsrecord_delentry/1
dnsrecord_find/1
dnsrecord_mod/1
dnsrecord_show/1
dnsrecord_split_parts/1
dnsrprecord/1
dnsrrsigrecord/1
dnsserver/1
dnsserver_add/1
dnsserver_del/1
dnsserver_find/1
dnsserver_mod/1
dnsserver_show/1
dnssigrecord/1
dnsspfrecord/1
dnssrvrecord/1
dnssshfprecord/1
dnstlsarecord/1
dnstxtrecord/1
dnsurirecord/1
dnszone/1
dnszone_add/1
dnszone_add_permission/1
dnszone_del/1
dnszone_disable/1
dnszone_enable/1
dnszone_find/1
dnszone_mod/1
dnszone_remove_permission/1
dnszone_show/1
domainlevel_get/1
domainlevel_set/1
env/1
group/1
group_add/1
group_add_member/1
group_del/1
group_detach/1
group_find/1
group_mod/1
group_remove_member/1
group_show/1
hbacrule/1
hbacrule_add/1
hbacrule_add_host/1
hbacrule_add_service/1
hbacrule_add_sourcehost/1
hbacrule_add_user/1
hbacrule_del/1
hbacrule_disable/1
hbacrule_enable/1
hbacrule_find/1
hbacrule_mod/1
hbacrule_remove_host/1
hbacrule_remove_service/1
hbacrule_remove_sourcehost/1
hbacrule_remove_user/1
hbacrule_show/1
hbacsvc/1
hbacsvc_add/1
hbacsvc_del/1
hbacsvc_find/1
hbacsvc_mod/1
hbacsvc_show/1
hbacsvcgroup/1
hbacsvcgroup_add/1
hbacsvcgroup_add_member/1
hbacsvcgroup_del/1
hbacsvcgroup_find/1
hbacsvcgroup_mod/1
hbacsvcgroup_remove_member/1
hbacsvcgroup_show/1
hbactest/1
host/1
host_add/1
host_add_cert/1
host_add_managedby/1
host_add_principal/1
host_allow_create_keytab/1
host_allow_retrieve_keytab/1
host_del/1
host_disable/1
host_disallow_create_keytab/1
host_disallow_retrieve_keytab/1
host_find/1
host_mod/1
host_remove_cert/1
host_remove_managedby/1
host_remove_principal/1
host_show/1
hostgroup/1
hostgroup_add/1
hostgroup_add_member/1
hostgroup_del/1
hostgroup_find/1
hostgroup_mod/1
hostgroup_remove_member/1
hostgroup_show/1
i18n_messages/1
idoverridegroup/1
idoverridegroup_add/1
idoverridegroup_del/1
idoverridegroup_find/1
idoverridegroup_mod/1
idoverridegroup_show/1
idoverrideuser/1
idoverrideuser_add/1
idoverrideuser_add_cert/1
idoverrideuser_del/1
idoverrideuser_find/1
idoverrideuser_mod/1
idoverrideuser_remove_cert/1
idoverrideuser_show/1
idrange/1
idrange_add/1
idrange_del/1
idrange_find/1
idrange_mod/1
idrange_show/1
idview/1
idview_add/1
idview_apply/1
idview_del/1
idview_find/1
idview_mod/1
idview_show/1
idview_unapply/1
join/1
json_metadata/1
kra_is_enabled/1
krbtpolicy/1
krbtpolicy_mod/1
krbtpolicy_reset/1
krbtpolicy_show/1
location/1
location_add/1
location_del/1
location_find/1
location_mod/1
location_show/1
metaobject/1
migrate_ds/1
netgroup/1
netgroup_add/1
netgroup_add_member/1
netgroup_del/1
netgroup_find/1
netgroup_mod/1
netgroup_remove_member/1
netgroup_show/1
otpconfig/1
otpconfig_mod/1
otpconfig_show/1
otptoken/1
otptoken_add/1
otptoken_add_managedby/1
otptoken_del/1
otptoken_find/1
otptoken_mod/1
otptoken_remove_managedby/1
otptoken_show/1
output/1
output_find/1
output_show/1
param/1
param_find/1
param_show/1
passwd/1
permission/1
permission_add/1
permission_add_member/1
permission_add_noaci/1
permission_del/1
permission_find/1
permission_mod/1
permission_remove_member/1
permission_show/1
ping/1
pkinit/1
pkinit_status/1
plugins/1
privilege/1
privilege_add/1
privilege_add_member/1
privilege_add_permission/1
privilege_del/1
privilege_find/1
privilege_mod/1
privilege_remove_member/1
privilege_remove_permission/1
privilege_show/1
pwpolicy/1
pwpolicy_add/1
pwpolicy_del/1
pwpolicy_find/1
pwpolicy_mod/1
pwpolicy_show/1
radiusproxy/1
radiusproxy_add/1
radiusproxy_del/1
radiusproxy_find/1
radiusproxy_mod/1
radiusproxy_show/1
realmdomains/1
realmdomains_mod/1
realmdomains_show/1
role/1
role_add/1
role_add_member/1
role_add_privilege/1
role_del/1
role_find/1
role_mod/1
role_remove_member/1
role_remove_privilege/1
role_show/1
schema/1
selfservice/1
selfservice_add/1
selfservice_del/1
selfservice_find/1
selfservice_mod/1
selfservice_show/1
selinuxusermap/1
selinuxusermap_add/1
selinuxusermap_add_host/1
selinuxusermap_add_user/1
selinuxusermap_del/1
selinuxusermap_disable/1
selinuxusermap_enable/1
selinuxusermap_find/1
selinuxusermap_mod/1
selinuxusermap_remove_host/1
selinuxusermap_remove_user/1
selinuxusermap_show/1
server/1
server_conncheck/1
server_del/1
server_find/1
server_mod/1
server_role/1
server_role_find/1
server_role_show/1
server_show/1
service/1
service_add/1
service_add_cert/1
service_add_host/1
service_add_principal/1
service_allow_create_keytab/1
service_allow_retrieve_keytab/1
service_del/1
service_disable/1
service_disallow_create_keytab/1
service_disallow_retrieve_keytab/1
service_find/1
service_mod/1
service_remove_cert/1
service_remove_host/1
service_remove_principal/1
service_show/1
servicedelegationrule/1
servicedelegationrule_add/1
servicedelegationrule_add_member/1
servicedelegationrule_add_target/1
servicedelegationrule_del/1
servicedelegationrule_find/1
servicedelegationrule_remove_member/1
servicedelegationrule_remove_target/1
servicedelegationrule_show/1
servicedelegationtarget/1
servicedelegationtarget_add/1
servicedelegationtarget_add_member/1
servicedelegationtarget_del/1
servicedelegationtarget_find/1
servicedelegationtarget_remove_member/1
servicedelegationtarget_show/1
servrole/1
session_logout/1
sidgen_was_run/1
stageuser/1
stageuser_activate/1
stageuser_add/1
stageuser_add_cert/1
stageuser_add_certmapdata/1
stageuser_add_manager/1
stageuser_add_principal/1
stageuser_del/1
stageuser_find/1
stageuser_mod/1
stageuser_remove_cert/1
stageuser_remove_certmapdata/1
stageuser_remove_manager/1
stageuser_remove_principal/1
stageuser_show/1
sudocmd/1
sudocmd_add/1
sudocmd_del/1
sudocmd_find/1
sudocmd_mod/1
sudocmd_show/1
sudocmdgroup/1
sudocmdgroup_add/1
sudocmdgroup_add_member/1
sudocmdgroup_del/1
sudocmdgroup_find/1
sudocmdgroup_mod/1
sudocmdgroup_remove_member/1
sudocmdgroup_show/1
sudorule/1
sudorule_add/1
sudorule_add_allow_command/1
sudorule_add_deny_command/1
sudorule_add_host/1
sudorule_add_option/1
sudorule_add_runasgroup/1
sudorule_add_runasuser/1
sudorule_add_user/1
sudorule_del/1
sudorule_disable/1
sudorule_enable/1
sudorule_find/1
sudorule_mod/1
sudorule_remove_allow_command/1
sudorule_remove_deny_command/1
sudorule_remove_host/1
sudorule_remove_option/1
sudorule_remove_runasgroup/1
sudorule_remove_runasuser/1
sudorule_remove_user/1
sudorule_show/1
topic/1
topic_find/1
topic_show/1
topologysegment/1
topologysegment_add/1
topologysegment_del/1
topologysegment_find/1
topologysegment_mod/1
topologysegment_reinitialize/1
topologysegment_show/1
topologysuffix/1
topologysuffix_add/1
topologysuffix_del/1
topologysuffix_find/1
topologysuffix_mod/1
topologysuffix_show/1
topologysuffix_verify/1
trust/1
trust_add/1
trust_del/1
trust_fetch_domains/1
trust_find/1
trust_mod/1
trust_resolve/1
trust_show/1
trustconfig/1
trustconfig_mod/1
trustconfig_show/1
trustdomain/1
trustdomain_add/1
trustdomain_del/1
trustdomain_disable/1
trustdomain_enable/1
trustdomain_find/1
trustdomain_mod/1
user/1
user_add/1
user_add_cert/1
user_add_certmapdata/1
user_add_manager/1
user_add_principal/1
user_del/1
user_disable/1
user_enable/1
user_find/1
user_mod/1
user_remove_cert/1
user_remove_certmapdata/1
user_remove_manager/1
user_remove_principal/1
user_show/1
user_stage/1
user_status/1
user_undel/1
user_unlock/1
userstatus/1
vault/1
vault_add_internal/1
vault_add_member/1
vault_add_owner/1
vault_archive_internal/1
vault_del/1
vault_find/1
vault_mod_internal/1
vault_remove_member/1
vault_remove_owner/1
vault_retrieve_internal/1
vault_show/1
vaultconfig/1
vaultconfig_show/1
vaultcontainer/1
vaultcontainer_add_owner/1
vaultcontainer_del/1
vaultcontainer_remove_owner/1
vaultcontainer_show/1
whoami/1
""".strip().splitlines())
API_VERSION=u'2.101'

View File

@@ -18,10 +18,10 @@
#
# The full version including strings
VERSION="@VERSION@"
VERSION="__VERSION__"
# A fuller version including the vendor tag (e.g. 3.3.3-34.fc20)
VENDOR_VERSION="@VERSION@@VENDOR_SUFFIX@"
VENDOR_VERSION="__VENDOR_VERSION__"
# Just the numeric portion of the version so one can do direct numeric
@@ -41,13 +41,8 @@ VENDOR_VERSION="@VERSION@@VENDOR_SUFFIX@"
# IPA 3.2.1: NUM_VERSION=30201
# IPA 3.2.99: NUM_VERSION=30299 (development version)
# IPA 3.3.0: NUM_VERSION=30300
NUM_VERSION=@NUM_VERSION@
NUM_VERSION=__NUM_VERSION__
# The version of the API.
API_VERSION=u'@API_VERSION@'
DEFAULT_PLUGINS = frozenset(l.strip() for l in """
@DEFAULT_PLUGINS@
""".strip().splitlines())
API_VERSION=u'__API_VERSION__'

BIN
ipapython/version.pyc Normal file

Binary file not shown.