Imported Upstream version 4.6.2

This commit is contained in:
Mario Fetka
2021-07-25 07:32:41 +02:00
commit 8ff3be4216
1788 changed files with 1900965 additions and 0 deletions

23
ipapython/Makefile.am Normal file
View File

@@ -0,0 +1,23 @@
# Shared automake glue for IPA Python packages (build/install/dist hooks).
include $(top_srcdir)/Makefile.python.am

# .DEFAULT_PLUGINS is derived from API.txt; remove it on mostlyclean.
MOSTLYCLEANFILES = .DEFAULT_PLUGINS
EXTRA_DIST = version.py.in

# version.py must be generated before the shared build/dist/install/wheel
# targets (defined in Makefile.python.am) run.
all-local: version.py
dist-hook: version.py
install-exec-local: version.py
bdist_wheel: version.py

# Extract the default plugin list: the second field of every
# "default: <name>" line in API.txt.
# (Fix: recipe lines below were missing the mandatory leading TAB,
# which makes GNU make fail with "missing separator".)
.DEFAULT_PLUGINS: $(top_srcdir)/API.txt
	$(AM_V_GEN)awk '$$1 == "default:" { print $$2 }' $< >$@

# Fill in version/vendor placeholders and splice the default plugin list
# into version.py; regenerated whenever config.status changes.
version.py: version.py.in .DEFAULT_PLUGINS $(top_builddir)/$(CONFIG_STATUS)
	$(AM_V_GEN)sed \
		-e 's|@API_VERSION[@]|$(API_VERSION)|g' \
		-e 's|@NUM_VERSION[@]|$(NUM_VERSION)|g' \
		-e 's|@VERSION[@]|$(VERSION)|g' \
		-e 's|@VENDOR_SUFFIX[@]|$(VENDOR_SUFFIX)|g' \
		-e '/@DEFAULT_PLUGINS[@]/r .DEFAULT_PLUGINS' \
		-e '/@DEFAULT_PLUGINS[@]/d' \
		$< > $@

639
ipapython/Makefile.in Normal file
View File

@@ -0,0 +1,639 @@
# Makefile.in generated by automake 1.15.1 from Makefile.am.
# @configure_input@
# Copyright (C) 1994-2017 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
@SET_MAKE@
VPATH = @srcdir@
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/@PACKAGE@
pkgincludedir = $(includedir)/@PACKAGE@
pkglibdir = $(libdir)/@PACKAGE@
pkglibexecdir = $(libexecdir)/@PACKAGE@
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
build_triplet = @build@
host_triplet = @host@
subdir = ipapython
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \
$(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/intlmacosx.m4 \
$(top_srcdir)/m4/lib-ld.m4 $(top_srcdir)/m4/lib-link.m4 \
$(top_srcdir)/m4/lib-prefix.m4 $(top_srcdir)/m4/libtool.m4 \
$(top_srcdir)/m4/ltoptions.m4 $(top_srcdir)/m4/ltsugar.m4 \
$(top_srcdir)/m4/ltversion.m4 $(top_srcdir)/m4/lt~obsolete.m4 \
$(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \
$(top_srcdir)/m4/progtest.m4 $(top_srcdir)/VERSION.m4 \
$(top_srcdir)/server.m4 $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = $(top_builddir)/config.h
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_@AM_V@)
am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
SOURCES =
DIST_SOURCES =
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
am__DIST_COMMON = $(srcdir)/Makefile.in \
$(top_srcdir)/Makefile.python.am README
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
API_VERSION = @API_VERSION@
AR = @AR@
AUTOCONF = @AUTOCONF@
AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
CC = @CC@
CCDEPMODE = @CCDEPMODE@
CFLAGS = @CFLAGS@
CMOCKA_CFLAGS = @CMOCKA_CFLAGS@
CMOCKA_LIBS = @CMOCKA_LIBS@
CONFIG_STATUS = @CONFIG_STATUS@
CPP = @CPP@
CPPFLAGS = @CPPFLAGS@
CRYPTO_CFLAGS = @CRYPTO_CFLAGS@
CRYPTO_LIBS = @CRYPTO_LIBS@
CYGPATH_W = @CYGPATH_W@
DATA_VERSION = @DATA_VERSION@
DEFS = @DEFS@
DEPDIR = @DEPDIR@
DIRSRV_CFLAGS = @DIRSRV_CFLAGS@
DIRSRV_LIBS = @DIRSRV_LIBS@
DLLTOOL = @DLLTOOL@
DSYMUTIL = @DSYMUTIL@
DUMPBIN = @DUMPBIN@
ECHO_C = @ECHO_C@
ECHO_N = @ECHO_N@
ECHO_T = @ECHO_T@
EGREP = @EGREP@
EXEEXT = @EXEEXT@
FGREP = @FGREP@
GETTEXT_DOMAIN = @GETTEXT_DOMAIN@
GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@
GIT_BRANCH = @GIT_BRANCH@
GIT_VERSION = @GIT_VERSION@
GMSGFMT = @GMSGFMT@
GMSGFMT_015 = @GMSGFMT_015@
GREP = @GREP@
INI_CFLAGS = @INI_CFLAGS@
INI_LIBS = @INI_LIBS@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
INTLLIBS = @INTLLIBS@
INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@
IPAPLATFORM = @IPAPLATFORM@
IPA_DATA_DIR = @IPA_DATA_DIR@
IPA_SYSCONF_DIR = @IPA_SYSCONF_DIR@
JSLINT = @JSLINT@
KRAD_LIBS = @KRAD_LIBS@
KRB5KDC_SERVICE = @KRB5KDC_SERVICE@
KRB5_CFLAGS = @KRB5_CFLAGS@
KRB5_LIBS = @KRB5_LIBS@
LD = @LD@
LDAP_CFLAGS = @LDAP_CFLAGS@
LDAP_LIBS = @LDAP_LIBS@
LDFLAGS = @LDFLAGS@
LIBICONV = @LIBICONV@
LIBINTL = @LIBINTL@
LIBINTL_LIBS = @LIBINTL_LIBS@
LIBOBJS = @LIBOBJS@
LIBPDB_NAME = @LIBPDB_NAME@
LIBS = @LIBS@
LIBTOOL = @LIBTOOL@
LIBVERTO_CFLAGS = @LIBVERTO_CFLAGS@
LIBVERTO_LIBS = @LIBVERTO_LIBS@
LIPO = @LIPO@
LN_S = @LN_S@
LTLIBICONV = @LTLIBICONV@
LTLIBINTL = @LTLIBINTL@
LTLIBOBJS = @LTLIBOBJS@
LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@
MAKEINFO = @MAKEINFO@
MANIFEST_TOOL = @MANIFEST_TOOL@
MKDIR_P = @MKDIR_P@
MK_ASSIGN = @MK_ASSIGN@
MK_ELSE = @MK_ELSE@
MK_ENDIF = @MK_ENDIF@
MK_IFEQ = @MK_IFEQ@
MSGATTRIB = @MSGATTRIB@
MSGFMT = @MSGFMT@
MSGFMT_015 = @MSGFMT_015@
MSGMERGE = @MSGMERGE@
NAMED_GROUP = @NAMED_GROUP@
NDRNBT_CFLAGS = @NDRNBT_CFLAGS@
NDRNBT_LIBS = @NDRNBT_LIBS@
NDRPAC_CFLAGS = @NDRPAC_CFLAGS@
NDRPAC_LIBS = @NDRPAC_LIBS@
NDR_CFLAGS = @NDR_CFLAGS@
NDR_LIBS = @NDR_LIBS@
NM = @NM@
NMEDIT = @NMEDIT@
NSPR_CFLAGS = @NSPR_CFLAGS@
NSPR_LIBS = @NSPR_LIBS@
NSS_CFLAGS = @NSS_CFLAGS@
NSS_LIBS = @NSS_LIBS@
NUM_VERSION = @NUM_VERSION@
OBJDUMP = @OBJDUMP@
OBJEXT = @OBJEXT@
ODS_USER = @ODS_USER@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
PACKAGE = @PACKAGE@
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_STRING = @PACKAGE_STRING@
PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_URL = @PACKAGE_URL@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
PKG_CONFIG = @PKG_CONFIG@
PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@
PKG_CONFIG_PATH = @PKG_CONFIG_PATH@
POPT_CFLAGS = @POPT_CFLAGS@
POPT_LIBS = @POPT_LIBS@
POSUB = @POSUB@
PYLINT = @PYLINT@
PYTHON = @PYTHON@
PYTHON2 = @PYTHON2@
PYTHON3 = @PYTHON3@
PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@
PYTHON_INSTALL_EXTRA_OPTIONS = @PYTHON_INSTALL_EXTRA_OPTIONS@
PYTHON_PLATFORM = @PYTHON_PLATFORM@
PYTHON_PREFIX = @PYTHON_PREFIX@
PYTHON_VERSION = @PYTHON_VERSION@
RANLIB = @RANLIB@
SAMBA40EXTRA_LIBPATH = @SAMBA40EXTRA_LIBPATH@
SAMBAUTIL_CFLAGS = @SAMBAUTIL_CFLAGS@
SAMBAUTIL_LIBS = @SAMBAUTIL_LIBS@
SASL_CFLAGS = @SASL_CFLAGS@
SASL_LIBS = @SASL_LIBS@
SED = @SED@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
SSSCERTMAP_CFLAGS = @SSSCERTMAP_CFLAGS@
SSSCERTMAP_LIBS = @SSSCERTMAP_LIBS@
SSSIDMAP_CFLAGS = @SSSIDMAP_CFLAGS@
SSSIDMAP_LIBS = @SSSIDMAP_LIBS@
SSSNSSIDMAP_CFLAGS = @SSSNSSIDMAP_CFLAGS@
SSSNSSIDMAP_LIBS = @SSSNSSIDMAP_LIBS@
STRIP = @STRIP@
TALLOC_CFLAGS = @TALLOC_CFLAGS@
TALLOC_LIBS = @TALLOC_LIBS@
TEVENT_CFLAGS = @TEVENT_CFLAGS@
TEVENT_LIBS = @TEVENT_LIBS@
UNISTRING_LIBS = @UNISTRING_LIBS@
UNLINK = @UNLINK@
USE_NLS = @USE_NLS@
UUID_CFLAGS = @UUID_CFLAGS@
UUID_LIBS = @UUID_LIBS@
VENDOR_SUFFIX = @VENDOR_SUFFIX@
VERSION = @VERSION@
XGETTEXT = @XGETTEXT@
XGETTEXT_015 = @XGETTEXT_015@
XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@
XMLRPC_CFLAGS = @XMLRPC_CFLAGS@
XMLRPC_LIBS = @XMLRPC_LIBS@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
ac_ct_AR = @ac_ct_AR@
ac_ct_CC = @ac_ct_CC@
ac_ct_DUMPBIN = @ac_ct_DUMPBIN@
am__include = @am__include@
am__leading_dot = @am__leading_dot@
am__quote = @am__quote@
am__tar = @am__tar@
am__untar = @am__untar@
bindir = @bindir@
build = @build@
build_alias = @build_alias@
build_cpu = @build_cpu@
build_os = @build_os@
build_vendor = @build_vendor@
builddir = @builddir@
datadir = @datadir@
datarootdir = @datarootdir@
docdir = @docdir@
dvidir = @dvidir@
exec_prefix = @exec_prefix@
host = @host@
host_alias = @host_alias@
host_cpu = @host_cpu@
host_os = @host_os@
host_vendor = @host_vendor@
htmldir = @htmldir@
i18ntests = @i18ntests@
includedir = @includedir@
infodir = @infodir@
install_sh = @install_sh@
krb5rundir = @krb5rundir@
libdir = @libdir@
libexecdir = @libexecdir@
localedir = @localedir@
localstatedir = @localstatedir@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
pdfdir = @pdfdir@
pkgpyexecdir = @pkgpyexecdir@
pkgpythondir = $(pythondir)/$(pkgname)
prefix = @prefix@
program_transform_name = @program_transform_name@
psdir = @psdir@
pyexecdir = @pyexecdir@
pythondir = @pythondir@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@
sysconfenvdir = @sysconfenvdir@
systemdsystemunitdir = @systemdsystemunitdir@
systemdtmpfilesdir = @systemdtmpfilesdir@
target_alias = @target_alias@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
pkgname = $(shell basename "$(abs_srcdir)")
@VERBOSE_MAKE_FALSE@VERBOSITY = "--quiet"
@VERBOSE_MAKE_TRUE@VERBOSITY = "--verbose"
WHEELDISTDIR = $(top_builddir)/dist/wheels
MOSTLYCLEANFILES = .DEFAULT_PLUGINS
EXTRA_DIST = version.py.in
all: all-am
.SUFFIXES:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(top_srcdir)/Makefile.python.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign ipapython/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign ipapython/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \
esac;
$(top_srcdir)/Makefile.python.am $(am__empty):
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
mostlyclean-libtool:
-rm -f *.lo
clean-libtool:
-rm -rf .libs _libs
tags TAGS:
ctags CTAGS:
cscope cscopelist:
distdir: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
$(MAKE) $(AM_MAKEFLAGS) \
top_distdir="$(top_distdir)" distdir="$(distdir)" \
dist-hook
check-am: all-am
check: check-am
all-am: Makefile all-local
installdirs:
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
-test -z "$(MOSTLYCLEANFILES)" || rm -f $(MOSTLYCLEANFILES)
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic clean-libtool clean-local mostlyclean-am
distclean: distclean-am
-rm -f Makefile
distclean-am: clean-am distclean-generic
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am:
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am: install-exec-local
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man:
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-generic mostlyclean-libtool
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-local
.MAKE: install-am install-strip
.PHONY: all all-am all-local check check-am clean clean-generic \
clean-libtool clean-local cscopelist-am ctags-am dist-hook \
distclean distclean-generic distclean-libtool distdir dvi \
dvi-am html html-am info info-am install install-am \
install-data install-data-am install-dvi install-dvi-am \
install-exec install-exec-am install-exec-local install-html \
install-html-am install-info install-info-am install-man \
install-pdf install-pdf-am install-ps install-ps-am \
install-strip installcheck installcheck-am installdirs \
maintainer-clean maintainer-clean-generic mostlyclean \
mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \
tags-am uninstall uninstall-am uninstall-local
.PRECIOUS: Makefile
# Hack to handle the back-in-the-hierarchy dependency on ipasetup.py:
# the top-level Makefile owns ipasetup.py, so always delegate its build.
.PHONY: $(top_builddir)/ipasetup.py
$(top_builddir)/ipasetup.py:
	(cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) ipasetup.py)

# Build the Python package in-tree via setup.py.
all-local: $(top_builddir)/ipasetup.py
	cd $(srcdir); $(PYTHON) setup.py \
		$(VERBOSITY) \
		build \
		--build-base "$(abs_builddir)/build"

# Install with setup.py unless pkginstall is explicitly "false".
install-exec-local: $(top_builddir)/ipasetup.py
	if [ "x$(pkginstall)" != "xfalse" ]; then \
		$(PYTHON) $(srcdir)/setup.py \
		$(VERBOSITY) \
		build \
		--build-base "$(abs_builddir)/build" \
		install \
		--prefix "$(DESTDIR)$(prefix)" \
		--single-version-externally-managed \
		--record "$(DESTDIR)$(pkgpythondir)/install_files.txt" \
		--optimize 1 \
		$(PYTHON_INSTALL_EXTRA_OPTIONS); \
	fi

# Remove everything setup.py recorded at install time, then the package dir.
uninstall-local:
	if [ -f "$(DESTDIR)$(pkgpythondir)/install_files.txt" ]; then \
		cat "$(DESTDIR)$(pkgpythondir)/install_files.txt" | xargs rm -rf ; \
	fi
	rm -rf "$(DESTDIR)$(pkgpythondir)"

# NOTE(review): "--all" below has no trailing backslash, so the following
# "--build-base" line runs as a separate shell command and will fail;
# it looks like the continuation was lost — verify against upstream.
clean-local: $(top_builddir)/ipasetup.py
	$(PYTHON) "$(srcdir)/setup.py" \
		clean \
		--all
	--build-base "$(abs_builddir)/build"
	rm -rf "$(srcdir)/build" "$(srcdir)/dist" "$(srcdir)/MANIFEST"
	find "$(srcdir)" \
		-name "*.py[co]" -delete -o \
		-name "__pycache__" -delete -o \
		-name "*.egg-info" -exec rm -rf {} +

# take list of all Python source files and copy them into distdir
# SOURCES.txt does not contain directories so we need to create those
dist-hook: $(top_builddir)/ipasetup.py
	$(PYTHON) "$(srcdir)/setup.py" egg_info
	PYTHON_SOURCES=$$(cat "$(srcdir)/$(pkgname).egg-info/SOURCES.txt") || exit $$?; \
	for FILEN in $${PYTHON_SOURCES}; \
	do \
		if test -x "$(srcdir)/$${FILEN}"; then MODE=755; else MODE=644; fi; \
		$(INSTALL) -D -m $${MODE} "$(srcdir)/$${FILEN}" "$(distdir)/$${FILEN}" || exit $$?; \
	done

# Build a wheel into the shared wheel distribution directory.
.PHONY: bdist_wheel
bdist_wheel: $(top_builddir)/ipasetup.py
	rm -rf $(WHEELDISTDIR)/$(pkgname)-*.whl
	$(PYTHON) "$(srcdir)/setup.py" \
		build \
		--build-base "$(abs_builddir)/build" \
		bdist_wheel \
		--dist-dir=$(WHEELDISTDIR)

# version.py must exist before the build/dist/install/wheel targets run.
all-local: version.py
dist-hook: version.py
install-exec-local: version.py
bdist_wheel: version.py

# Extract the default plugin list ("default: <name>" lines) from API.txt.
.DEFAULT_PLUGINS: $(top_srcdir)/API.txt
	$(AM_V_GEN)awk '$$1 == "default:" { print $$2 }' $< >$@

# Substitute version placeholders and splice in the default plugin list.
version.py: version.py.in .DEFAULT_PLUGINS $(top_builddir)/$(CONFIG_STATUS)
	$(AM_V_GEN)sed \
		-e 's|@API_VERSION[@]|$(API_VERSION)|g' \
		-e 's|@NUM_VERSION[@]|$(NUM_VERSION)|g' \
		-e 's|@VERSION[@]|$(VERSION)|g' \
		-e 's|@VENDOR_SUFFIX[@]|$(VENDOR_SUFFIX)|g' \
		-e '/@DEFAULT_PLUGINS[@]/r .DEFAULT_PLUGINS' \
		-e '/@DEFAULT_PLUGINS[@]/d' \
		$< > $@

# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:

15
ipapython/README Normal file
View File

@@ -0,0 +1,15 @@
This is a set of libraries common to IPA clients and servers, though it is
currently geared mostly towards command-line tools.
A brief overview:
config.py - identify the IPA server domain and realm. It uses python-dns to
try to detect this information first and will fall back to
/etc/ipa/default.conf if that fails.
ipautil.py - helper functions
entity.py - entity is the main data type. User and Group extend this class
(but don't add anything currently).
ipavalidate.py - basic data validation routines

0
ipapython/__init__.py Normal file
View File

309
ipapython/admintool.py Normal file
View File

@@ -0,0 +1,309 @@
# Authors:
# Petr Viktorin <pviktori@redhat.com>
#
# Copyright (C) 2012 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""A common framework for command-line admin tools, e.g. install scripts
Handles common operations like option parsing and logging
"""
import logging
import sys
import os
import traceback
from optparse import OptionGroup # pylint: disable=deprecated-module
from ipapython import version
from ipapython import config
from ipapython.ipa_log_manager import standard_logging_setup
logger = logging.getLogger(__name__)
class ScriptError(Exception):
    """Error carrying a message and a process exit code (``rval``)."""

    def __init__(self, msg='', rval=1):
        # Normalize None to an empty message so str(exc) is always a str.
        normalized = '' if msg is None else msg
        super(ScriptError, self).__init__(normalized)
        self.rval = rval

    @property
    def msg(self):
        # The message is whatever Exception stored as its string form.
        return str(self)
class AdminTool(object):
    """Base class for command-line admin tools.

    To run the tool, call the main() classmethod with a list of command-line
    arguments.
    Alternatively, call run_cli() to run with command-line arguments in
    sys.argv, and call sys.exit() with the return value.

    Some commands actually represent multiple related tools, e.g.
    ``ipa-server-install`` and ``ipa-server-install --uninstall`` would be
    represented by separate classes. Only their options are the same.

    To handle this, AdminTool provides classmethods for option parsing
    and selecting the appropriate command class.
    A class-wide option parser is made by calling add_options.
    The options are then parsed into options and arguments, and
    get_command_class is called with those to retrieve the class.
    That class is then instantiated and run.

    Running consists of a few steps:
    - validating options or the environment (validate_options)
    - setting up logging (setup_logging)
    - running the actual command (run)

    Any unhandled exceptions are handled in handle_error.
    And at the end, either log_success or log_failure is called.

    Class attributes to define in subclasses:
    command_name - shown in logs
    log_file_name - if None, logging is to stderr only
    usage - text shown in help
    description - text shown in help

    See the setup_logging method for more info on logging.
    """
    # Subclasses override these class attributes.
    command_name = None
    log_file_name = None
    usage = None
    description = None

    # Maps each concrete subclass to its option parser; see main().
    _option_parsers = dict()

    @classmethod
    def make_parser(cls):
        """Create an option parser shared across all instances of this class"""
        parser = config.IPAOptionParser(version=version.VERSION,
            usage=cls.usage, formatter=config.IPAFormatter(),
            description=cls.description)
        cls.option_parser = parser
        cls.add_options(parser)

    @classmethod
    def add_options(cls, parser, debug_option=False):
        """Add command-specific options to the option parser

        :param parser: The parser to add options to
        :param debug_option: Add a --debug option as an alias to --verbose
        """
        group = OptionGroup(parser, "Logging and output options")
        group.add_option("-v", "--verbose", dest="verbose", default=False,
            action="store_true", help="print debugging information")
        if debug_option:
            # --debug sets the same dest as --verbose; kept for compatibility.
            group.add_option("-d", "--debug", dest="verbose", default=False,
                action="store_true", help="alias for --verbose (deprecated)")
        group.add_option("-q", "--quiet", dest="quiet", default=False,
            action="store_true", help="output only errors")
        group.add_option("--log-file", dest="log_file", default=None,
            metavar="FILE", help="log to the given file")
        parser.add_option_group(group)

    @classmethod
    def run_cli(cls):
        """Run this command with sys.argv, exit process with the return value
        """
        sys.exit(cls.main(sys.argv))

    @classmethod
    def main(cls, argv):
        """The main entry point

        Parses command-line arguments, selects the actual command class to use
        based on them, and runs that command.

        :param argv: Command-line arguments.
        :return: Command exit code
        """
        if cls not in cls._option_parsers:
            # We use cls._option_parsers, a dictionary keyed on class, to check
            # if we need to create a parser. This is because cls.option_parser
            # can refer to the parser of a superclass.
            cls.make_parser()
            cls._option_parsers[cls] = cls.option_parser
        options, args = cls.option_parser.parse_args(argv[1:])
        command_class = cls.get_command_class(options, args)
        command = command_class(options, args)
        return command.execute()

    @classmethod
    def get_command_class(cls, options, args):
        # Hook for subclasses that dispatch to different command classes
        # based on the parsed options; default is the class itself.
        return cls

    def __init__(self, options, args):
        self.options = options
        self.args = args
        # Options with sensitive values masked, safe for logging.
        self.safe_options = self.option_parser.get_safe_opts(options)

    def execute(self):
        """Do everything needed after options are parsed

        This includes validating options, setting up logging, doing the
        actual work, and handling the result.

        :return: process exit code (0 on success)
        """
        # Console-only logging until options are validated and prompts done.
        self._setup_logging(no_file=True)
        return_value = 1
        try:
            self.validate_options()
            self.ask_for_options()
            self.setup_logging()
            return_value = self.run()
        except BaseException as exception:
            traceback = sys.exc_info()[2]
            error_message, return_value = self.handle_error(exception)
            if return_value:
                self.log_failure(error_message, return_value, exception,
                                 traceback)
                return return_value
        self.log_success()
        return return_value

    def validate_options(self, needs_root=False):
        """Validate self.options

        It's also possible to compute and store information that will be
        useful later, but no changes to the system should be made here.

        :param needs_root: require the tool to run as root
        :raises ScriptError: on invalid options or missing privileges
        """
        # NOTE(review): this checks the effective *group* id; an effective
        # *user* id check (os.geteuid) may have been intended — confirm.
        if needs_root and os.getegid() != 0:
            raise ScriptError('Must be root to run %s' % self.command_name, 1)
        if self.options.verbose and self.options.quiet:
            raise ScriptError(
                'The --quiet and --verbose options are mutually exclusive')

    def ask_for_options(self):
        """Ask for missing options interactively

        Similar to validate_options. This is separate method because we want
        any validation errors to abort the script before bothering the user
        with prompts.
        Any options that might be asked for should also be validated here.
        """
        pass

    def setup_logging(self, log_file_mode='w'):
        """Set up logging

        :param log_file_mode: file mode used when opening the log file
            (passed through to the logging setup as ``filemode``)

        If the --log-file option was given or if a filename is in
        self.log_file_name, the tool will log to that file. In this case,
        all messages are logged.

        What is logged to the console depends on command-line options:
        the default is INFO; --quiet sets ERROR; --verbose sets DEBUG.

        Rules of thumb for logging levels:
        - CRITICAL for fatal errors
        - ERROR for critical things that the admin must see, even with --quiet
        - WARNING for things that need to stand out in the log
        - INFO to display normal messages
        - DEBUG to spam about everything the program does
        - a plain print for things that should not be log (for example,
          interactive prompting)

        To log, use a module-level logger.

        Logging to file is only set up after option validation and prompting;
        before that, all output will go to the console only.
        """
        root_logger = logging.getLogger()
        # Drop the temporary stderr handler installed by _setup_logging so
        # the full configuration below does not produce duplicate output.
        for handler in root_logger.handlers:
            if (isinstance(handler, logging.StreamHandler) and
                    handler.stream is sys.stderr):  # pylint: disable=no-member
                root_logger.removeHandler(handler)
                break
        self._setup_logging(log_file_mode=log_file_mode)

    def _setup_logging(self, log_file_mode='w', no_file=False):
        # Internal helper: configure logging; with no_file=True only console
        # output is set up (used before options are validated).
        if no_file:
            log_file_name = None
        elif self.options.log_file:
            log_file_name = self.options.log_file
        else:
            log_file_name = self.log_file_name
        if self.options.verbose:
            # --verbose: DEBUG level with a more detailed console format.
            console_format = '%(name)s: %(levelname)s: %(message)s'
            verbose = True
            debug = True
        else:
            console_format = '%(message)s'
            debug = False
            if self.options.quiet:
                verbose = False
            else:
                verbose = True
        standard_logging_setup(
            log_file_name, console_format=console_format,
            filemode=log_file_mode, debug=debug, verbose=verbose)
        if log_file_name:
            logger.debug('Logging to %s', log_file_name)
        elif not no_file:
            logger.debug('Not logging to a file')

    def handle_error(self, exception):
        """Given an exception, return a message (or None) and process exit code
        """
        if isinstance(exception, ScriptError):
            # A zero rval still means failure here; coerce to 1.
            return exception.msg, exception.rval or 1
        elif isinstance(exception, SystemExit):
            if isinstance(exception.code, int):
                return None, exception.code
            return str(exception.code), 1
        return str(exception), 1

    def run(self):
        """Actual running of the command

        This is where the hard work is done. The base implementation logs
        the invocation of the command.

        If this method returns (i.e. doesn't raise an exception), the tool is
        assumed to have run successfully, and the return value is used as the
        SystemExit code.
        """
        logger.debug('%s was invoked with arguments %s and options: %s',
                     self.command_name, self.args, self.safe_options)
        logger.debug('IPA version %s', version.VENDOR_VERSION)

    def log_failure(self, error_message, return_value, exception, backtrace):
        # Full traceback only at DEBUG level; users see the short message.
        logger.debug('%s', ''.join(traceback.format_tb(backtrace)))
        logger.debug('The %s command failed, exception: %s: %s',
                     self.command_name, type(exception).__name__, exception)
        if error_message:
            logger.error('%s', error_message)
        message = "The %s command failed." % self.command_name
        if self.log_file_name:
            message += " See %s for more information" % self.log_file_name
        logger.error('%s', message)

    def log_success(self):
        logger.info('The %s command was successful', self.command_name)

730
ipapython/certdb.py Normal file
View File

@@ -0,0 +1,730 @@
# Authors: Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2009 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
import collections
import logging
import os
import io
import pwd
import grp
import re
import tempfile
from tempfile import NamedTemporaryFile
import shutil
import cryptography.x509
from ipaplatform.paths import paths
from ipapython.dn import DN
from ipapython.kerberos import Principal
from ipapython import ipautil
from ipalib import x509 # pylint: disable=ipa-forbidden-import
logger = logging.getLogger(__name__)
# Format string used to derive the CA certificate nickname for a realm.
CA_NICKNAME_FMT = "%s IPA CA"

# Files that make up a dbm-format NSS certificate database.
NSS_FILES = ("cert8.db", "key3.db", "secmod.db", "pwdfile.txt")

# certutil-style trust description:
#   has_key - a private key is present in the database
#   trusted - True / False / None (None = no explicit trust recorded)
#   ca      - certificate is a CA (None = unspecified)
#   usages  - frozenset of extended key usages the trust applies to
TrustFlags = collections.namedtuple('TrustFlags', 'has_key trusted ca usages')

# No key, no trust, no usages.
EMPTY_TRUST_FLAGS = TrustFlags(False, None, None, None)

# Trust granted to the IPA CA itself: trusted CA for all supported usages.
IPA_CA_TRUST_FLAGS = TrustFlags(
    False, True, True, frozenset({
        x509.EKU_SERVER_AUTH,
        x509.EKU_CLIENT_AUTH,
        x509.EKU_CODE_SIGNING,
        x509.EKU_EMAIL_PROTECTION,
        x509.EKU_PKINIT_CLIENT_AUTH,
        x509.EKU_PKINIT_KDC,
    }),
)

# Externally-issued CA: trusted CA for server authentication only.
EXTERNAL_CA_TRUST_FLAGS = TrustFlags(
    False, True, True, frozenset({x509.EKU_SERVER_AUTH}),
)

# End-entity peer certificate trusted for server authentication.
TRUSTED_PEER_TRUST_FLAGS = TrustFlags(
    False, True, False, frozenset({x509.EKU_SERVER_AUTH}),
)
def get_ca_nickname(realm, format=CA_NICKNAME_FMT):
    """Build the NSS nickname for the CA certificate of a realm.

    :param realm: realm name interpolated into the template
    :param format: printf-style template with a single ``%s`` placeholder
    :return: the formatted nickname string
    """
    nickname = format % realm
    return nickname
def find_cert_from_txt(cert, start=0):
    """
    Given a cert blob (str) which may or may not contain leading and
    trailing text, pull out just the certificate part. This will return
    the FIRST cert in a stream of data.

    :param cert: text possibly containing a PEM certificate
    :param start: offset at which to begin searching
    :returns: a tuple (IPACertificate, last position in cert)
    :raises RuntimeError: when no complete PEM block is found
    """
    begin_marker = '-----BEGIN CERTIFICATE-----'
    end_marker = '-----END CERTIFICATE-----'
    s = cert.find(begin_marker, start)
    e = cert.find(end_marker, s)
    if e > 0:
        # Include the END marker itself in the extracted span.
        e += len(end_marker)
    if s < 0 or e < 0:
        raise RuntimeError("Unable to find certificate")
    parsed = x509.load_pem_x509_certificate(cert[s:e].encode('utf-8'))
    return (parsed, e)
def parse_trust_flags(trust_flags):
    """
    Convert certutil trust flags to TrustFlags object.

    :param trust_flags: the three comma-separated certutil trust fields
        (e.g. ``CT,C,C``), for SSL, e-mail and code signing respectively
    :returns: TrustFlags namedtuple
    :raises ValueError: on contradictory flag combinations
    """
    has_key = 'u' in trust_flags
    if 'p' in trust_flags:
        if 'C' in trust_flags or 'P' in trust_flags or 'T' in trust_flags:
            raise ValueError("cannot be both trusted and not trusted")
        # BUG FIX: previously returned a bare 3-tuple (False, None, None);
        # every other path (and all callers) expects a 4-field TrustFlags.
        return TrustFlags(has_key, False, None, frozenset())
    elif 'C' in trust_flags or 'T' in trust_flags:
        if 'P' in trust_flags:
            raise ValueError("cannot be both CA and not CA")
        ca = True
    elif 'P' in trust_flags:
        ca = False
    else:
        # No explicit trust at all.
        return TrustFlags(has_key, None, None, frozenset())
    trust_flags = trust_flags.split(',')
    ext_key_usage = set()
    # Field order is fixed by certutil: SSL, e-mail, code signing.
    for i, kp in enumerate((x509.EKU_SERVER_AUTH,
                            x509.EKU_EMAIL_PROTECTION,
                            x509.EKU_CODE_SIGNING)):
        if 'C' in trust_flags[i] or 'P' in trust_flags[i]:
            ext_key_usage.add(kp)
    # 'T' in the SSL field means trusted to issue client certs.
    if 'T' in trust_flags[0]:
        ext_key_usage.add(x509.EKU_CLIENT_AUTH)
    return TrustFlags(has_key, True, ca, frozenset(ext_key_usage))
def unparse_trust_flags(trust_flags):
    """
    Convert TrustFlags object to certutil trust flags.

    :param trust_flags: TrustFlags namedtuple
    :returns: the three comma-separated certutil trust fields
    """
    has_key, trusted, ca, ext_key_usage = trust_flags

    # Explicitly distrusted certificate.
    if trusted is False:
        return 'pu,pu,pu' if has_key else 'p,p,p'

    # Trust or CA status unspecified.
    if trusted is None or ca is None:
        return 'u,u,u' if has_key else ',,'

    # Trusted for every usage.
    if ext_key_usage is None:
        if ca:
            return 'CTu,Cu,Cu' if has_key else 'CT,C,C'
        return 'Pu,Pu,Pu' if has_key else 'P,P,P'

    # Trusted for an explicit set of usages; field order is fixed by
    # certutil: SSL, e-mail, code signing.
    marker = 'C' if ca else 'P'
    fields = [
        marker if kp in ext_key_usage else ''
        for kp in (x509.EKU_SERVER_AUTH,
                   x509.EKU_EMAIL_PROTECTION,
                   x509.EKU_CODE_SIGNING)
    ]
    if ca and x509.EKU_CLIENT_AUTH in ext_key_usage:
        fields[0] += 'T'
    if has_key:
        fields = [field + 'u' for field in fields]
    return ','.join(fields)
def verify_kdc_cert_validity(kdc_cert, ca_certs, realm):
    """Verify that *kdc_cert* is usable as a PKINIT KDC certificate.

    Checks three things: the cert chains up to *ca_certs* (delegated to
    the openssl CLI), it carries the PKINIT KDC extended key usage, and
    its SAN contains the krbtgt/<realm>@<realm> principal.

    :param kdc_cert: candidate KDC certificate object
    :param ca_certs: list of CA certificates to verify the chain against
    :param realm: Kerberos realm name the KDC serves
    :raises ValueError: if any of the three checks fails
    """
    with NamedTemporaryFile() as kdc_file, NamedTemporaryFile() as ca_file:
        kdc_file.write(kdc_cert.public_bytes(x509.Encoding.PEM))
        kdc_file.flush()
        x509.write_certificate_list(ca_certs, ca_file.name)
        ca_file.flush()

        # Chain validation via `openssl verify`; its stderr/stdout is
        # surfaced as the ValueError message on failure.
        try:
            ipautil.run(
                [paths.OPENSSL, 'verify', '-CAfile', ca_file.name,
                 kdc_file.name],
                capture_output=True)
        except ipautil.CalledProcessError as e:
            raise ValueError(e.output)

        # The EKU extension must include the PKINIT KDC OID;
        # .index() raises ValueError when the OID is absent.
        try:
            eku = kdc_cert.extensions.get_extension_for_class(
                cryptography.x509.ExtendedKeyUsage)
            list(eku.value).index(
                cryptography.x509.ObjectIdentifier(x509.EKU_PKINIT_KDC))
        except (cryptography.x509.ExtensionNotFound,
                ValueError):
            raise ValueError("invalid for a KDC")

        # SAN must carry the realm's TGS principal as a KRB5PrincipalName.
        principal = str(Principal(['krbtgt', realm], realm))
        gns = x509.process_othernames(kdc_cert.san_general_names)
        for gn in gns:
            if isinstance(gn, x509.KRB5PrincipalName) and gn.name == principal:
                break
        else:
            raise ValueError("invalid for realm %s" % realm)
class NSSDatabase(object):
    """A general-purpose wrapper around a NSS cert database

    For permanent NSS databases, pass the cert DB directory to __init__
    For temporary databases, do not pass nssdir, and call close() when done
    to remove the DB. Alternatively, a NSSDatabase can be used as a
    context manager that calls close() automatically.
    """
    # Traditionally, we used CertDB for our NSS DB operations, but that class
    # got too tied to IPA server details, killing reusability.
    # BaseCertDB is a class that knows nothing about IPA.
    # Generic NSS DB code should be moved here.

    def __init__(self, nssdir=None):
        """Wrap *nssdir*, or create a temporary database directory if None."""
        if nssdir is None:
            self.secdir = tempfile.mkdtemp()
            self._is_temporary = True
        else:
            self.secdir = nssdir
            self._is_temporary = False
        self.pwd_file = os.path.join(self.secdir, 'pwdfile.txt')

    def close(self):
        """Remove the database directory if this instance created it."""
        if self._is_temporary:
            shutil.rmtree(self.secdir)

    def __enter__(self):
        return self

    def __exit__(self, type, value, tb):
        self.close()

    def run_certutil(self, args, stdin=None, **kwargs):
        """Run certutil on this database, always supplying the password file."""
        new_args = [paths.CERTUTIL, "-d", self.secdir]
        new_args = new_args + args
        new_args.extend(['-f', self.pwd_file])
        return ipautil.run(new_args, stdin, **kwargs)

    def create_db(self, user=None, group=None, mode=None, backup=False):
        """Create cert DB

        :param user: User owner of the secdir
        :param group: Group owner of the secdir
        :param mode: Mode of the secdir
        :param backup: Backup the secdir files
        """
        dirmode = 0o750
        filemode = 0o640
        pwdfilemode = 0o640
        if mode is not None:
            dirmode = mode
            filemode = mode & 0o666
            pwdfilemode = mode & 0o660

        uid = -1
        gid = -1
        if user is not None:
            uid = pwd.getpwnam(user).pw_uid
        if group is not None:
            gid = grp.getgrnam(group).gr_gid

        if backup:
            for filename in NSS_FILES:
                path = os.path.join(self.secdir, filename)
                ipautil.backup_file(path)

        if not os.path.exists(self.secdir):
            os.makedirs(self.secdir, dirmode)

        if not os.path.exists(self.pwd_file):
            # Create the password file for this db
            with io.open(os.open(self.pwd_file,
                                 os.O_CREAT | os.O_WRONLY,
                                 pwdfilemode), 'w', closefd=True) as f:
                f.write(ipautil.ipa_generate_password())
                f.flush()

        self.run_certutil(["-N", "-f", self.pwd_file])

        # Finally fix up perms
        os.chown(self.secdir, uid, gid)
        os.chmod(self.secdir, dirmode)
        for filename in NSS_FILES:
            path = os.path.join(self.secdir, filename)
            if os.path.exists(path):
                os.chown(path, uid, gid)
                if path == self.pwd_file:
                    new_mode = pwdfilemode
                else:
                    new_mode = filemode
                os.chmod(path, new_mode)

    def restore(self):
        """Restore '.orig' backups of the DB files, saving current as '.ipasave'."""
        for filename in NSS_FILES:
            path = os.path.join(self.secdir, filename)
            backup_path = path + '.orig'
            save_path = path + '.ipasave'
            try:
                if os.path.exists(path):
                    os.rename(path, save_path)
                if os.path.exists(backup_path):
                    os.rename(backup_path, path)
            except OSError as e:
                # best-effort restore; log and keep going
                logger.debug('%s', e)

    def list_certs(self):
        """Return nicknames and cert flags for all certs in the database

        :return: List of (name, trust_flags) tuples
        """
        result = self.run_certutil(["-L"], capture_output=True)
        certs = result.output.splitlines()

        # FIXME, this relies on NSS never changing the formatting of certutil
        certlist = []
        for cert in certs:
            match = re.match(r'^(.+?)\s+(\w*,\w*,\w*)\s*$', cert)
            if match:
                nickname = match.group(1)
                trust_flags = parse_trust_flags(match.group(2))
                certlist.append((nickname, trust_flags))

        return tuple(certlist)

    def find_server_certs(self):
        """Return nicknames and cert flags for server certs in the database

        Server certs have an "u" character in the trust flags.

        :return: List of (name, trust_flags) tuples
        """
        server_certs = []
        for name, flags in self.list_certs():
            if flags.has_key:
                server_certs.append((name, flags))

        return server_certs

    def get_trust_chain(self, nickname):
        """Return names of certs in a given cert's trust chain

        :param nickname: Name of the cert
        :return: List of certificate names
        """
        root_nicknames = []
        result = self.run_certutil(["-O", "-n", nickname], capture_output=True)
        chain = result.output.splitlines()

        for c in chain:
            # raw string: the previous plain literal relied on the invalid
            # escape '\s' being passed through (a DeprecationWarning)
            m = re.match(r'\s*"(.*)" \[.*', c)
            if m:
                root_nicknames.append(m.groups()[0])

        return root_nicknames

    def export_pkcs12(self, nickname, pkcs12_filename, pkcs12_passwd=None):
        """Export *nickname* (cert + key) into a PKCS#12 file via pk12util."""
        args = [paths.PK12UTIL, "-d", self.secdir,
                "-o", pkcs12_filename,
                "-n", nickname,
                "-k", self.pwd_file]
        pkcs12_password_file = None
        if pkcs12_passwd is not None:
            pkcs12_password_file = ipautil.write_tmp_file(pkcs12_passwd + '\n')
            args = args + ["-w", pkcs12_password_file.name]
        try:
            ipautil.run(args)
        except ipautil.CalledProcessError as e:
            # pk12util exit codes: 17 = bad password, 10 = open failure
            if e.returncode == 17:
                raise RuntimeError("incorrect password for pkcs#12 file %s" %
                                   pkcs12_filename)
            elif e.returncode == 10:
                raise RuntimeError("Failed to open %s" % pkcs12_filename)
            else:
                raise RuntimeError("unknown error exporting pkcs#12 file %s" %
                                   pkcs12_filename)
        finally:
            if pkcs12_password_file is not None:
                pkcs12_password_file.close()

    def import_pkcs12(self, pkcs12_filename, pkcs12_passwd=None):
        """Import certs and keys from a PKCS#12 file via pk12util."""
        args = [paths.PK12UTIL, "-d", self.secdir,
                "-i", pkcs12_filename,
                "-k", self.pwd_file, '-v']
        pkcs12_password_file = None
        if pkcs12_passwd is not None:
            pkcs12_password_file = ipautil.write_tmp_file(pkcs12_passwd + '\n')
            args = args + ["-w", pkcs12_password_file.name]
        try:
            ipautil.run(args)
        except ipautil.CalledProcessError as e:
            # pk12util exit codes: 17 = bad password, 10 = open failure
            if e.returncode == 17:
                raise RuntimeError("incorrect password for pkcs#12 file %s" %
                                   pkcs12_filename)
            elif e.returncode == 10:
                raise RuntimeError("Failed to open %s" % pkcs12_filename)
            else:
                raise RuntimeError("unknown error import pkcs#12 file %s" %
                                   pkcs12_filename)
        finally:
            if pkcs12_password_file is not None:
                pkcs12_password_file.close()

    def import_files(self, files, import_keys=False, key_password=None,
                     key_nickname=None):
        """
        Import certificates and a single private key from multiple files

        The files may be in PEM and DER certificate, PKCS#7 certificate chain,
        PKCS#8 and raw private key and PKCS#12 formats.

        :param files: Names of files to import
        :param import_keys: Whether to import private keys
        :param key_password: Password to decrypt private keys
        :param key_nickname: Nickname of the private key to import from PKCS#12
            files
        """
        key_file = None
        extracted_key = None
        extracted_certs = []

        for filename in files:
            try:
                with open(filename, 'rb') as f:
                    data = f.read()
            except IOError as e:
                raise RuntimeError(
                    "Failed to open %s: %s" % (filename, e.strerror))

            # Try to parse the file as PEM file
            matches = list(
                re.finditer(
                    br'-----BEGIN (.+?)-----(.*?)-----END \1-----',
                    data, re.DOTALL
                )
            )
            if matches:
                loaded = False
                for match in matches:
                    body = match.group()
                    label = match.group(1)
                    # 1-based line number of the PEM block, for log messages
                    line = len(data[:match.start() + 1].splitlines())
                    if label in (b'CERTIFICATE', b'X509 CERTIFICATE',
                                 b'X.509 CERTIFICATE'):
                        try:
                            cert = x509.load_pem_x509_certificate(body)
                        except ValueError as e:
                            if label != b'CERTIFICATE':
                                logger.warning(
                                    "Skipping certificate in %s at line %s: "
                                    "%s",
                                    filename, line, e)
                                continue
                            # plain CERTIFICATE that failed to parse falls
                            # through to the PKCS#7 attempt below
                        else:
                            extracted_certs.append(cert)
                            loaded = True
                            continue
                    if label in (b'PKCS7', b'PKCS #7 SIGNED DATA',
                                 b'CERTIFICATE'):
                        try:
                            certs = x509.pkcs7_to_certs(body)
                        except ipautil.CalledProcessError as e:
                            if label == b'CERTIFICATE':
                                logger.warning(
                                    "Skipping certificate in %s at line %s: "
                                    "%s",
                                    filename, line, e)
                            else:
                                logger.warning(
                                    "Skipping PKCS#7 in %s at line %s: %s",
                                    filename, line, e)
                            continue
                        else:
                            extracted_certs.extend(certs)
                            loaded = True
                            continue
                    if label in (b'PRIVATE KEY', b'ENCRYPTED PRIVATE KEY',
                                 b'RSA PRIVATE KEY', b'DSA PRIVATE KEY',
                                 b'EC PRIVATE KEY'):
                        if not import_keys:
                            continue

                        if key_file:
                            raise RuntimeError(
                                "Can't load private key from both %s and %s" %
                                (key_file, filename))

                        # the args -v2 aes256 -v2prf hmacWithSHA256 are needed
                        # on OpenSSL 1.0.2 (fips mode). As soon as FreeIPA
                        # requires OpenSSL 1.1.0 we'll be able to drop them
                        args = [
                            paths.OPENSSL, 'pkcs8',
                            '-topk8',
                            '-v2', 'aes256', '-v2prf', 'hmacWithSHA256',
                            '-passout', 'file:' + self.pwd_file,
                        ]
                        if ((label != b'PRIVATE KEY' and key_password) or
                                label == b'ENCRYPTED PRIVATE KEY'):
                            key_pwdfile = ipautil.write_tmp_file(key_password)
                            args += [
                                '-passin', 'file:' + key_pwdfile.name,
                            ]
                        try:
                            result = ipautil.run(
                                args, stdin=body, capture_output=True)
                        except ipautil.CalledProcessError as e:
                            logger.warning(
                                "Skipping private key in %s at line %s: %s",
                                filename, line, e)
                            continue
                        else:
                            extracted_key = result.raw_output
                            key_file = filename
                            loaded = True
                            continue
                if loaded:
                    continue
                raise RuntimeError("Failed to load %s" % filename)

            # Try to load the file as DER certificate
            try:
                cert = x509.load_der_x509_certificate(data)
            except ValueError:
                pass
            else:
                extracted_certs.append(cert)
                continue

            # Try to import the file as PKCS#12 file
            if import_keys:
                try:
                    self.import_pkcs12(filename, key_password)
                except RuntimeError:
                    pass
                else:
                    if key_file:
                        raise RuntimeError(
                            "Can't load private key from both %s and %s" %
                            (key_file, filename))
                    key_file = filename

                    server_certs = self.find_server_certs()
                    if key_nickname:
                        for nickname, _trust_flags in server_certs:
                            if nickname == key_nickname:
                                break
                        else:
                            raise RuntimeError(
                                "Server certificate \"%s\" not found in %s" %
                                (key_nickname, filename))
                    else:
                        if len(server_certs) > 1:
                            raise RuntimeError(
                                "%s server certificates found in %s, "
                                "expecting only one" %
                                (len(server_certs), filename))
                    continue

            raise RuntimeError("Failed to load %s" % filename)

        if import_keys and not key_file:
            raise RuntimeError(
                "No server certificates found in %s" % (', '.join(files)))

        for cert in extracted_certs:
            nickname = str(DN(cert.subject))
            self.add_cert(cert, nickname, EMPTY_TRUST_FLAGS)

        if extracted_key:
            # Bundle the extracted key with the certs into a throw-away
            # PKCS#12 file so pk12util can match key and cert on import.
            with tempfile.NamedTemporaryFile() as in_file, \
                    tempfile.NamedTemporaryFile() as out_file:
                for cert in extracted_certs:
                    in_file.write(cert.public_bytes(x509.Encoding.PEM))
                in_file.write(extracted_key)
                in_file.flush()
                out_password = ipautil.ipa_generate_password()
                out_pwdfile = ipautil.write_tmp_file(out_password)
                args = [
                    paths.OPENSSL, 'pkcs12',
                    '-export',
                    '-in', in_file.name,
                    '-out', out_file.name,
                    '-passin', 'file:' + self.pwd_file,
                    '-passout', 'file:' + out_pwdfile.name,
                ]
                try:
                    ipautil.run(args)
                except ipautil.CalledProcessError:
                    raise RuntimeError(
                        "No matching certificate found for private key from "
                        "%s" % key_file)

                self.import_pkcs12(out_file.name, out_password)

    def trust_root_cert(self, root_nickname, trust_flags):
        """Apply *trust_flags* to *root_nickname*, skipping built-in roots."""
        if root_nickname.startswith("Builtin"):
            logger.debug(
                "No need to add trust for built-in root CAs, skipping %s",
                root_nickname)
        else:
            trust_flags = unparse_trust_flags(trust_flags)
            try:
                self.run_certutil(["-M", "-n", root_nickname,
                                   "-t", trust_flags])
            except ipautil.CalledProcessError:
                raise RuntimeError(
                    "Setting trust on %s failed" % root_nickname)

    def get_cert(self, nickname):
        """
        :param nickname: nickname of the certificate in the NSS database
        :returns: string in Python2
                  bytes in Python3
        """
        args = ['-L', '-n', nickname, '-a']
        try:
            result = self.run_certutil(args, capture_output=True)
        except ipautil.CalledProcessError:
            raise RuntimeError("Failed to get %s" % nickname)
        cert, _start = find_cert_from_txt(result.output, start=0)
        return cert

    def has_nickname(self, nickname):
        """Return True if a cert with *nickname* can be read from the DB."""
        try:
            self.get_cert(nickname)
        except RuntimeError:
            # This might be error other than "nickname not found". Beware.
            return False
        else:
            return True

    def export_pem_cert(self, nickname, location):
        """Export the given cert to PEM file in the given location"""
        cert = self.get_cert(nickname)
        with open(location, "wb") as fd:
            fd.write(cert.public_bytes(x509.Encoding.PEM))
        os.chmod(location, 0o444)

    def import_pem_cert(self, nickname, flags, location):
        """Import a cert from the given PEM file.

        The file must contain exactly one certificate.
        """
        try:
            with open(location) as fd:
                certs = fd.read()
        except IOError as e:
            raise RuntimeError(
                "Failed to open %s: %s" % (location, e.strerror)
            )

        cert, st = find_cert_from_txt(certs)
        self.add_cert(cert, nickname, flags)

        # A second certificate in the file is an error.
        try:
            find_cert_from_txt(certs, st)
        except RuntimeError:
            pass
        else:
            raise ValueError('%s contains more than one certificate' %
                             location)

    def add_cert(self, cert, nick, flags):
        """Add *cert* to the database under *nick* with the given TrustFlags."""
        flags = unparse_trust_flags(flags)
        args = ["-A", "-n", nick, "-t", flags, '-a']
        self.run_certutil(args, stdin=cert.public_bytes(x509.Encoding.PEM))

    def delete_cert(self, nick):
        """Delete the cert with nickname *nick* from the database."""
        self.run_certutil(["-D", "-n", nick])

    def verify_server_cert_validity(self, nickname, hostname):
        """Verify a certificate is valid for a SSL server with given hostname

        Raises a ValueError if the certificate is invalid.
        """
        cert = self.get_cert(nickname)

        try:
            self.run_certutil(['-V', '-n', nickname, '-u', 'V'],
                              capture_output=True)
        except ipautil.CalledProcessError as e:
            # certutil output in case of error is
            # 'certutil: certificate is invalid: <ERROR_STRING>\n'
            raise ValueError(e.output)

        try:
            cert.match_hostname(hostname)
        except ValueError:
            raise ValueError('invalid for server %s' % hostname)

    def verify_ca_cert_validity(self, nickname):
        """Verify that *nickname* is a usable CA cert; raise ValueError if not."""
        cert = self.get_cert(nickname)

        if not cert.subject:
            raise ValueError("has empty subject")

        try:
            bc = cert.extensions.get_extension_for_class(
                cryptography.x509.BasicConstraints)
        except cryptography.x509.ExtensionNotFound:
            raise ValueError("missing basic constraints")

        if not bc.value.ca:
            raise ValueError("not a CA certificate")

        try:
            cert.extensions.get_extension_for_class(
                cryptography.x509.SubjectKeyIdentifier)
        except cryptography.x509.ExtensionNotFound:
            raise ValueError("missing subject key identifier extension")

        try:
            # '-u L' asks certutil to validate for the "SSL CA" usage
            self.run_certutil(['-V', '-n', nickname, '-u', 'L'],
                              capture_output=True)
        except ipautil.CalledProcessError as e:
            # certutil output in case of error is
            # 'certutil: certificate is invalid: <ERROR_STRING>\n'
            raise ValueError(e.output)

    def verify_kdc_cert_validity(self, nickname, realm):
        """Validate the KDC cert *nickname* for *realm* using its trust chain."""
        nicknames = self.get_trust_chain(nickname)
        certs = [self.get_cert(nickname) for nickname in nicknames]

        verify_kdc_cert_validity(certs[-1], certs[:-1], realm)

282
ipapython/config.py Normal file
View File

@@ -0,0 +1,282 @@
# Authors: Karl MacMillan <kmacmill@redhat.com>
#
# Copyright (C) 2007 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
# pylint: disable=deprecated-module
from optparse import (
Option, Values, OptionParser, IndentedHelpFormatter, OptionValueError)
# pylint: enable=deprecated-module
from copy import copy
import socket
import functools
from dns import resolver, rdatatype
from dns.exception import DNSException
import dns.name
# pylint: disable=import-error
from six.moves.configparser import SafeConfigParser
from six.moves.urllib.parse import urlsplit
# pylint: enable=import-error
from ipaplatform.paths import paths
from ipapython.dn import DN
from ipapython.ipautil import CheckedIPAddress, CheckedIPAddressLoopback
class IPAConfigError(Exception):
    """Raised when required IPA configuration (realm/server/domain) is missing."""

    def __init__(self, msg=''):
        self.msg = msg
        super(IPAConfigError, self).__init__(msg)

    def __repr__(self):
        return self.msg

    __str__ = __repr__
class IPAFormatter(IndentedHelpFormatter):
    """Our own optparse formatter that indents multiple lined usage string."""

    def format_usage(self, usage):
        """Render *usage*, aligning continuation lines under the first line."""
        prefix = "Usage:"
        pad = " " * len(prefix)
        lines = usage.split("\n")
        parts = ["%s %s\n" % (prefix, lines[0])]
        parts.extend("%s %s\n" % (pad, line) for line in lines[1:])
        return "".join(parts)
def check_ip_option(option, opt, value, allow_loopback=False):
    """optparse type checker converting *value* into a CheckedIPAddress.

    :param allow_loopback: accept loopback addresses as well
    :raises OptionValueError: if *value* is not a valid IP address
    """
    address_cls = CheckedIPAddressLoopback if allow_loopback else CheckedIPAddress
    try:
        return address_cls(value)
    except Exception as e:
        raise OptionValueError("option {}: invalid IP address {}: {}"
                               .format(opt, value, e))
def check_dn_option(option, opt, value):
    """optparse type checker converting *value* into a DN.

    :raises OptionValueError: if *value* is not a valid DN
    """
    try:
        dn = DN(value)
    except Exception as e:
        raise OptionValueError("option %s: invalid DN: %s" % (opt, e))
    return dn
def check_constructor(option, opt, value):
    """optparse type checker that builds the value via option.constructor.

    :raises OptionValueError: if the constructor rejects *value*
    """
    factory = option.constructor
    assert factory is not None, \
        "Oops! Developer forgot to set 'constructor' kwarg"
    try:
        result = factory(value)
    except Exception as e:
        raise OptionValueError("option {} invalid: {}".format(opt, e))
    return result
class IPAOption(Option):
    """
    optparse.Option subclass with support of options labeled as
    security-sensitive such as passwords.
    """
    # Extra keyword args accepted by add_option(): 'sensitive' marks an
    # option for exclusion by IPAOptionParser.get_safe_opts(); 'constructor'
    # supplies the callable used by the "constructor" type below.
    ATTRS = Option.ATTRS + ["sensitive", "constructor"]
    # Additional option value types understood by this class.
    TYPES = Option.TYPES + ("ip", "dn", "constructor", "ip_with_loopback")
    # Copy so we do not mutate optparse's shared checker table.
    TYPE_CHECKER = copy(Option.TYPE_CHECKER)
    TYPE_CHECKER["ip"] = check_ip_option
    TYPE_CHECKER["ip_with_loopback"] = functools.partial(check_ip_option,
                                                         allow_loopback=True)
    TYPE_CHECKER["dn"] = check_dn_option
    TYPE_CHECKER["constructor"] = check_constructor
class IPAOptionParser(OptionParser):
    """
    optparse.OptionParser subclass that uses IPAOption by default
    for storing options.
    """
    def __init__(self,
                 usage=None,
                 option_list=None,
                 option_class=IPAOption,
                 version=None,
                 conflict_handler="error",
                 description=None,
                 formatter=None,
                 add_help_option=True,
                 prog=None):
        OptionParser.__init__(self, usage, option_list, option_class,
                              version, conflict_handler, description,
                              formatter, add_help_option, prog)

    def get_safe_opts(self, opts):
        """
        Returns all options except those with sensitive=True in the same
        fashion as parse_args would

        :param opts: optparse.Values as produced by parse_args()
        :returns: optparse.Values without the sensitive entries
        """
        all_opts_dict = {
            o.dest: o for o in self._get_all_options()
            if hasattr(o, 'sensitive')
        }
        safe_opts_dict = {}

        for option, value in opts.__dict__.items():
            # identity comparison per PEP 8; 'sensitive' is True or None here
            if all_opts_dict[option].sensitive is not True:
                safe_opts_dict[option] = value

        return Values(safe_opts_dict)
def verify_args(parser, args, needed_args = None):
    """Verify that we have all positional arguments we need, if not, exit."""
    needed_list = needed_args.split(" ") if needed_args else []
    if len(args) > len(needed_list):
        parser.error("too many arguments")
    elif len(args) < len(needed_list):
        # report the first missing positional argument by name
        parser.error("no %s specified" % needed_list[len(args)])
class IPAConfig(object):
    """Container for the library defaults: realm, server list and domain."""

    def __init__(self):
        self.default_realm = None
        self.default_server = []
        self.default_domain = None

    def get_realm(self):
        """Return the default realm; raise IPAConfigError when unset."""
        if not self.default_realm:
            raise IPAConfigError("no default realm")
        return self.default_realm

    def get_server(self):
        """Return the default server list; raise IPAConfigError when empty."""
        if not self.default_server:
            raise IPAConfigError("no default server")
        return self.default_server

    def get_domain(self):
        """Return the default domain; raise IPAConfigError when unset."""
        if not self.default_domain:
            raise IPAConfigError("no default domain")
        return self.default_domain
# Global library config
# Module-level singleton shared by the helpers below; it is populated
# lazily by init_config() from CLI options, the config file and DNS.
config = IPAConfig()
def __parse_config(discover_server = True):
    """Fill missing values of the global ``config`` from the IPA config file.

    Values already present on ``config`` are never overwritten; every
    lookup is best-effort (missing file/section/option is silently
    ignored).

    :param discover_server: when True, also derive a server from the
        ``xmlrpc_uri`` option
    """
    p = SafeConfigParser()
    p.read(paths.IPA_DEFAULT_CONF)

    try:
        if not config.default_realm:
            config.default_realm = p.get("global", "realm")
    except Exception:
        # deliberate best-effort: a missing option is not an error here
        pass
    if discover_server:
        try:
            s = p.get("global", "xmlrpc_uri")
            # the server name is the netloc component of the URI
            server = urlsplit(s)
            config.default_server.append(server.netloc)
        except Exception:
            pass
    try:
        if not config.default_domain:
            config.default_domain = p.get("global", "domain")
    except Exception:
        pass
def __discover_config(discover_server = True):
    """Fill missing values of the global ``config`` via DNS SRV discovery.

    Looks up ``_ldap._tcp.<domain>`` SRV records — first for the domain
    derived from the realm, then walking up the label components of the
    local FQDN. All failures are swallowed; returns False when FQDN-based
    discovery bottoms out at the DNS root.

    :param discover_server: when True, also append discovered servers to
        ``config.default_server``
    """
    servers = []
    try:
        if not config.default_domain:
            # try once with REALM -> domain
            domain = str(config.default_realm).lower()
            name = "_ldap._tcp." + domain
            try:
                servers = resolver.query(name, rdatatype.SRV)
            except DNSException:
                # try cycling on domain components of FQDN
                try:
                    domain = dns.name.from_text(socket.getfqdn())
                except DNSException:
                    return False

                while True:
                    domain = domain.parent()

                    if str(domain) == '.':
                        # reached the DNS root without finding SRV records
                        return False
                    name = "_ldap._tcp.%s" % domain
                    try:
                        servers = resolver.query(name, rdatatype.SRV)
                        break
                    except DNSException:
                        pass

            config.default_domain = str(domain).rstrip(".")

        if discover_server:
            if not servers:
                name = "_ldap._tcp.%s." % config.default_domain
                try:
                    servers = resolver.query(name, rdatatype.SRV)
                except DNSException:
                    pass

            for server in servers:
                hostname = str(server.target).rstrip(".")
                config.default_server.append(hostname)

    except Exception:
        # discovery is strictly best-effort
        pass
def add_standard_options(parser):
    """Register the common --realm/--server/--domain override options."""
    parser.add_option("--realm", dest="realm",
                      help="Override default IPA realm")
    parser.add_option("--server", dest="server",
                      help="Override default FQDN of IPA server")
    parser.add_option("--domain", dest="domain",
                      help="Override default IPA DNS domain")
def init_config(options=None):
    """Populate the global ``config`` from options, config file and DNS.

    :param options: optparse Values carrying realm/domain/server attributes
    :raises IPAConfigError: when realm, server or domain cannot be determined
    """
    if options:
        config.default_realm = options.realm
        config.default_domain = options.domain
        if options.server:
            config.default_server.extend(options.server.split(","))

    # Only fall back to DNS server discovery when no server is known yet.
    discover_server = not config.default_server
    __parse_config(discover_server)
    __discover_config(discover_server)

    # make sure the server list only contains unique items
    deduped = []
    for server in config.default_server:
        if server not in deduped:
            deduped.append(server)
    config.default_server = deduped

    if not config.default_realm:
        raise IPAConfigError("IPA realm not found in DNS, in the config file (/etc/ipa/default.conf) or on the command line.")
    if not config.default_server:
        raise IPAConfigError("IPA server not found in DNS, in the config file (/etc/ipa/default.conf) or on the command line.")
    if not config.default_domain:
        raise IPAConfigError("IPA domain not found in the config file (/etc/ipa/default.conf) or on the command line.")

681
ipapython/cookie.py Normal file
View File

@@ -0,0 +1,681 @@
# Authors:
# John Dennis <jdennis@redhat.com>
#
# Copyright (C) 2012 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import re
import datetime
import email.utils
from calendar import timegm
import six
# pylint: disable=import-error
from six.moves.urllib.parse import urlparse
# pylint: enable=import-error
'''
Core Python has two cookie libraries, Cookie.py targeted to server
side and cookielib.py targeted to client side. So why this module and
not use the standard libraries?
Cookie.py has some serious bugs, it cannot correctly parse the
HttpOnly, Secure, and Expires cookie attributes (more of a client side
need and not what it was designed for). Since we utilize those
attributes, that makes Cookie.py a non-starter. Plus its API is awkward
and limited (we would have to build more on top of it).
The Cookie.py bug reports are:
http://bugs.python.org/issue3073
http://bugs.python.org/issue16611
cookielib.py has a lot of good features, a nice API and covers all
the relevant RFC's as well as actual practice in the field. However
cookielib.py is tightly integrated with urllib2 and it's not possible
to use most of the features of cookielib without simultaneously using
urllib2. Unfortunately we only use httplib because of our dependency
on xmlrpc.client. Without urllib2 cookielib is a non-starter.
This module is a minimal implementation of Netscape cookies which
works equally well on either the client or server side. Its API is
easy to use with cookie attributes as class properties which can be
read or set easily. The Cookie object automatically converts Expires
and Max-Age attributes into datetime objects for easy time
comparison. Cookies in strings can easily be parsed, including
multiple cookies in the HTTP_COOKIE environment variable.
The cookie RFC is silent on any escaping requirements for cookie
contents; as such this module does not provide any automated support
for escaping and unescaping.
'''
#-------------------------------------------------------------------------------
class Cookie(object):
'''
A Cookie object has the following attributes:
key
The name of the cookie
value
The value of the cookie
A Cookie also supports these predefined optional attributes. If an
optional attribute is not set on the cookie it's value is None.
domain
Restrict cookie usage to this domain
path
Restrict cookie usage to this path or below
expires
Cookie is invalid after this UTC timestamp
max_age
Cookie is invalid this many seconds in the future.
Has precedence over the expires attribute.
secure
Cookie should only be returned on secure (i.e. SSL/TLS)
connections.
httponly
Cookie is intended only for HTTP communication, it can
never be utilized in any other context (e.g. browser
Javascript).
See the documentation of get_expiration() for an explanation of
how the expires and max-age attributes interact as well as the
role of the timestamp attribute. Expiration values are stored as
datetime objects for easy manipulation and comparison.
There are two ways to instantiate a Cookie object. Either directly
via the constructor or by calling the class function parse() which
returns a list of Cookie objects found in a string.
To create a cookie to send to a client:
Example:
cookie = Cookie('session', session_id,
domain=my_domain, path=mypath,
httponly=True, secure=True, expires=expiration)
headers.append(('Set-Cookie', str(cookie)))
To receive cookies from a request:
Example:
cookies = Cookie.parse(response.getheader('Set-Cookie'), request_url)
'''
    class Expired(ValueError):
        '''Raised when a cookie's expiration predates the comparison time.'''
        pass

    class URLMismatch(ValueError):
        '''Raised when a cookie does not match the request URL.'''
        pass

    # regexp to split fields at a semi-colon
    field_re = re.compile(r';\s*')
    # regexp to locate a key/value pair; the key charset is the RFC 6265
    # token charset (letters, digits and selected punctuation)
    kv_pair_re = re.compile(r'^\s*([a-zA-Z0-9\!\#\$\%\&\'\*\+\-\.\^\_\`\|\~]+)\s*=\s*(.*?)\s*$', re.IGNORECASE)

    # Reserved attribute names, maps from lower case protocol name to
    # object attribute name
    attrs = {'domain' : 'domain',
             'path' : 'path',
             'max-age' : 'max_age',
             'expires' : 'expires',
             'secure' : 'secure',
             'httponly' : 'httponly'}
    @classmethod
    def datetime_to_time(cls, dt):
        '''
        Timestamps (timestamp & expires) are stored as datetime
        objects in UTC. It's non-obvious how to convert a naive UTC
        datetime into a unix time value (seconds since the epoch
        UTC). That functionality is oddly missing from the datetime
        and time modules. This utility provides that missing
        functionality.
        '''
        # Use timegm from the calendar module
        # (it interprets the struct_time as UTC, unlike time.mktime)
        return timegm(dt.utctimetuple())
    @classmethod
    def datetime_to_string(cls, dt=None):
        '''
        Given a datetime object in UTC generate RFC 1123 date string.
        '''
        # NOTE(review): the dt=None default would raise AttributeError on
        # dt.utcoffset(); callers are expected to pass a datetime — confirm.
        # Try to verify dt is specified as UTC. If utcoffset is not
        # available we'll just have to assume the caller is using the
        # correct timezone.
        utcoffset = dt.utcoffset()
        if utcoffset is not None and utcoffset.total_seconds() != 0.0:
            raise ValueError("timezone is not UTC")

        # Do not use strftime because it respects the locale, instead
        # use the RFC 1123 formatting function which uses only English
        return email.utils.formatdate(cls.datetime_to_time(dt), usegmt=True)
    @classmethod
    def parse_datetime(cls, s):
        '''
        Parse a RFC 822, RFC 1123 date string, return a naive datetime
        object in UTC.
        '''
        s = s.strip()

        # Do not use strptime because it respects the locale, instead
        # use the RFC 1123 parsing function which uses only English
        try:
            # parsedate() returns a 9-tuple; the first six fields are
            # year..second, exactly what the datetime constructor needs
            dt = datetime.datetime(*email.utils.parsedate(s)[0:6])
        except Exception as e:
            raise ValueError("unable to parse expires datetime '%s': %s" % (s, e))

        return dt
    @classmethod
    def normalize_url_path(cls, url_path):
        '''
        Given a URL path, possibly empty, return a path consisting
        only of directory components. The URL path must end with a
        trailing slash for the last path element to be considered a
        directory. Also the URL path must begin with a slash. Empty
        input returns '/'.

        Examples:

        ''          -> '/'
        '/'         -> '/'
        'foo'       -> '/'
        'foo/'      -> '/'
        '/foo'      -> '/'
        '/foo/'     -> '/foo'
        '/foo/bar'  -> '/foo'
        '/foo/bar/' -> '/foo/bar'
        '''
        # Lower-case first so later path comparisons are case-insensitive.
        url_path = url_path.lower()

        if not url_path:
            return '/'

        # Paths not anchored at the root have no usable directory part.
        if not url_path.startswith('/'):
            return '/'

        # A lone '/' or '/name' (single slash) reduces to the root.
        if url_path.count('/') <= 1:
            return'/'

        # Drop everything after (and including) the final slash.
        return url_path[:url_path.rindex('/')]
    @classmethod
    def parse(cls, cookie_string, request_url=None):
        '''
        Given a string containing one or more cookies (the
        HTTP_COOKIES environment variable typically contains multiple
        cookies) parse the string and return a list of Cookie objects
        found in the string.

        :param cookie_string: raw cookie text, e.g. the value of an
            HTTP_COOKIE environment variable or Set-Cookie header.
        :param request_url: when given, each returned cookie is
            normalized against this URL (missing Domain/Path filled in,
            Max-Age collapsed into Expires).
        :returns: list of Cookie objects, possibly empty.
        :raises ValueError: on an unterminated quoted value or an
            unrecognized bare token.
        '''
        # Our list of returned cookies
        cookies = []
        # Split the input string at semi-colon boundaries, we call this a
        # field. A field may either be a single keyword or a key=value
        # pair.
        fields = Cookie.field_re.split(cookie_string)
        # The input string may have multiple cookies inside it. This is
        # common when the string comes from a HTTP_COOKIE environment
        # variable. All the cookies will be concatenated, separated by a
        # semi-colon. Semi-colons are also the separator between
        # attributes in a cookie.
        #
        # To distinguish between two adjacent cookies in a string we
        # have to locate the key=value pair at the start of a
        # cookie. Unfortunately cookies have attributes that also look
        # like key/value pairs, the only way to distinguish a cookie
        # attribute from a cookie is the fact the attribute names are
        # reserved. A cookie attribute may either be a key/value pair
        # or a single key (e.g. HttpOnly). As we scan the cookie we
        # first identify the key=value (cookie name, cookie
        # value). Then we continue scanning, if a bare key or
        # key/value pair follows and is a known reserved keyword then
        # that's an attribute belonging to the current cookie. As soon
        # as we see a key/value pair whose key is not reserved we know
        # we've found a new cookie. Bare keys (no value) can never
        # start a new cookie.

        # Iterate over all the fields and emit a new cookie whenever the
        # next field is not a known attribute.
        cookie = None
        for field in fields:
            match = Cookie.kv_pair_re.search(field)
            if match:
                key = match.group(1)
                value = match.group(2)
                # Double quoted value?
                if value and value[0] == '"':
                    if value[-1] == '"':
                        # Strip the surrounding quotes.
                        value = value[1:-1]
                    else:
                        raise ValueError("unterminated quote in '%s'" % value)
                kv_pair = True
            else:
                key = field
                value = True    # True because bare keys are boolean flags
                kv_pair = False
            # Attribute names are matched case-insensitively.
            is_attribute = key.lower() in Cookie.attrs
            # First cookie found, create new cookie object
            if cookie is None and kv_pair and not is_attribute:
                cookie = Cookie(key, value)
            # If start of new cookie then flush previous cookie and create
            # a new one (it's a new cookie because it's a key/value pair
            # whose key is not a reserved keyword).
            elif cookie and kv_pair and not is_attribute:
                if request_url is not None:
                    cookie.normalize(request_url)
                cookies.append(cookie)
                cookie = Cookie(key, value)
            # If it's a reserved keyword add that as an attribute to the
            # current cookie being scanned.
            elif cookie and is_attribute:
                # Name-mangled private helper; resolves because we are
                # inside the Cookie class body.
                cookie.__set_attr(key, value)
            # If we've found a non-empty single token that's not a
            # reserved keyword it's an error. An empty token can occur
            # when there are two adjacent semi-colons (i.e. "; ;").
            # We don't consider empty tokens an error.
            elif key:
                raise ValueError("unknown cookie token '%s'" % key)
        # Flush out final cookie
        if cookie:
            if request_url is not None:
                cookie.normalize(request_url)
            cookies.append(cookie)
        return cookies
@classmethod
def get_named_cookie_from_string(cls, cookie_string, cookie_name,
request_url=None, timestamp=None):
'''
A cookie string may contain multiple cookies, parse the cookie
string and return the last cookie in the string matching the
cookie name or None if not found.
This is basically a utility wrapper around the parse() class
method which iterates over what parse() returns looking for
the specific cookie.
When cookie_name appears more than once the last instance is
returned rather than the first because the ordering sequence
makes the last instance the current value.
'''
target_cookie = None
cookies = cls.parse(cookie_string)
for cookie in cookies:
if cookie.key == cookie_name:
target_cookie = cookie
if timestamp is not None:
target_cookie.timestamp = timestamp
if request_url is not None:
target_cookie.normalize(request_url)
return target_cookie
    def __init__(self, key, value, domain=None, path=None, max_age=None, expires=None,
                 secure=None, httponly=None, timestamp=None):
        '''
        Create a cookie named `key` holding `value`.

        :param key: cookie name.
        :param value: cookie value.
        :param domain: optional Domain attribute.
        :param path: optional Path attribute.
        :param max_age: optional Max-Age attribute; validated/converted
            by the max_age property setter (must be int()-convertible).
        :param expires: optional Expires attribute; converted by the
            expires property setter (datetime, number or date string).
        :param secure: optional Secure flag.
        :param httponly: optional HttpOnly flag.
        :param timestamp: arrival moment used for Max-Age computations;
            converted by the timestamp property setter.
        '''
        self.key = key
        self.value = value
        self.domain = domain
        self.path = path
        self.max_age = max_age      # goes through the property setter
        self.expires = expires      # goes through the property setter
        self.secure = secure
        self.httponly = httponly
        self.timestamp = timestamp  # goes through the property setter
    @property
    def timestamp(self):
        '''
        The UTC moment at which the cookie was received, used when
        computing the expiration from a Max-Age offset. The
        expiration will be timestamp + max_age. The timestamp value
        will always be a datetime object (or None).

        By default the timestamp reflects the moment the Cookie object
        is created as this often corresponds to the moment the cookie
        is received (the intent of the Max-Age attribute). But because
        it's sometimes desirable to force a specific moment for
        purposes of computing the expiration from the Max-Age the
        Cookie timestamp can be updated.

        You may assign a datetime object, a numeric UNIX timestamp
        (seconds since the epoch UTC) or a formatted time string; in
        all cases the value will be converted to a datetime object.

        NOTE(review): historical docs claimed assigning None snapshots
        the current UTC time, but the setter below stores None as-is --
        confirm which behavior callers rely on.
        '''
        return self._timestamp
@timestamp.setter
def timestamp(self, value):
if value is None:
self._timestamp = None
elif isinstance(value, datetime.datetime):
self._timestamp = value
elif isinstance(value, (six.integer_types, float)):
self._timestamp = datetime.datetime.utcfromtimestamp(value)
elif isinstance(value, six.string_types):
self._timestamp = Cookie.parse_datetime(value)
else:
raise TypeError('value must be datetime, int, long, float, basestring or None, not %s' % \
value.__class__.__name__)
    @property
    def expires(self):
        '''
        The expiration timestamp (in UTC) as a datetime object for the
        cookie, or None if not set.

        You may assign a value of None, a datetime object, a numeric
        UNIX timestamp (seconds since the epoch UTC) or a formatted time
        string (the latter two will be converted to a datetime object).
        '''
        return self._expires
@expires.setter
def expires(self, value):
if value is None:
self._expires = None
elif isinstance(value, datetime.datetime):
self._expires = value
elif isinstance(value, (six.integer_types, float)):
self._expires = datetime.datetime.utcfromtimestamp(value)
elif isinstance(value, six.string_types):
self._expires = Cookie.parse_datetime(value)
else:
raise TypeError('value must be datetime, int, long, float, basestring or None, not %s' % \
value.__class__.__name__)
    @property
    def max_age(self):
        '''
        The lifetime duration of the cookie, in seconds (int), or None
        when not set. Computed as an offset from the cookie's timestamp.
        '''
        return self._max_age
@max_age.setter
def max_age(self, value):
if value is None:
self._max_age = None
else:
try:
self._max_age = int(value)
except Exception:
raise ValueError("Max-Age value '%s' not convertable to integer" % value)
def __set_attr(self, name, value):
'''
Sets one of the predefined cookie attributes.
'''
attr_name = Cookie.attrs.get(name.lower(), None)
if attr_name is None:
raise ValueError("unknown cookie attribute '%s'" % name)
setattr(self, attr_name, value)
def __str__(self):
components = []
components.append("%s=%s" % (self.key, self.value))
if self.domain is not None:
components.append("Domain=%s" % self.domain)
if self.path is not None:
components.append("Path=%s" % self.path)
if self.max_age is not None:
components.append("Max-Age=%s" % self.max_age)
if self.expires is not None:
components.append("Expires=%s" % Cookie.datetime_to_string(self.expires))
if self.secure:
components.append("Secure")
if self.httponly:
components.append("HttpOnly")
return '; '.join(components)
def get_expiration(self):
'''
Return the effective expiration of the cookie as a datetime
object or None if no expiration is defined. Expiration may be
defined either by the "Expires" timestamp attribute or the
"Max-Age" duration attribute. If both are set "Max-Age" takes
precedence. If neither is set the cookie has no expiration and
None will be returned.
"Max-Age" specifies the number of seconds in the future from when the
cookie is received until it expires. Effectively it means
adding "Max-Age" seconds to a timestamp to arrive at an
expiration. By default the timestamp used to mark the arrival
of the cookie is set to the moment the cookie object is
created. However sometimes it is desirable to adjust the
received timestamp to something other than the moment of
object creation, therefore you can explicitly set the arrival
timestamp used in the "Max-Age" calculation.
"Expires" specifies an explicit timestamp.
If "Max-Age" is set a datetime object is returned which is the
sum of the arrival timestamp and "Max-Age".
If "Expires" is set a datetime object is returned matching the
timestamp specified as the "Expires" value.
If neither is set None is returned.
'''
if self.max_age is not None:
return self.timestamp + datetime.timedelta(seconds=self.max_age)
if self.expires is not None:
return self.expires
return None
    def normalize_expiration(self):
        '''
        Collapse the two possible expiration representations into one.

        An expiration may be specified either with an explicit
        timestamp in the "Expires" attribute or via an offset
        specified with the "Max-Age" attribute. The "Max-Age"
        attribute has precedence over "Expires" if both are
        specified.

        This method normalizes the expiration of the cookie such that
        only an "Expires" attribute remains after consideration of the
        "Max-Age" attribute. This is useful when storing the cookie
        for future reference.

        :returns: the normalized expires value (datetime or None).
        '''
        # get_expiration() already applies the Max-Age-over-Expires
        # precedence rule.
        self.expires = self.get_expiration()
        self.max_age = None
        return self.expires
def set_defaults_from_url(self, url):
'''
If cookie domain and path attributes are not specified then
they assume defaults from the request url the cookie was
received from.
'''
_scheme, domain, path, _params, _query, _fragment = urlparse(url)
if self.domain is None:
self.domain = domain.lower()
if self.path is None:
self.path = self.normalize_url_path(path)
    def normalize(self, url):
        '''
        Fill in missing cookie attributes with defaults derived from
        the request URL, and collapse Max-Age/Expires into a single
        normalized Expires value.

        :param url: the request URL the cookie was received from.
        '''
        self.set_defaults_from_url(url)
        self.normalize_expiration()
def http_cookie(self):
'''
Return a string with just the key and value (no attributes).
This is appropriate for including in a HTTP Cookie header.
'''
return '%s=%s;' % (self.key, self.value)
    def http_return_ok(self, url):
        '''
        Tests to see if a cookie should be returned when a request is
        sent to a specific URL.

        * The request url's host must match the cookie's domain
          otherwise raises Cookie.URLMismatch.

        * The path in the request url must contain the cookie's path
          otherwise raises Cookie.URLMismatch.

        * If the cookie defines an expiration date then the current
          time must be less or equal to the cookie's expiration
          timestamp. Will raise Cookie.Expired if a defined expiration
          is not valid.

        If the test fails Cookie.Expired or Cookie.URLMismatch will be
        raised, otherwise True is returned.

        :param url: the request URL the cookie would be sent to.
        :returns: True when the cookie may be returned.
        :raises Cookie.Expired: when the cookie's expiration has passed.
        :raises Cookie.URLMismatch: on domain/path/scheme mismatch.
        '''
        def domain_valid(url_domain, cookie_domain):
            '''
            Compute domain component and perform test per
            RFC 6265, Section 5.1.3. "Domain Matching"
            '''
            # FIXME: At the moment we can't import from ipalib at the
            # module level because of a dependency loop (cycle) in the
            # import. Our module layout needs to be refactored.
            # pylint: disable=ipa-forbidden-import
            from ipalib.util import validate_domain_name
            # pylint: enable=ipa-forbidden-import
            try:
                validate_domain_name(url_domain)
            except Exception:
                # Not a valid domain name (e.g. an IP address):
                # never matched against cookie domains.
                return False
            if cookie_domain is None:
                # No Domain attribute: cookie matches any host.
                return True
            url_domain = url_domain.lower()
            cookie_domain = cookie_domain.lower()
            if url_domain == cookie_domain:
                return True
            # Suffix match is only allowed for ".example.com"-style
            # cookie domains.
            if url_domain.endswith(cookie_domain):
                if cookie_domain.startswith('.'):
                    return True
            return False

        def path_valid(url_path, cookie_path):
            '''
            Compute path component and perform test per
            RFC 6265, Section 5.1.4. "Paths and Path-Match"
            '''
            if cookie_path is None:
                # No Path attribute: cookie matches any path.
                return True
            cookie_path = cookie_path.lower()
            request_path = self.normalize_url_path(url_path)
            if cookie_path == request_path:
                return True
            if cookie_path and request_path.startswith(cookie_path):
                if cookie_path.endswith('/'):
                    return True
                # Prefix match only counts on a path-segment boundary.
                tail = request_path[len(cookie_path):]
                if tail.startswith('/'):
                    return True
            return False

        cookie_name = self.key
        (
            url_scheme, url_domain, url_path,
            _url_params, _url_query, _url_fragment
        ) = urlparse(url)
        # Expiration check first: an expired cookie is never returned.
        cookie_expiration = self.get_expiration()
        if cookie_expiration is not None:
            now = datetime.datetime.utcnow()
            if cookie_expiration < now:
                raise Cookie.Expired("cookie named '%s'; expired at %s'" %
                                     (cookie_name,
                                      self.datetime_to_string(cookie_expiration)))
        if not domain_valid(url_domain, self.domain):
            raise Cookie.URLMismatch("cookie named '%s'; it's domain '%s' does not match URL domain '%s'" %
                                     (cookie_name, self.domain, url_domain))
        if not path_valid(url_path, self.path):
            raise Cookie.URLMismatch("cookie named '%s'; it's path '%s' does not contain the URL path '%s'" %
                                     (cookie_name, self.path, url_path))
        # Scheme restrictions: HttpOnly limits to HTTP(S), Secure
        # limits to HTTPS only.
        url_scheme = url_scheme.lower()
        if self.httponly:
            if url_scheme not in ('http', 'https'):
                raise Cookie.URLMismatch("cookie named '%s'; is restricted to HTTP but it's URL scheme is '%s'" %
                                         (cookie_name, url_scheme))
        if self.secure:
            if url_scheme not in ('https',):
                raise Cookie.URLMismatch("cookie named '%s'; is restricted to secure transport but it's URL scheme is '%s'" %
                                         (cookie_name, url_scheme))
        return True

1455
ipapython/dn.py Normal file

File diff suppressed because it is too large Load Diff

375
ipapython/dnsutil.py Normal file
View File

@@ -0,0 +1,375 @@
# Authors: Martin Basti <mbasti@redhat.com>
#
# Copyright (C) 2007-2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import logging
import dns.name
import dns.exception
import dns.resolver
import copy
import six
from ipapython.ipautil import UnsafeIPAddress
if six.PY3:
    # Py2/Py3 compatibility: code below refers to "unicode".
    unicode = str

logger = logging.getLogger(__name__)


@six.python_2_unicode_compatible
class DNSName(dns.name.Name):
    '''
    DNS name wrapper around dnspython's dns.name.Name.

    Accepts strings, label tuples or other dns.name.Name objects in
    the constructor, and re-wraps the results of the inherited name
    operations so they stay DNSName instances.
    '''
    labels = None  # make pylint happy

    @classmethod
    def from_text(cls, labels, origin=None):
        # Parse a textual DNS name, optionally relative to origin.
        return cls(dns.name.from_text(labels, origin))

    def __init__(self, labels, origin=None):
        try:
            if isinstance(labels, six.string_types):
                #pylint: disable=E1101
                labels = dns.name.from_text(unicode(labels), origin).labels
            elif isinstance(labels, dns.name.Name):
                labels = labels.labels
            super(DNSName, self).__init__(labels)
        except UnicodeError as e:
            # dnspython bug, an invalid domain name returns the UnicodeError
            # instead of a dns.exception
            raise dns.exception.SyntaxError(e)

    def __bool__(self):
        # dns.name.from_text('@') is represented as an empty label
        # tuple; '@' must still evaluate as a nonzero (truthy) value.
        return True

    __nonzero__ = __bool__  # for Python 2

    def __copy__(self):
        return DNSName(self.labels)

    def __deepcopy__(self, memo):
        return DNSName(copy.deepcopy(self.labels, memo))

    def __str__(self):
        return self.to_unicode()

    # method ToASCII named by RFC 3490 and python standard library
    if six.PY2:
        def ToASCII(self):
            # must be unicode string in Py2
            return self.to_text().decode('ascii')
    else:
        def ToASCII(self):
            return self.to_text()

    # The following wrappers re-wrap dns.name.Name results so callers
    # keep getting DNSName instances.
    def canonicalize(self):
        return DNSName(super(DNSName, self).canonicalize())

    def concatenate(self, other):
        return DNSName(super(DNSName, self).concatenate(other))

    def relativize(self, origin):
        return DNSName(super(DNSName, self).relativize(origin))

    def derelativize(self, origin):
        return DNSName(super(DNSName, self).derelativize(origin))

    def choose_relativity(self, origin=None, relativize=True):
        return DNSName(super(DNSName, self).choose_relativity(origin=origin,
                                                              relativize=relativize))

    def make_absolute(self):
        # Derelativizing against the root makes the name absolute.
        return self.derelativize(self.root)

    def is_idn(self):
        # IDN names use ACE labels with the 'xn--' prefix.
        return any(label.startswith('xn--') for label in self.labels)

    def is_ip4_reverse(self):
        return self.is_subdomain(self.ip4_rev_zone)

    def is_ip6_reverse(self):
        return self.is_subdomain(self.ip6_rev_zone)

    def is_reverse(self):
        return self.is_ip4_reverse() or self.is_ip6_reverse()

    def is_empty(self):
        return len(self.labels) == 0
# DNS public constants, attached to the class after its definition
# because they are DNSName instances themselves.
DNSName.root = DNSName(dns.name.root)  # '.'
DNSName.empty = DNSName(dns.name.empty)  # '@'
DNSName.ip4_rev_zone = DNSName(('in-addr', 'arpa', ''))  # in-addr.arpa.
DNSName.ip6_rev_zone = DNSName(('ip6', 'arpa', ''))  # ip6.arpa.
# Empty zones are defined in various RFCs. BIND serves them by default.
# This constant should contain everything listed in the
# IANA registry "Locally-Served DNS Zones"
# URL: http://www.iana.org/assignments/locally-served-dns-zones
# + the AS112 zone defined in RFC 7534. It is not in the registry for some
# reason, but BIND 9.10 serves it as an automatic empty zone.
EMPTY_ZONES = [DNSName(aez).make_absolute() for aez in [
# RFC 1918
"10.IN-ADDR.ARPA", "16.172.IN-ADDR.ARPA", "17.172.IN-ADDR.ARPA",
"18.172.IN-ADDR.ARPA", "19.172.IN-ADDR.ARPA", "20.172.IN-ADDR.ARPA",
"21.172.IN-ADDR.ARPA", "22.172.IN-ADDR.ARPA", "23.172.IN-ADDR.ARPA",
"24.172.IN-ADDR.ARPA", "25.172.IN-ADDR.ARPA", "26.172.IN-ADDR.ARPA",
"27.172.IN-ADDR.ARPA", "28.172.IN-ADDR.ARPA", "29.172.IN-ADDR.ARPA",
"30.172.IN-ADDR.ARPA", "31.172.IN-ADDR.ARPA", "168.192.IN-ADDR.ARPA",
# RFC 6598
"64.100.IN-ADDR.ARPA", "65.100.IN-ADDR.ARPA", "66.100.IN-ADDR.ARPA",
"67.100.IN-ADDR.ARPA", "68.100.IN-ADDR.ARPA", "69.100.IN-ADDR.ARPA",
"70.100.IN-ADDR.ARPA", "71.100.IN-ADDR.ARPA", "72.100.IN-ADDR.ARPA",
"73.100.IN-ADDR.ARPA", "74.100.IN-ADDR.ARPA", "75.100.IN-ADDR.ARPA",
"76.100.IN-ADDR.ARPA", "77.100.IN-ADDR.ARPA", "78.100.IN-ADDR.ARPA",
"79.100.IN-ADDR.ARPA", "80.100.IN-ADDR.ARPA", "81.100.IN-ADDR.ARPA",
"82.100.IN-ADDR.ARPA", "83.100.IN-ADDR.ARPA", "84.100.IN-ADDR.ARPA",
"85.100.IN-ADDR.ARPA", "86.100.IN-ADDR.ARPA", "87.100.IN-ADDR.ARPA",
"88.100.IN-ADDR.ARPA", "89.100.IN-ADDR.ARPA", "90.100.IN-ADDR.ARPA",
"91.100.IN-ADDR.ARPA", "92.100.IN-ADDR.ARPA", "93.100.IN-ADDR.ARPA",
"94.100.IN-ADDR.ARPA", "95.100.IN-ADDR.ARPA", "96.100.IN-ADDR.ARPA",
"97.100.IN-ADDR.ARPA", "98.100.IN-ADDR.ARPA", "99.100.IN-ADDR.ARPA",
"100.100.IN-ADDR.ARPA", "101.100.IN-ADDR.ARPA",
"102.100.IN-ADDR.ARPA", "103.100.IN-ADDR.ARPA",
"104.100.IN-ADDR.ARPA", "105.100.IN-ADDR.ARPA",
"106.100.IN-ADDR.ARPA", "107.100.IN-ADDR.ARPA",
"108.100.IN-ADDR.ARPA", "109.100.IN-ADDR.ARPA",
"110.100.IN-ADDR.ARPA", "111.100.IN-ADDR.ARPA",
"112.100.IN-ADDR.ARPA", "113.100.IN-ADDR.ARPA",
"114.100.IN-ADDR.ARPA", "115.100.IN-ADDR.ARPA",
"116.100.IN-ADDR.ARPA", "117.100.IN-ADDR.ARPA",
"118.100.IN-ADDR.ARPA", "119.100.IN-ADDR.ARPA",
"120.100.IN-ADDR.ARPA", "121.100.IN-ADDR.ARPA",
"122.100.IN-ADDR.ARPA", "123.100.IN-ADDR.ARPA",
"124.100.IN-ADDR.ARPA", "125.100.IN-ADDR.ARPA",
"126.100.IN-ADDR.ARPA", "127.100.IN-ADDR.ARPA",
# RFC 5735 and RFC 5737
"0.IN-ADDR.ARPA", "127.IN-ADDR.ARPA", "254.169.IN-ADDR.ARPA",
"2.0.192.IN-ADDR.ARPA", "100.51.198.IN-ADDR.ARPA",
"113.0.203.IN-ADDR.ARPA", "255.255.255.255.IN-ADDR.ARPA",
# Local IPv6 Unicast Addresses
"0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.IP6.ARPA",
"1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.IP6.ARPA",
# LOCALLY ASSIGNED LOCAL ADDRESS SCOPE
"D.F.IP6.ARPA", "8.E.F.IP6.ARPA", "9.E.F.IP6.ARPA", "A.E.F.IP6.ARPA",
"B.E.F.IP6.ARPA",
# Example Prefix, RFC 3849.
"8.B.D.0.1.0.0.2.IP6.ARPA",
# RFC 7534
"EMPTY.AS112.ARPA",
]]
def assert_absolute_dnsname(name):
    """Raise AssertionError if name is not DNSName or is not absolute.

    >>> assert_absolute_dnsname(DNSName('absolute.name.example.'))
    >>> assert_absolute_dnsname(DNSName('relative.name.example'))
    Traceback (most recent call last):
    ...
    AssertionError: name must be absolute, ...
    >>> assert_absolute_dnsname('absolute.string.example.')
    Traceback (most recent call last):
    ...
    AssertionError: name must be DNSName instance, ...
    """
    # Type check first so the absoluteness check below can rely on the
    # DNSName API being present.
    assert isinstance(name, DNSName), ("name must be DNSName instance, "
                                       "got '%s'" % type(name))
    assert name.is_absolute(), "name must be absolute, got '%s'" % name
def is_auto_empty_zone(zone):
    """True if specified zone name exactly matches an automatic empty zone.

    >>> is_auto_empty_zone(DNSName('in-addr.arpa.'))
    False
    >>> is_auto_empty_zone(DNSName('10.in-addr.arpa.'))
    True
    >>> is_auto_empty_zone(DNSName('1.10.in-addr.arpa.'))
    False
    >>> is_auto_empty_zone(DNSName('10.in-addr.arpa'))
    Traceback (most recent call last):
    ...
    AssertionError: ...
    """
    assert_absolute_dnsname(zone)
    # Exact match only; subdomains of empty zones do not count here.
    return any(zone == empty_zone for empty_zone in EMPTY_ZONES)
def inside_auto_empty_zone(name):
    """True if specified absolute name is a subdomain of an automatic empty
    zone.

    DNS domain is a subdomain of itself so this function
    returns True for zone apexes, too.

    >>> inside_auto_empty_zone(DNSName('in-addr.arpa.'))
    False
    >>> inside_auto_empty_zone(DNSName('10.in-addr.arpa.'))
    True
    >>> inside_auto_empty_zone(DNSName('1.10.in-addr.arpa.'))
    True
    >>> inside_auto_empty_zone(DNSName('1.10.in-addr.arpa'))
    Traceback (most recent call last):
    ...
    AssertionError: ...
    """
    assert_absolute_dnsname(name)
    # is_subdomain() is reflexive, so zone apexes also match.
    return any(name.is_subdomain(empty_zone) for empty_zone in EMPTY_ZONES)
def related_to_auto_empty_zone(name):
    """True if specified absolute name is a sub/superdomain of an automatic
    empty zone.

    DNS domain is a subdomain of itself so this function
    returns True for zone apexes, too.

    >>> related_to_auto_empty_zone(DNSName('.'))
    True
    >>> related_to_auto_empty_zone(DNSName('in-addr.arpa.'))
    True
    >>> related_to_auto_empty_zone(DNSName('10.in-addr.arpa.'))
    True
    >>> related_to_auto_empty_zone(DNSName('1.10.in-addr.arpa.'))
    True
    >>> related_to_auto_empty_zone(DNSName('unrelated.example.'))
    False
    >>> related_to_auto_empty_zone(DNSName('1.10.in-addr.arpa'))
    Traceback (most recent call last):
    ...
    AssertionError: ...
    """
    assert_absolute_dnsname(name)
    # Accept any containment relation: one side equals or contains
    # the other.
    accepted = {dns.name.NAMERELN_SUBDOMAIN,
                dns.name.NAMERELN_EQUAL,
                dns.name.NAMERELN_SUPERDOMAIN}
    for empty_zone in EMPTY_ZONES:
        relation = name.fullcompare(empty_zone)[0]
        if relation in accepted:
            return True
    return False
def has_empty_zone_addresses(hostname):
    """Detect if given host is using an IP address belonging to
    an automatic empty zone.

    Information from the --ip-address option used at install time is
    lost by the time the upgrade runs, so the host's addresses from
    DNS are used as the best approximation.

    This is brain-dead and duplicates logic from the DNS installer
    but I did not find another way around it.
    """
    for address in resolve_ip_addresses(hostname):
        if inside_auto_empty_zone(DNSName(address.reverse_dns)):
            return True
    return False
def resolve_rrsets(fqdn, rdtypes):
    """
    Get Resource Record sets for given FQDN.
    CNAME chain is followed during resolution
    but CNAMEs are not returned in the resulting rrset.

    :param fqdn: fully qualified domain name, str or DNSName
        (made absolute before querying).
    :param rdtypes: non-empty sequence of record type names,
        e.g. ['A', 'AAAA'].
    :returns:
        list of dns.rrset.RRset objects, can be empty
        if the FQDN does not exist or if none of rrtypes exist
    :raises dns.exception.DNSException: on query failures other than
        NXDOMAIN/NoAnswer.
    """
    # empty set of rdtypes would always return empty set of rrsets
    assert rdtypes, "rdtypes must not be empty"

    if not isinstance(fqdn, DNSName):
        fqdn = DNSName(fqdn)
    fqdn = fqdn.make_absolute()

    rrsets = []
    for rdtype in rdtypes:
        try:
            answer = dns.resolver.query(fqdn, rdtype)
            logger.debug('found %d %s records for %s: %s',
                         len(answer),
                         rdtype,
                         fqdn,
                         ' '.join(str(rr) for rr in answer))
            rrsets.append(answer.rrset)
        except dns.resolver.NXDOMAIN as ex:
            logger.debug('%s', ex)
            # The name itself does not exist, so no other record type
            # can exist for it either.
            break  # no such FQDN, do not iterate
        except dns.resolver.NoAnswer as ex:
            logger.debug('%s', ex)  # record type does not exist for given FQDN
        except dns.exception.DNSException as ex:
            # Any other resolver failure is fatal for the whole lookup.
            logger.error('DNS query for %s %s failed: %s', fqdn, rdtype, ex)
            raise

    return rrsets
def resolve_ip_addresses(fqdn):
    """Get IP addresses from DNS A/AAAA records for given host (using DNS).

    :returns:
        set of IP addresses as UnsafeIPAddress objects
    """
    addresses = set()
    for rrset in resolve_rrsets(fqdn, ['A', 'AAAA']):
        # Each record in the rrset is an A or AAAA rdata object.
        addresses.update(UnsafeIPAddress(record) for record in rrset)
    return addresses
def check_zone_overlap(zone, raise_on_error=True):
    """Check whether a DNS zone already exists somewhere in DNS.

    :param zone: zone name, as a string or a DNSName.
    :param raise_on_error: when True, failures of the check itself
        raise ValueError; otherwise they are only logged.
    :raises ValueError: when the zone already exists in DNS, or when
        the check failed (only if raise_on_error).
    """
    logger.info("Checking DNS domain %s, please wait ...", zone)
    if not isinstance(zone, DNSName):
        # NOTE(review): only string input is made absolute here; a
        # relative DNSName argument is used as-is -- confirm callers
        # always pass absolute DNSName objects.
        zone = DNSName(zone).make_absolute()

    # automatic empty zones always exist so checking them is pointless,
    # do not report them to avoid meaningless error messages
    if is_auto_empty_zone(zone):
        return

    try:
        containing_zone = dns.resolver.zone_for_name(zone)
    except dns.exception.DNSException as e:
        msg = ("DNS check for domain %s failed: %s." % (zone, e))
        if raise_on_error:
            raise ValueError(msg)
        else:
            logger.warning('%s', msg)
            return

    if containing_zone == zone:
        # The queried name is itself a zone apex, i.e. the zone already
        # exists; include its nameservers in the error if resolvable.
        try:
            ns = [ans.to_text() for ans in dns.resolver.query(zone, 'NS')]
        except dns.exception.DNSException as e:
            logger.debug("Failed to resolve nameserver(s) for domain %s: %s",
                         zone, e)
            ns = []

        msg = u"DNS zone {0} already exists in DNS".format(zone)
        if ns:
            msg += u" and is handled by server(s): {0}".format(', '.join(ns))
        raise ValueError(msg)

235
ipapython/dogtag.py Normal file
View File

@@ -0,0 +1,235 @@
# Authors: Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2009 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import collections
import logging
import xml.dom.minidom
import six
# pylint: disable=import-error
from six.moves.urllib.parse import urlencode
# pylint: enable=import-error
# pylint: disable=ipa-forbidden-import
from ipalib import api, errors
from ipalib.util import create_https_connection
from ipalib.errors import NetworkError
from ipalib.text import _
# pylint: enable=ipa-forbidden-import
from ipapython import ipautil
# Python 3 rename. The package is available in "six.moves.http_client", but
# pylint cannot handle classes from that alias
try:
import httplib
except ImportError:
# pylint: disable=import-error
import http.client as httplib
if six.PY3:
unicode = str
logger = logging.getLogger(__name__)
# Lightweight record describing a Dogtag certificate profile:
#   profile_id   -- the Dogtag profile name
#   description  -- human readable description of the profile
#   store_issued -- presumably whether certificates issued under the
#                   profile are stored; TODO confirm semantics
Profile = collections.namedtuple('Profile', ['profile_id', 'description', 'store_issued'])

# Profiles that ship with IPA by default.
INCLUDED_PROFILES = {
    Profile(u'caIPAserviceCert', u'Standard profile for network services', True),
    Profile(u'IECUserRoles', u'User profile that includes IECUserRoles extension from request', True),
    Profile(u'KDCs_PKINIT_Certs',
            u'Profile for PKINIT support by KDCs',
            False),
}

# Profile used when no profile is explicitly requested.
DEFAULT_PROFILE = u'caIPAserviceCert'
# Profile used for KDC PKINIT certificates.
KDC_PROFILE = u'KDCs_PKINIT_Certs'
def error_from_xml(doc, message_template):
    """
    Build a RemoteRetrieveError from a Dogtag XML error document.

    :param doc: parsed XML DOM document expected to contain an
        <Error> element.
    :param message_template: '%s' template used when the <Error>
        element cannot be extracted; filled with the exception text.
    :returns: errors.RemoteRetrieveError (returned, not raised).
    """
    try:
        error_node = doc.getElementsByTagName("Error")[0]
        return errors.RemoteRetrieveError(reason=error_node.childNodes[0].data)
    except Exception as e:
        # Malformed or unexpected document: fall back to the template.
        return errors.RemoteRetrieveError(reason=message_template % e)
def get_ca_certchain(ca_host=None):
    """
    Retrieve the CA Certificate chain from the configured Dogtag server.

    :param ca_host: CA hostname; defaults to api.env.ca_host.
    :returns: base64 chain data from the <ChainBase64> element.
    :raises errors.RemoteRetrieveError: on a non-200 response or when
        the chain cannot be extracted from the response document.
    """
    if ca_host is None:
        ca_host = api.env.ca_host
    chain = None
    conn = httplib.HTTPConnection(
        ca_host,
        api.env.ca_install_port or 8080)
    try:
        conn.request("GET", "/ca/ee/ca/getCertChain")
        res = conn.getresponse()
        if res.status != 200:
            raise errors.RemoteRetrieveError(
                reason=_("request failed with HTTP status %d") % res.status)
        data = res.read()
    finally:
        # Bug fix: the connection was previously closed only on
        # HTTP 200, leaking the socket on error responses.
        conn.close()
    doc = xml.dom.minidom.parseString(data)
    try:
        item_node = doc.getElementsByTagName("ChainBase64")
        chain = item_node[0].childNodes[0].data
    except IndexError:
        raise error_from_xml(
            doc, _("Retrieving CA cert chain failed: %s"))
    finally:
        doc.unlink()
    return chain
def _parse_ca_status(body):
    """
    Extract the CA status string from a Dogtag getStatus XML response.

    :param body: XML text shaped like
        <XMLResponse><Status>...</Status></XMLResponse>
    :returns: the Status text, e.g. 'running'
    :raises errors.RemoteRetrieveError: when the expected elements are
        missing (via error_from_xml).
    """
    doc = xml.dom.minidom.parseString(body)
    try:
        response_node = doc.getElementsByTagName("XMLResponse")[0]
        status_node = response_node.getElementsByTagName("Status")[0]
        return status_node.childNodes[0].data
    except IndexError:
        raise error_from_xml(doc, _("Retrieving CA status failed: %s"))
def ca_status(ca_host=None):
    """Return the status of the CA, and the httpd proxy in front of it

    The returned status can be:
    - running
    - starting
    - Service Temporarily Unavailable
    """
    host = ca_host if ca_host is not None else api.env.ca_host
    status, _headers, body = http_request(
        host, 8080, '/ca/admin/ca/getStatus',
        # timeout: CA sometimes forgot to answer, we have to try again
        timeout=api.env.http_timeout)
    if status == 503:
        # Service temporarily unavailable
        return status
    if status != 200:
        raise errors.RemoteRetrieveError(
            reason=_("Retrieving CA status failed with status %d") % status)
    return _parse_ca_status(body)
def https_request(
        host, port, url, cafile, client_certfile, client_keyfile,
        method='POST', headers=None, body=None, **kw):
    """
    Perform a client authenticated HTTPS request.

    :param method: HTTP request method (default: 'POST')
    :param url: The path (not complete URL!) to post to.
    :param body: The request body (kw is urlencoded when body is None)
    :param kw: Keyword arguments to encode into POST body.
    :return: (http_status, http_headers, http_body)
             as (integer, dict, str)
    """
    if body is None:
        body = urlencode(kw)

    def make_connection(conn_host, conn_port):
        # TLS connection with client certificate authentication,
        # bounded by the configured min/max TLS versions.
        return create_https_connection(
            conn_host, conn_port,
            cafile=cafile,
            client_certfile=client_certfile,
            client_keyfile=client_keyfile,
            tls_version_min=api.env.tls_version_min,
            tls_version_max=api.env.tls_version_max)

    return _httplib_request(
        'https', host, port, url, make_connection, body,
        method=method, headers=headers)
def http_request(host, port, url, timeout=None, **kw):
    """
    Perform a plain HTTP request (POST).

    :param url: The path (not complete URL!) to post to.
    :param timeout: Timeout in seconds for waiting for reply.
    :param kw: Keyword arguments to encode into POST body.
    :return: (http_status, http_headers, http_body)
             as (integer, dict, str)
    """
    # Only pass "timeout" to the connection when one was requested.
    options = {} if timeout is None else {"timeout": timeout}
    return _httplib_request(
        'http', host, port, url, httplib.HTTPConnection, urlencode(kw),
        connection_options=options)
def _httplib_request(
        protocol, host, port, path, connection_factory, request_body,
        method='POST', headers=None, connection_options=None):
    """
    Perform a HTTP(s) request and return the response.

    :param protocol: 'http' or 'https'; used to build the URI that is
        logged and passed to the connection's request().
    :param request_body: Request body
    :param connection_factory: Connection class to use. Will be called
        with the host and port arguments.
    :param method: HTTP request method (default: 'POST')
    :param headers: optional dict of request headers.
    :param connection_options: a dictionary that will be passed to
        connection_factory as keyword arguments.
    :returns: (http_status, http_headers, http_body)
    :raises NetworkError: when the request fails at the transport level.
    """
    if connection_options is None:
        connection_options = {}
    uri = u'%s://%s%s' % (protocol, ipautil.format_netloc(host, port), path)
    logger.debug('request %s %s', method, uri)
    logger.debug('request body %r', request_body)
    headers = headers or {}
    if (
        method == 'POST'
        and 'content-type' not in (str(k).lower() for k in headers)
    ):
        # urlencode()-style POST bodies need the form content type
        # unless the caller supplied one explicitly.
        headers['content-type'] = 'application/x-www-form-urlencoded'
    try:
        conn = connection_factory(host, port, **connection_options)
        # NOTE(review): the full URI (not just the path) is passed as
        # the request target here -- confirm the server/proxy expects
        # absolute-form request targets.
        conn.request(method, uri, body=request_body, headers=headers)
        res = conn.getresponse()
        http_status = res.status
        http_headers = res.msg
        http_body = res.read()
        conn.close()
    except Exception as e:
        # Any transport-level failure is normalized to NetworkError.
        logger.debug("httplib request failed:", exc_info=True)
        raise NetworkError(uri=uri, error=str(e))
    logger.debug('response status %d', http_status)
    logger.debug('response headers %s', http_headers)
    logger.debug('response body %r', http_body)
    return http_status, http_headers, http_body

47
ipapython/errors.py Normal file
View File

@@ -0,0 +1,47 @@
# Authors: Petr Viktorin <pviktori@redhat.com>
#
# Copyright (C) 2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
class SetseboolError(Exception):
    """Raised when setting a SELinux boolean fails.

    :param failed: Dictionary mapping boolean names to their intended
        values, for booleans that could not be set
    :param command: Command the user can run to set the booleans

    The initializer arguments are copied to attributes of the same name.
    """

    def __init__(self, failed, command):
        pairs = ('%s=%s' % (key, val) for key, val in failed.items())
        message = "Could not set SELinux booleans: %s" % ' '.join(pairs)
        super(SetseboolError, self).__init__(message)
        self.failed = failed
        self.command = command

    def format_service_warning(self, service_name):
        """Format warning for display when this is raised from service install
        """
        template = '\n'.join([
            'WARNING: %(err)s',
            '',
            'The %(service)s may not function correctly until ',
            'the booleans are successfully changed with the command:',
            ' %(cmd)s',
            'Try updating the policycoreutils and selinux-policy packages.'
        ])
        return template % {
            'err': self, 'service': service_name, 'cmd': self.command,
        }

88
ipapython/graph.py Normal file
View File

@@ -0,0 +1,88 @@
#
# Copyright (C) 2015-2017 FreeIPA Contributors see COPYING for license
#
from collections import deque
class Graph(object):
    """
    Simple oriented graph structure

    G = (V, E) where G is graph, V set of vertices and E list of edges.
    E = (tail, head) where tail and head are vertices
    """

    def __init__(self):
        self.vertices = set()
        self.edges = []
        # adjacency lists: vertex -> list of head vertices
        self._adj = dict()

    def add_vertex(self, vertex):
        """Add a vertex (with no incident edges) to the graph."""
        self.vertices.add(vertex)
        self._adj[vertex] = []

    def add_edge(self, tail, head):
        """Add oriented edge (tail, head); both ends must already be vertices.

        :raises ValueError: if tail or head is not a vertex
        """
        if tail not in self.vertices:
            raise ValueError("tail is not a vertex")
        if head not in self.vertices:
            raise ValueError("head is not a vertex")
        self.edges.append((tail, head))
        self._adj[tail].append(head)

    def remove_edge(self, tail, head):
        """Remove the oriented edge (tail, head).

        :raises ValueError: if the graph does not contain the edge
        """
        try:
            self.edges.remove((tail, head))
        except ValueError:
            # list.remove() raises ValueError (not KeyError) on a missing
            # element; catching the correct type makes this diagnostic
            # actually reachable.
            raise ValueError(
                "graph does not contain edge: ({0}, {1})".format(tail, head)
            )
        self._adj[tail].remove(head)

    def remove_vertex(self, vertex):
        """Remove a vertex together with all edges incident to it.

        :raises ValueError: if the graph does not contain the vertex
        """
        try:
            self.vertices.remove(vertex)
        except KeyError:
            raise ValueError(
                "graph does not contain vertex: {0}".format(vertex)
            )
        # delete _adjacencies
        del self._adj[vertex]
        for adj in self._adj.values():
            adj[:] = [v for v in adj if v != vertex]
        # delete edges
        self.edges = [
            e for e in self.edges if e[0] != vertex and e[1] != vertex
        ]

    def get_tails(self, head):
        """
        Get list of vertices where a vertex is on the right side of an edge
        """
        return [e[0] for e in self.edges if e[1] == head]

    def get_heads(self, tail):
        """
        Get list of vertices where a vertex is on the left side of an edge
        """
        return [e[1] for e in self.edges if e[0] == tail]

    def bfs(self, start=None):
        """
        Breadth-first search traversal of the graph from `start` vertex.
        Return a set of all visited vertices
        """
        if start is None:
            # Compare against None (not truthiness) so that falsy vertices
            # such as 0 or '' can be used as an explicit start.
            start = next(iter(self.vertices))
        visited = set()
        queue = deque([start])
        while queue:
            vertex = queue.popleft()
            if vertex not in visited:
                visited.add(vertex)
                queue.extend(set(self._adj.get(vertex, [])) - visited)
        return visited

View File

@@ -0,0 +1,7 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Installer framework.
"""

359
ipapython/install/cli.py Normal file
View File

@@ -0,0 +1,359 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Command line support.
"""
import collections
import enum
import logging
import optparse # pylint: disable=deprecated-module
import signal
import six
from ipapython import admintool
from ipapython.ipa_log_manager import standard_logging_setup
from ipapython.ipautil import (CheckedIPAddress, CheckedIPAddressLoopback,
private_ccache)
from . import core, common
__all__ = ['install_tool', 'uninstall_tool']

# Python 2/3 compatibility: Python 3 has no `long`; alias it to `int` so the
# knob-type dispatch in ConfigureTool.add_options() works on both versions.
if six.PY3:
    long = int

# Shorthand for the type of None, used to recognize boolean-flag knobs.
NoneType = type(None)

logger = logging.getLogger(__name__)
def _get_usage(configurable_class):
usage = '%prog [options]'
for owner_cls, name in configurable_class.knobs():
knob_cls = getattr(owner_cls, name)
if knob_cls.is_cli_positional():
if knob_cls.cli_metavar is not None:
metavar = knob_cls.cli_metavar
elif knob_cls.cli_names:
metavar = knob_cls.cli_names[0].upper()
else:
metavar = name.replace('_', '-').upper()
try:
knob_cls.default
except AttributeError:
fmt = ' {}'
else:
fmt = ' [{}]'
usage += fmt.format(metavar)
return usage
def install_tool(configurable_class, command_name, log_file_name,
                 debug_option=False, verbose=False, console_format=None,
                 use_private_ccache=True, uninstall_log_file_name=None):
    """Create an InstallTool subclass wired to *configurable_class*.

    When *uninstall_log_file_name* is given, the generated tool also
    carries the keyword arguments needed to build its matching
    uninstaller (consumed by the ``--uninstall`` option).
    """
    if uninstall_log_file_name is None:
        uninstall_kwargs = None
    else:
        uninstall_kwargs = {
            'configurable_class': configurable_class,
            'command_name': command_name,
            'log_file_name': uninstall_log_file_name,
            'debug_option': debug_option,
            'verbose': verbose,
            'console_format': console_format,
        }

    class_dict = {
        'configurable_class': configurable_class,
        'command_name': command_name,
        'log_file_name': log_file_name,
        'usage': _get_usage(configurable_class),
        'debug_option': debug_option,
        'verbose': verbose,
        'console_format': console_format,
        'uninstall_kwargs': uninstall_kwargs,
        'use_private_ccache': use_private_ccache,
    }
    return type(
        'install_tool({0})'.format(configurable_class.__name__),
        (InstallTool,),
        class_dict,
    )
def uninstall_tool(configurable_class, command_name, log_file_name,
                   debug_option=False, verbose=False, console_format=None):
    """Create an UninstallTool subclass wired to *configurable_class*."""
    class_dict = {
        'configurable_class': configurable_class,
        'command_name': command_name,
        'log_file_name': log_file_name,
        'usage': _get_usage(configurable_class),
        'debug_option': debug_option,
        'verbose': verbose,
        'console_format': console_format,
    }
    return type(
        'uninstall_tool({0})'.format(configurable_class.__name__),
        (UninstallTool,),
        class_dict,
    )
class ConfigureTool(admintool.AdminTool):
    """Admin tool that turns a Configurable's knobs into CLI options,
    parses them, instantiates the (transformed) configurable and runs it.

    Subclasses must provide ``_transform`` (e.g. ``common.installer`` or
    ``common.uninstaller``) and set ``configurable_class``.
    """

    # class-level defaults; generated subclasses override these
    configurable_class = None
    debug_option = False
    verbose = False
    console_format = None
    use_private_ccache = True

    @staticmethod
    def _transform(configurable_class):
        # Map the configurable to its install/uninstall variant;
        # supplied by InstallTool/UninstallTool subclasses.
        raise NotImplementedError

    @classmethod
    def add_options(cls, parser, positional=False):
        """Add an optparse option (or positional placeholder) per knob.

        With positional=False only option-style knobs are added; with
        positional=True only positional knobs (used by the fake parser
        in __init__ to process positional arguments).
        """
        transformed_cls = cls._transform(cls.configurable_class)

        if issubclass(transformed_cls, common.Interactive):
            parser.add_option(
                '-U', '--unattended',
                dest='unattended',
                default=False,
                action='store_true',
                help="unattended (un)installation never prompts the user",
            )

        groups = collections.OrderedDict()
        # if no group is defined, add the option to the parser top level
        groups[None] = parser

        for owner_cls, name in transformed_cls.knobs():
            knob_cls = getattr(owner_cls, name)
            # only handle the requested kind (option vs positional)
            if knob_cls.is_cli_positional() is not positional:
                continue

            group_cls = knob_cls.group()
            try:
                opt_group = groups[group_cls]
            except KeyError:
                # first knob of this group: create and register the
                # optparse OptionGroup lazily
                opt_group = groups[group_cls] = optparse.OptionGroup(
                    parser, "{0} options".format(group_cls.description))
                parser.add_option_group(opt_group)

            knob_type = knob_cls.type
            if issubclass(knob_type, list):
                try:
                    # typing.List[X].__parameters__ == (X,)
                    knob_scalar_type = knob_type.__parameters__[0]
                except AttributeError:
                    knob_scalar_type = str
            else:
                knob_scalar_type = knob_type

            # Map the knob's scalar type to an optparse option type.
            kwargs = dict()
            if knob_scalar_type is NoneType:
                # valueless flag (boolean switch)
                kwargs['type'] = None
                kwargs['const'] = True
                kwargs['default'] = False
            elif knob_scalar_type is str:
                kwargs['type'] = 'string'
            elif knob_scalar_type is int:
                kwargs['type'] = 'int'
            elif knob_scalar_type is long:
                kwargs['type'] = 'long'
            elif knob_scalar_type is CheckedIPAddressLoopback:
                kwargs['type'] = 'ip_with_loopback'
            elif knob_scalar_type is CheckedIPAddress:
                kwargs['type'] = 'ip'
            elif issubclass(knob_scalar_type, enum.Enum):
                kwargs['type'] = 'choice'
                kwargs['choices'] = [i.value for i in knob_scalar_type]
                kwargs['metavar'] = "{{{0}}}".format(
                    ",".join(kwargs['choices']))
            else:
                # arbitrary types: let optparse call the type itself
                kwargs['type'] = 'constructor'
                kwargs['constructor'] = knob_scalar_type
            kwargs['dest'] = name
            # list-typed knobs accumulate values, scalar knobs store one
            if issubclass(knob_type, list):
                if kwargs['type'] is None:
                    kwargs['action'] = 'append_const'
                else:
                    kwargs['action'] = 'append'
            else:
                if kwargs['type'] is None:
                    kwargs['action'] = 'store_const'
                else:
                    kwargs['action'] = 'store'
            if knob_cls.sensitive:
                kwargs['sensitive'] = True
            if knob_cls.cli_metavar:
                kwargs['metavar'] = knob_cls.cli_metavar

            # Current names are shown in --help; deprecated names are
            # registered but hidden (SUPPRESS_HELP).
            if not positional:
                cli_info = (
                    (knob_cls.deprecated, knob_cls.cli_names),
                    (True, knob_cls.cli_deprecated_names),
                )
            else:
                cli_info = (
                    (knob_cls.deprecated, (None,)),
                )
            for hidden, cli_names in cli_info:
                opt_strs = []
                for cli_name in cli_names:
                    if cli_name is None:
                        # derive the long option from the knob name
                        cli_name = '--{}'.format(name.replace('_', '-'))
                    opt_strs.append(cli_name)
                if not opt_strs:
                    continue

                if not hidden:
                    help = knob_cls.description
                else:
                    help = optparse.SUPPRESS_HELP

                opt_group.add_option(
                    *opt_strs,
                    help=help,
                    **kwargs
                )

        super(ConfigureTool, cls).add_options(parser,
                                              debug_option=cls.debug_option)

    def __init__(self, options, args):
        super(ConfigureTool, self).__init__(options, args)

        self.transformed_cls = self._transform(self.configurable_class)
        # names of knobs consumed from positional CLI arguments, in order
        self.positional_arguments = []

        for owner_cls, name in self.transformed_cls.knobs():
            knob_cls = getattr(owner_cls, name)
            if knob_cls.is_cli_positional():
                self.positional_arguments.append(name)

        # fake option parser to parse positional arguments
        # (because optparse does not support positional argument parsing)
        fake_option_parser = optparse.OptionParser()
        self.add_options(fake_option_parser, True)

        fake_option_map = {option.dest: option
                           for group in fake_option_parser.option_groups
                           for option in group.option_list}

        for index, name in enumerate(self.positional_arguments):
            try:
                # consume positional args left over after option parsing
                value = self.args.pop(0)
            except IndexError:
                break

            fake_option = fake_option_map[name]
            # run the option's type-check/store machinery on the value,
            # reporting errors as "argument N"
            fake_option.process('argument {}'.format(index + 1),
                                value,
                                self.options,
                                self.option_parser)

    def validate_options(self, needs_root=True):
        """Reject any CLI arguments not consumed as positionals."""
        super(ConfigureTool, self).validate_options(needs_root=needs_root)

        if self.args:
            self.option_parser.error("Too many arguments provided")

    def _setup_logging(self, log_file_mode='w', no_file=False):
        # Precedence: no_file flag > --log-file option > class default.
        if no_file:
            log_file_name = None
        elif self.options.log_file:
            log_file_name = self.options.log_file
        else:
            log_file_name = self.log_file_name
        standard_logging_setup(
            log_file_name,
            verbose=self.verbose,
            debug=self.options.verbose,
            console_format=self.console_format)
        if log_file_name:
            logger.debug('Logging to %s', log_file_name)
        elif not no_file:
            logger.debug('Not logging to a file')

    def run(self):
        """Collect option values into kwargs, build the configurable,
        translate knob errors into parser errors, and run it."""
        kwargs = {}

        transformed_cls = self._transform(self.configurable_class)
        knob_classes = {n: getattr(c, n) for c, n in transformed_cls.knobs()}
        for name in knob_classes:
            value = getattr(self.options, name, None)
            if value is not None:
                kwargs[name] = value

        if (issubclass(self.configurable_class, common.Interactive) and
                not self.options.unattended):
            kwargs['interactive'] = True

        try:
            cfgr = transformed_cls(**kwargs)
        except core.KnobValueError as e:
            # Point the user at the offending option or positional argument.
            knob_cls = knob_classes[e.name]
            try:
                index = self.positional_arguments.index(e.name)
            except ValueError:
                cli_name = knob_cls.cli_names[0] or e.name.replace('_', '-')
                desc = "option {0}".format(cli_name)
            else:
                desc = "argument {0}".format(index + 1)
            self.option_parser.error("{0}: {1}".format(desc, e))
        except RuntimeError as e:
            self.option_parser.error(str(e))

        # Translate SIGTERM into KeyboardInterrupt for clean shutdown.
        signal.signal(signal.SIGTERM, self.__signal_handler)

        if self.use_private_ccache:
            with private_ccache():
                super(ConfigureTool, self).run()
                cfgr.run()
        else:
            super(ConfigureTool, self).run()
            cfgr.run()

    @staticmethod
    def __signal_handler(signum, frame):
        raise KeyboardInterrupt
class InstallTool(ConfigureTool):
    """ConfigureTool that drives the install transform of a Configurable."""

    # keyword arguments for building the matching uninstall tool,
    # or None when no uninstaller is available
    uninstall_kwargs = None

    _transform = staticmethod(common.installer)

    @classmethod
    def add_options(cls, parser, positional=False):
        super(InstallTool, cls).add_options(parser, positional)

        if cls.uninstall_kwargs is None:
            return
        parser.add_option(
            '--uninstall',
            dest='uninstall',
            default=False,
            action='store_true',
            help=("uninstall an existing installation. The uninstall can "
                  "be run with --unattended option"),
        )

    @classmethod
    def get_command_class(cls, options, args):
        # Without --uninstall (or without uninstall support), behave as a
        # regular install tool.
        if cls.uninstall_kwargs is None or not options.uninstall:
            return super(InstallTool, cls).get_command_class(options, args)
        uninstall_cls = uninstall_tool(**cls.uninstall_kwargs)
        uninstall_cls.option_parser = cls.option_parser
        return uninstall_cls
class UninstallTool(ConfigureTool):
    """ConfigureTool that drives the uninstall transform of a Configurable."""
    _transform = staticmethod(common.uninstaller)

118
ipapython/install/common.py Normal file
View File

@@ -0,0 +1,118 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Common stuff.
"""
import logging
import traceback
from . import core
from .util import from_
__all__ = ['step', 'Installable', 'Interactive', 'Continuous', 'installer',
'uninstaller']
logger = logging.getLogger(__name__)
def step():
    """Decorator factory turning a generator function into a Step component.

    The decorated function becomes the step's installer; the result is a
    component class usable as an attribute of a Composite configurable.
    """
    def decorator(func):
        component_cls = core.Component(Step)
        component_cls._installer = staticmethod(func)
        return component_cls

    return decorator
class Installable(core.Configurable):
    """
    Configurable which does install or uninstall.

    The ``uninstalling`` property selects which of the two code paths
    (``_install`` or ``_uninstall``) ``_configure`` delegates to.
    """
    uninstalling = core.Property(False)

    def _get_components(self):
        # Child components run in reverse order when uninstalling.
        components = super(Installable, self)._get_components()
        if not self.uninstalling:
            return components
        return reversed(list(components))

    def _configure(self):
        if self.uninstalling:
            return self._uninstall()
        return self._install()

    def _install(self):
        assert not hasattr(super(Installable, self), '_install')
        return super(Installable, self)._configure()

    def _uninstall(self):
        assert not hasattr(super(Installable, self), '_uninstall')
        return super(Installable, self)._configure()
class Step(Installable):
    """A single install/uninstall step created by the @step() decorator.

    The installer/uninstaller is a generator; each of its yields lets the
    framework advance the surrounding validate/execute phases.
    """

    @property
    def parent(self):
        # Overridden by Component binding; a bare Step has no parent.
        raise AttributeError('parent')

    def _install(self):
        # For every yield of the user-supplied installer, delegate one
        # round to the base class's configure coroutine.
        for _nothing in self._installer(self.parent):
            yield from_(super(Step, self)._install())

    @staticmethod
    def _installer(obj):
        # default no-op installer; replaced by @step()
        yield

    def _uninstall(self):
        for _nothing in self._uninstaller(self.parent):
            yield from_(super(Step, self)._uninstall())

    @staticmethod
    def _uninstaller(obj):
        # default no-op uninstaller; replaced via Step.uninstaller()
        yield

    @classmethod
    def uninstaller(cls, func):
        """Class decorator registering *func* as this step's uninstaller."""
        cls._uninstaller = staticmethod(func)
        return cls
class Interactive(core.Configurable):
    """Mix-in adding an ``interactive`` property (default False)."""
    interactive = core.Property(False)
class Continuous(core.Configurable):
    """Configurable whose execution continues past failing steps.

    Exceptions raised during the execute phase are logged rather than
    propagated.
    """

    def _handle_execute_exception(self, exc_info):
        try:
            super(Continuous, self)._handle_execute_exception(exc_info)
        except BaseException as exc:
            # Full traceback at debug level; user-visible error only for
            # ordinary Exceptions.
            logger.debug("%s", traceback.format_exc())
            if isinstance(exc, Exception):
                logger.error("%s", exc)
def installer(cls):
    """Wrap *cls* into a subclass fixed to install mode."""
    class Installer(cls, Installable):
        def __init__(self, **kwargs):
            # force install mode regardless of caller-supplied kwargs
            super(Installer, self).__init__(uninstalling=False, **kwargs)

    Installer.__name__ = 'installer({0})'.format(cls.__name__)
    return Installer
def uninstaller(cls):
    """Wrap *cls* into a subclass fixed to uninstall mode.

    Continuous is mixed in so uninstall keeps going past step failures.
    """
    class Uninstaller(Continuous, cls, Installable):
        def __init__(self, **kwargs):
            # force uninstall mode regardless of caller-supplied kwargs
            super(Uninstaller, self).__init__(uninstalling=True, **kwargs)

    Uninstaller.__name__ = 'uninstaller({0})'.format(cls.__name__)
    return Uninstaller

663
ipapython/install/core.py Normal file
View File

@@ -0,0 +1,663 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
The framework core.
"""
import abc
import collections
import functools
import itertools
import sys
import six
from . import util
from .util import from_
__all__ = ['InvalidStateError', 'KnobValueError', 'Property', 'knob',
           'Configurable', 'group', 'Component', 'Composite']

NoneType = type(None)

# `type` is shadowed by parameters named `type` in the knob helpers below;
# keep a reference to the builtin under another name.
builtin_type = type

# Configurable states
_VALIDATE_PENDING = 'VALIDATE_PENDING'
_VALIDATE_RUNNING = 'VALIDATE_RUNNING'
_EXECUTE_PENDING = 'EXECUTE_PENDING'
_EXECUTE_RUNNING = 'EXECUTE_RUNNING'
_STOPPED = 'STOPPED'
_FAILED = 'FAILED'
_CLOSED = 'CLOSED'

# Unique sentinel distinguishing "argument not given" from None.
_missing = object()

# Monotonic counter preserving definition order of knobs/components.
_counter = itertools.count()
@functools.cmp_to_key
def _class_key(a, b):
if a is b:
return 0
elif issubclass(a, b):
return -1
elif issubclass(b, a):
return 1
else:
return 0
class InvalidStateError(Exception):
    """Raised when a Configurable phase is entered from the wrong state."""
    pass
class KnobValueError(ValueError):
    """ValueError that remembers which knob received the invalid value."""

    def __init__(self, name, message):
        super(KnobValueError, self).__init__(message)
        # name of the offending knob, used by CLI error reporting
        self.name = name
class PropertyBase(six.with_metaclass(util.InnerClassMeta, object)):
    """Descriptor storing a per-instance value with a fallback chain.

    Reads walk the owner instance and its `_get_fallback()` chain before
    falling back to the class-level `default`.
    """

    # shut up pylint
    __outer_class__ = None
    __outer_name__ = None

    # definition-order index, set by knob()/Component() factories
    _order = None

    @property
    def default(self):
        # No default unless a subclass provides one.
        raise AttributeError('default')

    def __init__(self, outer):
        pass

    def __get__(self, obj, obj_type):
        # Search the instance, then each fallback object in turn, for a
        # stored value under this property's name.
        while obj is not None:
            try:
                return obj.__dict__[self.__outer_name__]
            except KeyError:
                pass
            obj = obj._get_fallback()

        try:
            return self.default
        except AttributeError:
            # translate the internal 'default' miss into a miss on the
            # property's own name
            raise AttributeError(self.__outer_name__)

    def __set__(self, obj, value):
        # NOTE(review): dict assignment cannot raise KeyError, so this
        # except branch looks unreachable — confirm before relying on it.
        try:
            obj.__dict__[self.__outer_name__] = value
        except KeyError:
            raise AttributeError(self.__outer_name__)

    def __delete__(self, obj):
        try:
            del obj.__dict__[self.__outer_name__]
        except KeyError:
            raise AttributeError(self.__outer_name__)
def Property(default=_missing):
    """Create a new inner-class property, optionally with a default value."""
    members = {} if default is _missing else {'default': default}
    return util.InnerClassMeta('Property', (PropertyBase,), members)
class KnobBase(PropertyBase):
    """Property that is also exposed as a CLI option/argument.

    Class attributes describe the CLI mapping; the `default_getter` and
    `validator` classmethods attach behavior to the knob class itself.
    """

    # value type used for optparse type dispatch (None -> boolean flag)
    type = None
    # sensitive values are masked in logs by the option machinery
    sensitive = False
    deprecated = False
    description = None
    # (None,) means "derive a --long-option from the knob name"
    cli_names = (None,)
    cli_deprecated_names = ()
    cli_metavar = None

    def __init__(self, outer):
        self.outer = outer

    def validate(self, value):
        # default: accept anything; extended via @validator
        pass

    @classmethod
    def group(cls):
        # delegate to the owning configurable's option group
        return cls.__outer_class__.group()

    @classmethod
    def is_cli_positional(cls):
        # positional iff every CLI name is a bare word (no leading dash)
        return all(n is not None and not n.startswith('-')
                   for n in cls.cli_names)

    @classmethod
    def default_getter(cls, func):
        """Decorator installing *func* as this knob's computed default."""
        @property
        def default(self):
            return func(self.outer)
        cls.default = default

        return cls

    @classmethod
    def validator(cls, func):
        """Decorator chaining *func* in front of the inherited validation."""
        def validate(self, value):
            func(self.outer, value)
            # then run any validation inherited from base knob classes
            super(cls, self).validate(value)
        cls.validate = validate

        return cls
def _knob(type=_missing, default=_missing, bases=_missing, _order=_missing,
          sensitive=_missing, deprecated=_missing, description=_missing,
          group=_missing, cli_names=_missing, cli_deprecated_names=_missing,
          cli_metavar=_missing):
    """Build a new Knob inner class from the given attribute overrides.

    Arguments left at the `_missing` sentinel are omitted from the class
    dict entirely, so the attribute is inherited from *bases*.
    """
    # Normalize the shortcuts accepted by callers.
    if type is None:
        type = NoneType
    if bases is _missing:
        bases = (KnobBase,)
    elif isinstance(bases, builtin_type):
        bases = (bases,)
    if cli_names is None or isinstance(cli_names, str):
        cli_names = (cli_names,)
    elif cli_names is not _missing:
        cli_names = tuple(cli_names)
    if isinstance(cli_deprecated_names, str):
        cli_deprecated_names = (cli_deprecated_names,)
    elif cli_deprecated_names is not _missing:
        cli_deprecated_names = tuple(cli_deprecated_names)

    attributes = (
        ('type', type),
        ('default', default),
        ('_order', _order),
        ('sensitive', sensitive),
        ('deprecated', deprecated),
        ('description', description),
        ('group', group),
        ('cli_names', cli_names),
        ('cli_deprecated_names', cli_deprecated_names),
        ('cli_metavar', cli_metavar),
    )
    class_dict = {key: value for key, value in attributes
                  if value is not _missing}

    return util.InnerClassMeta('Knob', bases, class_dict)
def knob(type, default=_missing, **kwargs):
    """
    Define a new knob.

    The knob is stamped with the next definition-order counter so it is
    listed in source order by Configurable.knobs().
    """
    return _knob(type, default, _order=next(_counter), **kwargs)
def extend_knob(base, default=_missing, bases=_missing, group=_missing,
                **kwargs):
    """
    Extend an existing knob.

    The new knob inherits from *base* (unless explicit *bases* are given)
    and keeps the base knob's option group and ordering by default.
    """
    if bases is _missing:
        bases = (base,)
    if group is _missing:
        group = staticmethod(base.group)

    return _knob(_missing, default, bases=bases, _order=_missing,
                 group=group, **kwargs)
class Configurable(six.with_metaclass(abc.ABCMeta, object)):
    """
    Base class of all configurables.

    A configurable is driven as a coroutine (`_configure`) through a small
    state machine: VALIDATE_PENDING -> VALIDATE_RUNNING -> EXECUTE_PENDING
    -> EXECUTE_RUNNING -> STOPPED/FAILED/CLOSED.

    FIXME: details of validate/execute, properties and knobs
    """

    @classmethod
    def properties(cls):
        """
        Iterate over properties defined for the configurable.

        Yields (owner_class, name) pairs; within each class of the MRO,
        properties are ordered by their definition-order counter.
        """
        assert not hasattr(super(Configurable, cls), 'properties')

        seen = set()

        for owner_cls in cls.__mro__:
            result = []

            for name, prop_cls in owner_cls.__dict__.items():
                # each name is reported only once, for its most derived owner
                if name in seen:
                    continue
                seen.add(name)

                if not isinstance(prop_cls, type):
                    continue
                if not issubclass(prop_cls, PropertyBase):
                    continue

                result.append((prop_cls._order, owner_cls, name))

            result = sorted(result, key=lambda r: r[0])

            for _order, owner_cls, name in result:
                yield owner_cls, name

    @classmethod
    def knobs(cls):
        """Iterate over the subset of properties that are knobs."""
        for owner_cls, name in cls.properties():
            prop_cls = getattr(owner_cls, name)
            if issubclass(prop_cls, KnobBase):
                yield owner_cls, name

    @classmethod
    def group(cls):
        # no option group by default; overridden by the @group decorator
        assert not hasattr(super(Configurable, cls), 'group')
        return None

    def __init__(self, **kwargs):
        """
        Initialize the configurable.

        Keyword arguments matching property names are stored on the
        instance; knob values are validated; leftovers raise TypeError.
        """
        cls = self.__class__
        for owner_cls, name in cls.properties():
            if name.startswith('_'):
                continue
            prop_cls = getattr(owner_cls, name)
            if not isinstance(prop_cls, type):
                continue
            if not issubclass(prop_cls, PropertyBase):
                continue
            try:
                value = kwargs.pop(name)
            except KeyError:
                pass
            else:
                setattr(self, name, value)

        for owner_cls, name in cls.knobs():
            if name.startswith('_'):
                continue
            if not isinstance(self, owner_cls):
                continue
            value = getattr(self, name, None)
            if value is None:
                continue

            prop_cls = getattr(owner_cls, name)
            prop = prop_cls(self)
            try:
                prop.validate(value)
            except KnobValueError:
                raise
            except ValueError as e:
                # attach the knob name so callers can point at the option
                raise KnobValueError(name, str(e))

        if kwargs:
            extra = sorted(kwargs)
            raise TypeError(
                "{0}() got {1} unexpected keyword arguments: {2}".format(
                    type(self).__name__,
                    len(extra),
                    ', '.join(repr(name) for name in extra)))

        self._reset()

    def _reset(self):
        # (Re)arm the state machine and the underlying coroutine.
        assert not hasattr(super(Configurable, self), '_reset')
        self.__state = _VALIDATE_PENDING
        self.__gen = util.run_generator_with_yield_from(self._configure())

    def _get_components(self):
        assert not hasattr(super(Configurable, self), '_get_components')
        raise TypeError("{0} is not composite".format(self))

    def _get_fallback(self):
        # no fallback object for property lookups by default
        return None

    @abc.abstractmethod
    def _configure(self):
        """
        Coroutine which defines the logic of the configurable.

        The base implementation marks the validate phase done and idles
        until the execute phase begins.
        """
        assert not hasattr(super(Configurable, self), '_configure')

        self.__transition(_VALIDATE_RUNNING, _EXECUTE_PENDING)

        while self.__state != _EXECUTE_RUNNING:
            yield

    def run(self):
        """
        Run the configurable.
        """
        self.validate()
        if self.__state == _EXECUTE_PENDING:
            self.execute()

    def validate(self):
        """
        Run the validation part of the configurable.
        """
        for _nothing in self._validator():
            pass

    def _validator(self):
        """
        Coroutine which runs the validation part of the configurable.
        """
        return self.__runner(_VALIDATE_PENDING,
                             _VALIDATE_RUNNING,
                             self._handle_validate_exception)

    def execute(self):
        """
        Run the execution part of the configurable.
        """
        for _nothing in self._executor():
            pass

    def _executor(self):
        """
        Coroutine which runs the execution part of the configurable.
        """
        return self.__runner(_EXECUTE_PENDING,
                             _EXECUTE_RUNNING,
                             self._handle_execute_exception)

    def done(self):
        """
        Return True if the configurable has finished.
        """
        return self.__state in (_STOPPED, _FAILED, _CLOSED)

    def run_until_executing(self, gen):
        # Drain *gen* only while we are still in a pre-execute state.
        while self.__state != _EXECUTE_RUNNING:
            try:
                yield next(gen)
            except StopIteration:
                break

    def __runner(self, pending_state, running_state, exc_handler):
        # Drive self.__gen for one phase, routing exceptions through
        # exc_handler and recording the terminal state.
        self.__transition(pending_state, running_state)

        step = lambda: next(self.__gen)
        while True:
            try:
                step()
            except StopIteration:
                self.__transition(running_state, _STOPPED)
                break
            except GeneratorExit:
                self.__transition(running_state, _CLOSED)
                break
            except BaseException:
                exc_info = sys.exc_info()
                try:
                    exc_handler(exc_info)
                except BaseException:
                    self.__transition(running_state, _FAILED)
                    raise

            if self.__state != running_state:
                # the coroutine moved the machine to the next phase
                break

            try:
                yield
            except BaseException:
                # forward exceptions injected by our consumer into the
                # underlying coroutine on the next step
                exc_info = sys.exc_info()
                step = lambda: self.__gen.throw(*exc_info)
            else:
                step = lambda: next(self.__gen)

    def _handle_exception(self, exc_info):
        # default: re-raise unchanged
        assert not hasattr(super(Configurable, self), '_handle_exception')
        six.reraise(*exc_info)

    def _handle_validate_exception(self, exc_info):
        assert not hasattr(super(Configurable, self),
                           '_handle_validate_exception')
        self._handle_exception(exc_info)

    def _handle_execute_exception(self, exc_info):
        assert not hasattr(super(Configurable, self),
                           '_handle_execute_exception')
        self._handle_exception(exc_info)

    def __transition(self, from_state, to_state):
        # enforce legal state-machine moves
        if self.__state != from_state:
            raise InvalidStateError(self.__state)

        self.__state = to_state
def group(cls):
    """Class decorator making *cls* the option group for its own knobs.

    Installs a ``group()`` static method returning the class itself and
    returns the class otherwise unchanged.
    """
    def group():
        return cls

    cls.group = staticmethod(group)
    return cls
class ComponentMeta(util.InnerClassMeta, abc.ABCMeta):
    """Metaclass combining inner-class binding with ABC support."""
    pass
class ComponentBase(six.with_metaclass(ComponentMeta, Configurable)):
    """Configurable that lives as an attribute of a parent Composite.

    Property lookups fall back to the parent, and unhandled exceptions
    are forwarded to the parent's exception handler.
    """

    # shut up pylint
    __outer_class__ = None
    __outer_name__ = None

    # definition-order index, set by the Component() factory
    _order = None

    @classmethod
    def group(cls):
        # own group first, otherwise inherit the parent class's group
        result = super(ComponentBase, cls).group()
        if result is not None:
            return result
        else:
            return cls.__outer_class__.group()

    def __init__(self, parent, **kwargs):
        self.__parent = parent

        super(ComponentBase, self).__init__(**kwargs)

    @property
    def parent(self):
        return self.__parent

    def __get__(self, obj, obj_type):
        # cache the bound component on the owner instance
        obj.__dict__[self.__outer_name__] = self
        return self

    def _get_fallback(self):
        # property reads fall through to the parent configurable
        return self.__parent

    def _handle_exception(self, exc_info):
        try:
            super(ComponentBase, self)._handle_exception(exc_info)
        except BaseException:
            # let the parent decide what to do with whatever escaped
            exc_info = sys.exc_info()
            self.__parent._handle_exception(exc_info)
def Component(cls):
    """Wrap *cls* into a component class usable on a Composite.

    The result is stamped with the next definition-order counter so
    components run in source order.
    """
    return ComponentMeta(
        'Component',
        (ComponentBase, cls),
        {'_order': next(_counter)},
    )
class Composite(Configurable):
    """
    Configurable composed of any number of components.

    Provides knobs of all child components.
    """

    @classmethod
    def properties(cls):
        # Merge own properties with the knobs of every component,
        # resolving name collisions in favor of the more derived knob.
        name_dict = {}
        owner_dict = collections.OrderedDict()

        for owner_cls, name in super(Composite, cls).properties():
            name_dict[name] = owner_cls
            owner_dict.setdefault(owner_cls, []).append(name)

        for owner_cls, name in cls.components():
            comp_cls = getattr(cls, name)

            # note: deliberately reuses owner_cls/name for the inner loop
            for owner_cls, name in comp_cls.knobs():
                if hasattr(cls, name):
                    # the composite itself overrides this knob
                    continue

                try:
                    last_owner_cls = name_dict[name]
                except KeyError:
                    name_dict[name] = owner_cls
                    owner_dict.setdefault(owner_cls, []).append(name)
                else:
                    knob_cls = getattr(owner_cls, name)
                    last_knob_cls = getattr(last_owner_cls, name)
                    if issubclass(knob_cls, last_knob_cls):
                        # more derived definition wins; reattach the name
                        name_dict[name] = owner_cls
                        owner_dict[last_owner_cls].remove(name)
                        owner_dict.setdefault(owner_cls, [])
                        if name not in owner_dict[owner_cls]:
                            owner_dict[owner_cls].append(name)
                    elif not issubclass(last_knob_cls, knob_cls):
                        # neither definition subsumes the other
                        raise TypeError("{0}.knobs(): conflicting definitions "
                                        "of '{1}' in {2} and {3}".format(
                                            cls.__name__,
                                            name,
                                            last_owner_cls.__name__,
                                            owner_cls.__name__))

        # yield owners most-derived first (see _class_key)
        for owner_cls in sorted(owner_dict, key=_class_key):
            for name in owner_dict[owner_cls]:
                yield owner_cls, name

    @classmethod
    def components(cls):
        """Iterate over (owner_class, name) pairs of child components."""
        assert not hasattr(super(Composite, cls), 'components')

        seen = set()

        for owner_cls in cls.__mro__:
            result = []

            for name, comp_cls in owner_cls.__dict__.items():
                # each name reported once, for its most derived owner
                if name in seen:
                    continue
                seen.add(name)

                if not isinstance(comp_cls, type):
                    continue
                if not issubclass(comp_cls, ComponentBase):
                    continue

                result.append((comp_cls._order, owner_cls, name))

            result = sorted(result, key=lambda r: r[0])

            for _order, owner_cls, name in result:
                yield owner_cls, name

    def __getattr__(self, name):
        # Delegate unknown attribute reads to the component that owns
        # the knob of that name, if any.
        for owner_cls, knob_name in self.knobs():
            if knob_name == name:
                break
        else:
            raise AttributeError(name)

        for component in self.__components:
            if isinstance(component, owner_cls):
                break
        else:
            raise AttributeError(name)

        return getattr(component, name)

    def _reset(self):
        # materialize the component list before (re)arming the coroutine
        self.__components = list(self._get_components())

        super(Composite, self)._reset()

    def _get_components(self):
        for _owner_cls, name in self.components():
            yield getattr(self, name)

    def _configure(self):
        # Round-robin the validators of all components until each one
        # has finished its validate phase.
        validate = [(c, c._validator()) for c in self.__components]
        while True:
            new_validate = []
            for child, validator in validate:
                try:
                    next(validator)
                except StopIteration:
                    pass
                else:
                    new_validate.append((child, validator))
            if not new_validate:
                break
            validate = new_validate

            yield

        if not self.__components:
            return

        # run the composite's own validate->execute transition
        yield from_(super(Composite, self)._configure())

        # Round-robin the executors of all components that are not done.
        execute = [(c, c._executor()) for c in self.__components
                   if not c.done()]
        while True:
            new_execute = []
            for child, executor in execute:
                try:
                    next(executor)
                except StopIteration:
                    pass
                else:
                    new_execute.append((child, executor))
            if not new_execute:
                break
            execute = new_execute

            yield

View File

@@ -0,0 +1,34 @@
#
# Copyright (C) 2016 FreeIPA Contributors see COPYING for license
#
import weakref
import six
# Interning cache for parametrized List types: item type -> generated class.
# Weak values let unused generated classes be garbage-collected.
_cache = weakref.WeakValueDictionary()


class ListMeta(type):
    """Metaclass providing ``List[X]`` subscription syntax.

    ``cls[X]`` returns a class whose ``__parameters__`` is ``(X,)``;
    repeated subscriptions with the same item type return the same
    cached class.
    """

    def __getitem__(cls, key):
        if not isinstance(key, type):
            raise TypeError("Parameters to generic types must be types. "
                            "Got {!r}.".format(key))

        try:
            # Reuse a previously generated class so identical subscriptions
            # yield the identical type object. (The original code passed the
            # fresh class as the .get() default but never stored it, so the
            # cache stayed permanently empty.)
            return _cache[key]
        except KeyError:
            t = ListMeta(
                cls.__name__,
                cls.__bases__,
                {
                    '__parameters__': (key,),
                    '__init__': cls.__init__,
                }
            )
            _cache[key] = t
            return t
class List(six.with_metaclass(ListMeta, list)):
    """Minimal generic list type: subscript as ``List[X]`` to parametrize.

    The class only carries the item type in ``__parameters__`` for
    introspection; it is never meant to be instantiated.
    """
    # the unsubscripted base has no parameters
    __parameters__ = ()

    def __init__(self, *_args, **_kwargs):
        # forbid instantiation; only the *type* is meaningful
        raise TypeError("Type List cannot be instantiated; use list() instead")

165
ipapython/install/util.py Normal file
View File

@@ -0,0 +1,165 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Utilities.
"""
import sys
import six
class from_(object):
    """
    Wrapper for delegating to a subgenerator.

    See `run_generator_with_yield_from`.
    """
    # single slot: no per-instance dict, no other attributes allowed
    __slots__ = ('obj',)

    def __init__(self, obj):
        # the generator (or other iterable) to delegate to
        self.obj = obj
def run_generator_with_yield_from(gen):
    """
    Iterate over a generator object with subgenerator delegation.

    This implements Python 3's ``yield from`` expressions, using Python 2
    syntax:

    >>> def subgen():
    ...     yield 'B'
    ...     yield 'C'
    ...
    >>> def gen():
    ...     yield 'A'
    ...     yield from_(subgen())
    ...     yield 'D'
    ...
    >>> list(run_generator_with_yield_from(gen()))
    ['A', 'B', 'C', 'D']

    Returning value from a subgenerator is not supported.
    """
    exc_info = None
    value = None
    # stack of nested generators; top of stack is currently running
    stack = [gen]
    while stack:
        # shift last round's outcome into "previous", clear current
        prev_exc_info, exc_info = exc_info, None
        prev_value, value = value, None

        gen = stack[-1]
        try:
            if prev_exc_info is None:
                # resume normally, sending the value our consumer gave us
                value = gen.send(prev_value)
            else:
                # propagate an exception from the consumer (or a finished
                # subgenerator) into the current generator
                value = gen.throw(*prev_exc_info)
        except StopIteration:
            # current generator finished cleanly; resume its caller
            stack.pop()
            continue
        except BaseException:
            # remember the exception and deliver it to the caller on the
            # next iteration (or re-raise at the end if the stack empties)
            exc_info = sys.exc_info()
            stack.pop()
            continue
        else:
            if isinstance(value, from_):
                # delegation request: descend into the subgenerator
                stack.append(value.obj)
                value = None
                continue

        try:
            # hand the yielded value to our own consumer
            value = (yield value)
        except BaseException:
            # consumer injected an exception; forward it next iteration
            exc_info = sys.exc_info()

    if exc_info is not None:
        # an exception escaped the outermost generator
        six.reraise(*exc_info)
class InnerClassMeta(type):
    """Metaclass making a class usable as a descriptor on its outer class.

    The inner class is bound lazily to the outer class and attribute name
    where it is defined (see __bind); get/set/delete on an outer instance
    instantiate the inner class and delegate to its descriptor protocol.
    """

    # pylint: disable=no-value-for-parameter

    def __new__(mcs, name, bases, class_dict):
        # drop stale binding info possibly inherited from a template dict
        class_dict.pop('__outer_class__', None)
        class_dict.pop('__outer_name__', None)

        return super(InnerClassMeta, mcs).__new__(mcs, name, bases, class_dict)

    def __get__(cls, obj, obj_type):
        outer_class, outer_name = cls.__bind(obj_type)
        if obj is None:
            # class-level access returns the inner class itself
            return cls
        assert isinstance(obj, outer_class)

        try:
            # cached instance stored on the outer object?
            return obj.__dict__[outer_name]
        except KeyError:
            inner = cls(obj)
            try:
                getter = inner.__get__
            except AttributeError:
                # inner object is not itself a descriptor
                return inner
            else:
                # delegate to the inner object's own __get__
                return getter(obj, obj_type)

    def __set__(cls, obj, value):
        outer_class, outer_name = cls.__bind(obj.__class__)
        assert isinstance(obj, outer_class)

        inner = cls(obj)
        try:
            setter = inner.__set__
        except AttributeError:
            try:
                inner.__delete__
            except AttributeError:
                # plain data: store directly on the outer instance
                obj.__dict__[outer_name] = value
            else:
                # delete-only descriptor: writing is not allowed
                raise AttributeError('__set__')
        else:
            setter(obj, value)

    def __delete__(cls, obj):
        outer_class, outer_name = cls.__bind(obj.__class__)
        assert isinstance(obj, outer_class)

        inner = cls(obj)
        try:
            deleter = inner.__delete__
        except AttributeError:
            try:
                inner.__set__
            except AttributeError:
                # plain data: remove the stored value, if any
                try:
                    del obj.__dict__[outer_name]
                except KeyError:
                    raise AttributeError(outer_name)
            else:
                # set-only descriptor: deleting is not allowed
                raise AttributeError('__delete__')
        else:
            deleter(obj)

    def __bind(cls, obj_type):
        # Locate (and memoize) the outer class and attribute name under
        # which this inner class is defined, searching the MRO for the
        # attribute whose value is this very class.
        try:
            outer_class = cls.__dict__['__outer_class__']
            name = cls.__dict__['__outer_name__']
        except KeyError:
            outer_class, name, value = None, None, None
            for outer_class in obj_type.__mro__:
                for name, value in six.iteritems(outer_class.__dict__):
                    if value is cls:
                        break
                if value is cls:
                    break
            assert value is cls

            cls.__outer_class__ = outer_class
            cls.__outer_name__ = name
            # give the inner class a dotted name for nicer diagnostics
            cls.__name__ = '.'.join((outer_class.__name__, name))
            cls.__qualname__ = cls.__name__

        return outer_class, name

View File

@@ -0,0 +1,206 @@
# Authors: John Dennis <jdennis@redhat.com>
#
# Copyright (C) 2011 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import re
import time
import warnings
import sys
import six
# Module exports
__all__ = ['log_mgr', 'root_logger', 'standard_logging_setup',
           'ISO8601_UTC_DATETIME_FMT',
           'LOGGING_FORMAT_STDERR', 'LOGGING_FORMAT_STDOUT', 'LOGGING_FORMAT_FILE']
# Format string for time.strftime() to produce a ISO 8601 date time
# formatted string in the UTC time zone.
ISO8601_UTC_DATETIME_FMT = '%Y-%m-%dT%H:%M:%SZ'
# Logging format string for use with logging stderr handlers
LOGGING_FORMAT_STDERR = 'ipa: %(levelname)s: %(message)s'
# Logging format string for use with logging stdout handlers
LOGGING_FORMAT_STDOUT = '[%(asctime)s %(name)s] <%(levelname)s>: %(message)s'
# Logging format string for use with logging file handlers
# (tab-separated columns: time, pid, thread name, logger, level, message)
LOGGING_FORMAT_FILE = '\t'.join([
    '%(asctime)s',
    '%(process)d',
    '%(threadName)s',
    '%(name)s',
    '%(levelname)s',
    '%(message)s',
])
# Used by standard_logging_setup() for console message
LOGGING_FORMAT_STANDARD_CONSOLE = '%(name)-12s: %(levelname)-8s %(message)s'
# Used by standard_logging_setup() for file message
LOGGING_FORMAT_STANDARD_FILE = '%(asctime)s %(levelname)s %(message)s'
class _DeprecatedLogger(object):
def __init__(self, logger, name):
self._logger = logger
self._name = name
def _warn(self):
warnings.warn(
"{} is deprecated, use a module-level logger".format(self._name),
DeprecationWarning)
def debug(self, *args, **kwargs):
self._warn()
self._logger.debug(*args, **kwargs)
def info(self, *args, **kwargs):
self._warn()
self._logger.info(*args, **kwargs)
def warning(self, *args, **kwargs):
self._warn()
self._logger.warning(*args, **kwargs)
def error(self, *args, **kwargs):
self._warn()
self._logger.error(*args, **kwargs)
def critical(self, *args, **kwargs):
self._warn()
self._logger.critical(*args, **kwargs)
def exception(self, *args, **kwargs):
self._warn()
self._logger.exception(*args, **kwargs)
def get_logger(who, bind_logger_names=False):
    """Deprecated compatibility shim around ``logging.getLogger``.

    :param who: either a logger name (string) or an arbitrary object.
        For an object, the logger name is derived from the *caller's*
        module via frame inspection, and the result is wrapped in
        _DeprecatedLogger so every use emits a DeprecationWarning.
    :param bind_logger_names: object form only — when true, also attach
        ``log`` plus per-level logging methods directly onto `who`.
    :returns: a logging.Logger (string form) or a _DeprecatedLogger
        wrapper (object form)
    :raises ValueError: if an attribute to be bound already exists on
        `who`
    """
    if isinstance(who, six.string_types):
        warnings.warn(
            "{}.log_mgr.get_logger is deprecated, use "
            "logging.getLogger".format(__name__),
            DeprecationWarning)
        logger_name = who
    else:
        # Name the logger after the calling module; when run as a script
        # (__main__), fall back to the script's file name.
        # NOTE: sys._getframe(1) depends on being called directly by the
        # interested caller — do not add wrapper frames here.
        caller_globals = sys._getframe(1).f_globals
        logger_name = caller_globals.get('__name__', '__main__')
        if logger_name == '__main__':
            logger_name = caller_globals.get('__file__', logger_name)
            logger_name = os.path.basename(logger_name)
    logger = logging.getLogger(logger_name)
    if not isinstance(who, six.string_types):
        obj_name = '%s.%s' % (who.__module__, who.__class__.__name__)
        logger = _DeprecatedLogger(logger, obj_name)
        if bind_logger_names:
            # Refuse to clobber any pre-existing attribute on `who`.
            method = 'log'
            if hasattr(who, method):
                raise ValueError('%s is already bound to %s' % (method, repr(who)))
            setattr(who, method, logger)
            for method in ('debug',
                           'info',
                           'warning',
                           'error',
                           'exception',
                           'critical'):
                if hasattr(who, method):
                    raise ValueError(
                        '%s is already bound to %s' % (method, repr(who)))
                setattr(who, method, getattr(logger, method))
    return logger
class Filter(object):
    """Logging filter suppressing records from loggers whose name matches
    ``regexp``, unless the record is at or above ``level``.
    """
    def __init__(self, regexp, level):
        self.regexp = re.compile(regexp)
        self.level = level

    def filter(self, record):
        # Records from matching loggers pass only at `level` or above;
        # everything else always passes.
        if self.regexp.match(record.name):
            return record.levelno >= self.level
        return True
class Formatter(logging.Formatter):
    """logging.Formatter variant that renders %(asctime)s in UTC,
    by default as an ISO 8601 timestamp with a 'Z' suffix.
    """
    def __init__(
            self, fmt=LOGGING_FORMAT_STDOUT, datefmt=ISO8601_UTC_DATETIME_FMT):
        super(Formatter, self).__init__(fmt, datefmt)
        # gmtime makes timestamps UTC, matching the default 'Z' suffix.
        self.converter = time.gmtime
def standard_logging_setup(filename=None, verbose=False, debug=False,
                           filemode='w', console_format=None):
    """Configure the root logger with an optional debug-level file
    handler and a console handler.

    :param filename: when given, also log everything (DEBUG) to this file
    :param verbose: console logs at INFO instead of ERROR
    :param debug: console logs at DEBUG (overrides `verbose`)
    :param filemode: mode for opening the log file ('w' truncates)
    :param console_format: console format string; defaults to
        LOGGING_FORMAT_STANDARD_CONSOLE
    """
    if console_format is None:
        console_format = LOGGING_FORMAT_STANDARD_CONSOLE
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    # File output is always logged at debug level
    if filename is not None:
        # Create the log file with restrictive permissions (0600),
        # restoring the previous umask afterwards.
        old_umask = os.umask(0o177)
        try:
            file_handler = logging.FileHandler(filename, mode=filemode)
        finally:
            os.umask(old_umask)
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(Formatter(LOGGING_FORMAT_STANDARD_FILE))
        root.addHandler(file_handler)
    if debug:
        console_level = logging.DEBUG
    elif verbose:
        console_level = logging.INFO
    else:
        console_level = logging.ERROR
    console_handler = logging.StreamHandler()
    console_handler.setLevel(console_level)
    console_handler.setFormatter(Formatter(console_format))
    root.addHandler(console_handler)
def convert_log_level(value):
    """Translate `value` into a numeric logging level.

    Accepts an int, a numeric string, or a level name such as 'debug',
    'warn' or 'ERROR' (case-insensitive).

    :raises ValueError: if `value` is neither numeric nor a known name
    """
    names = {
        'debug': logging.DEBUG,
        'info': logging.INFO,
        'warn': logging.WARNING,
        'warning': logging.WARNING,
        'error': logging.ERROR,
        'critical': logging.CRITICAL,
    }
    try:
        return int(value)
    except ValueError:
        pass
    try:
        return names[value.lower()]
    except KeyError:
        raise ValueError('unknown log level (%s)' % value)
# Single shared instance of log manager
# Deprecated: the module object itself stands in for the legacy log_mgr
# singleton so old ``log_mgr.get_logger(...)`` call sites keep working.
log_mgr = sys.modules[__name__]
# Deprecated alias for the root logger; every call through it emits a
# DeprecationWarning via _DeprecatedLogger.
root_logger = _DeprecatedLogger(logging.getLogger(),
                                '{}.log_mgr.root_logger'.format(__name__))

1623
ipapython/ipaldap.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,31 @@
Metadata-Version: 1.2
Name: ipapython
Version: 4.6.2
Summary: FreeIPA python support library
Home-page: http://www.freeipa.org/
Author: FreeIPA Developers
Author-email: freeipa-devel@redhat.com
License: GPLv3
Download-URL: http://www.freeipa.org/page/Downloads
Description: FreeIPA python support library
Platform: Linux
Platform: Solaris
Platform: Unix
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Classifier: Programming Language :: C
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.5
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Operating System :: POSIX
Classifier: Operating System :: POSIX :: Linux
Classifier: Operating System :: Unix
Classifier: Topic :: Internet :: Name Service (DNS)
Classifier: Topic :: Security
Classifier: Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP
Requires-Python: >=2.7.5,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*

View File

@@ -0,0 +1,34 @@
README
__init__.py
admintool.py
certdb.py
config.py
cookie.py
dn.py
dnsutil.py
dogtag.py
errors.py
graph.py
ipa_log_manager.py
ipaldap.py
ipautil.py
ipavalidate.py
kerberos.py
kernel_keyring.py
nsslib.py
session_storage.py
setup.cfg
setup.py
ssh.py
version.py
install/__init__.py
install/cli.py
install/common.py
install/core.py
install/typing.py
install/util.py
ipapython.egg-info/PKG-INFO
ipapython.egg-info/SOURCES.txt
ipapython.egg-info/dependency_links.txt
ipapython.egg-info/requires.txt
ipapython.egg-info/top_level.txt

View File

@@ -0,0 +1 @@

View File

@@ -0,0 +1,15 @@
cffi
cryptography>=1.6
dnspython>=1.15
gssapi>=1.2.0
ipaplatform==4.6.2
netaddr
netifaces>=0.10.4
python-ldap>=3.0.0b1
six
[:python_version<'3']
enum34
[install]
dbus-python

View File

@@ -0,0 +1 @@
ipapython

1494
ipapython/ipautil.py Normal file

File diff suppressed because it is too large Load Diff

137
ipapython/ipavalidate.py Normal file
View File

@@ -0,0 +1,137 @@
# Authors: Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2007 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import re
def Email(mail, notEmpty=True):
    """Do some basic validation of an e-mail address.

    Return True if ok
    Return False if not

    If notEmpty is True then an empty value ("" or None) is rejected.
    """
    # Deliberately loose patterns: reject whitespace/control characters
    # and angle brackets in the local part, require a dotted domain that
    # ends in letters. Not full RFC 5322 validation.
    usernameRE = re.compile(r"^[^ \t\n\r@<>()]+$", re.I)
    domainRE = re.compile(r"^[a-z0-9][a-z0-9\.\-_]*\.[a-z]+$", re.I)

    if not mail:
        # "" and None are acceptable only when notEmpty is False.
        # (The original also tested `mail is None`, which `not mail`
        # already covers.)
        return notEmpty is not True
    mail = mail.strip()
    try:
        username, domain = mail.split('@', 1)
    except ValueError:
        # No '@' separator present.
        return False
    if not usernameRE.search(username):
        return False
    if not domainRE.search(domain):
        return False
    return True
def Plain(text, notEmpty=False, allowSpaces=True):
    """Do some basic validation of a plain text field

    Return True if ok
    Return False if not

    If notEmpty is True then an empty value ("" or None) is rejected.
    """
    if text is None or not text.strip():
        # Empty or whitespace-only: acceptance depends on notEmpty.
        return notEmpty is not True
    # Letters, digits, underscore, hyphen and apostrophe are allowed;
    # spaces only when allowSpaces is set.
    if allowSpaces:
        pattern = r"^[a-zA-Z_\-0-9\'\ ]*$"
    else:
        pattern = r"^[a-zA-Z_\-0-9\']*$"
    return re.search(pattern, text) is not None
def String(text, notEmpty=False):
    """A string type. This is much looser in what it allows than plain"""
    # Only emptiness is checked; any non-blank content is accepted.
    if text is None or not text.strip():
        return notEmpty is not True
    return True
def Path(text, notEmpty=False):
    """Do some basic validation of a path

    Return True if ok
    Return False if not

    If notEmpty is True then an empty value ("" or None) is rejected.
    """
    if not text and notEmpty is True:
        return False
    if text is None:
        # Only reachable when notEmpty did not already reject above.
        return True
    # Letters, digits, '_', '-', space, '.', '/', '\' and ':' are allowed.
    return re.search(r"^[a-zA-Z_\-0-9\\ \.\/\\:]*$", text) is not None
def GoodName(text, notEmpty=False):
    """From shadow-utils:

    User/group names must match gnu e-regex:
    [a-zA-Z0-9_.][a-zA-Z0-9_.-]{0,30}[a-zA-Z0-9_.$-]?

    as a non-POSIX, extension, allow "$" as the last char for
    sake of Samba 3.x "add machine script"

    Return True if ok
    Return False if not
    """
    if not text and notEmpty is True:
        return False
    if text is None:
        return True
    match = re.match(r"^[a-zA-Z0-9_.][a-zA-Z0-9_.-]{0,30}[a-zA-Z0-9_.$-]?$",
                     text)
    # `$` also matches just before a trailing newline, so additionally
    # require the match to cover the whole string.
    return match is not None and match.group(0) == text

204
ipapython/kerberos.py Normal file
View File

@@ -0,0 +1,204 @@
#
# Copyright (C) 2016 FreeIPA Contributors see COPYING for license
#
"""
classes/utils for Kerberos principal name validation/manipulation
"""
import re
import six
from ipapython.ipautil import escape_seq, unescape_seq
if six.PY3:
unicode = str
REALM_SPLIT_RE = re.compile(r'(?<!\\)@')
COMPONENT_SPLIT_RE = re.compile(r'(?<!\\)/')
def parse_princ_name_and_realm(principal, realm=None):
    """
    split principal to the <principal_name>, <realm> components

    :param principal: unicode representation of principal
    :param realm: optional fallback realm, used only when `principal`
        itself does not contain a realm part
    :returns: tuple containing the principal name and realm.
        realm will be `None` if no realm was found in the input string
        and no fallback `realm` was supplied
    :raises ValueError: when `principal` contains more than one
        unescaped '@'
    """
    realm_and_name = REALM_SPLIT_RE.split(principal)
    if len(realm_and_name) > 2:
        raise ValueError(
            "Principal is not in <name>@<realm> format")
    principal_name = realm_and_name[0]
    try:
        parsed_realm = realm_and_name[1]
    except IndexError:
        # No realm part in the input; fall back to the caller-supplied
        # realm (which may itself be None). The original expression
        # `None if realm is None else realm` reduced to just `realm`.
        parsed_realm = realm
    return principal_name, parsed_realm
def split_principal_name(principal_name):
    """
    Split principal name (without realm) into the components

    NOTE: operates on the following RFC 1510 types:
        * NT-PRINCIPAL
        * NT-SRV-INST
        * NT-SRV-HST

    Enterprise principals (NT-ENTERPRISE, see RFC 6806) are also handled

    :param principal_name: unicode representation of principal name
    :returns: tuple of individual components (i.e. primary name for
        NT-PRINCIPAL and NT-ENTERPRISE, primary name and instance for others)
    """
    # Split on unescaped '/' only; escaped separators stay in place.
    components = COMPONENT_SPLIT_RE.split(principal_name)
    return tuple(components)
@six.python_2_unicode_compatible
class Principal(object):
    """
    Container for the principal name and realm according to RFC 1510
    """
    def __init__(self, components, realm=None):
        # Accepted `components` forms:
        #   * text -> parsed into components (and possibly a realm)
        #   * Principal -> copied; `realm` (if given) overrides its realm
        #   * iterable of components -> used as-is together with `realm`
        # Raw bytes are rejected so callers must decode explicitly.
        if isinstance(components, six.binary_type):
            raise TypeError(
                "Cannot create a principal object from bytes: {!r}".format(
                    components)
            )
        elif isinstance(components, six.string_types):
            # parse principal components from realm
            self.components, self.realm = self._parse_from_text(
                components, realm)
        elif isinstance(components, Principal):
            self.components = components.components
            self.realm = components.realm if realm is None else realm
        else:
            self.components = tuple(components)
            self.realm = realm
    def __eq__(self, other):
        # Equal only to other Principal instances with identical
        # components and realm.
        if not isinstance(other, Principal):
            return False
        return (self.components == other.components and
                self.realm == other.realm)
    def __ne__(self, other):
        return not self.__eq__(other)
    def __lt__(self, other):
        # Ordering is lexicographic on the canonical text form.
        return unicode(self) < unicode(other)
    def __le__(self, other):
        return self.__lt__(other) or self.__eq__(other)
    def __gt__(self, other):
        return not self.__le__(other)
    def __ge__(self, other):
        return self.__gt__(other) or self.__eq__(other)
    def __hash__(self):
        # Hash over the same data __eq__ compares.
        return hash(self.components + (self.realm,))
    def _parse_from_text(self, principal, realm=None):
        """
        parse individual principal name components from the string
        representation of the principal. This is done in four steps:

        1.) split the string at the unescaped '@'
        2.) unescape any leftover '\@' sequences
        3.) split the primary at the unescaped '/'
        4.) unescape leftover '\/'

        :param principal: unicode representation of the principal name
        :param realm: optional fallback realm, used when `principal`
            itself does not contain a realm part
        :returns: tuple containing the principal name components and realm
        """
        principal_name, parsed_realm = parse_princ_name_and_realm(
            principal, realm=realm)
        (principal_name,) = unescape_seq(u'@', principal_name)
        if parsed_realm is not None:
            (parsed_realm,) = unescape_seq(u'@', parsed_realm)
        name_components = split_principal_name(principal_name)
        name_components = unescape_seq(u'/', *name_components)
        return name_components, parsed_realm
    @property
    def is_user(self):
        # NT-PRINCIPAL / NT-ENTERPRISE: a single name component.
        return len(self.components) == 1
    @property
    def is_enterprise(self):
        # Enterprise principals (RFC 6806) embed '@' inside the single
        # component, e.g. u'user@upnsuffix'.
        return self.is_user and u'@' in self.components[0]
    @property
    def is_service(self):
        return len(self.components) > 1
    @property
    def is_host(self):
        return (self.is_service and len(self.components) == 2 and
                self.components[0] == u'host')
    @property
    def username(self):
        if self.is_user:
            return self.components[0]
        else:
            raise ValueError(
                "User name is defined only for user and enterprise principals")
    @property
    def upn_suffix(self):
        if not self.is_enterprise:
            raise ValueError("Only enterprise principals have UPN suffix")
        return self.components[0].split(u'@')[1]
    @property
    def hostname(self):
        # Last component: the instance for services, the host name for
        # host principals.
        if not (self.is_host or self.is_service):
            raise ValueError(
                "hostname is defined for host and service principals")
        return self.components[-1]
    @property
    def service_name(self):
        if not self.is_service:
            raise ValueError(
                "Only service principals have meaningful service name")
        return u'/'.join(c for c in escape_seq('/', *self.components[:-1]))
    def __str__(self):
        """
        return the unicode representation of principal

        works in reverse of the `from_text` class method
        """
        # Escape literal '/' and '@' inside components so the rendered
        # text round-trips through _parse_from_text().
        name_components = escape_seq(u'/', *self.components)
        name_components = escape_seq(u'@', *name_components)
        principal_string = u'/'.join(name_components)
        if self.realm is not None:
            (realm,) = escape_seq(u'@', self.realm)
            principal_string = u'@'.join([principal_string, realm])
        return principal_string
    def __repr__(self):
        return "{0.__module__}.{0.__name__}('{1}')".format(
            self.__class__, self)

136
ipapython/kernel_keyring.py Normal file
View File

@@ -0,0 +1,136 @@
# Authors: Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2012 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import six
from ipapython.ipautil import run
# NOTE: Absolute path not required for keyctl since we reset the environment
# in ipautil.run.
# Use the session keyring so the same user can have a different principal
# in different shells. This was explicitly chosen over @us because then
# it is not possible to use KRB5CCNAME to have a different user principal.
# The same session would always be used and the first principal would
# always win.
KEYRING = '@s'
KEYTYPE = 'user'
def dump_keys():
    """
    Dump all keys
    """
    # Failures are tolerated (raiseonerr=False); whatever keyctl printed
    # is returned as-is.
    keyctl = run(['keyctl', 'list', KEYRING], raiseonerr=False,
                 capture_output=True)
    return keyctl.output
def get_real_key(key):
    """
    One cannot request a key based on the description it was created with
    so find the one we're looking for.

    :raises ValueError: if no key with that description exists
    """
    assert isinstance(key, six.string_types)
    keyctl = run(['keyctl', 'search', KEYRING, KEYTYPE, key],
                 raiseonerr=False, capture_output=True)
    if keyctl.returncode:
        raise ValueError('key %s not found' % key)
    # keyctl prints the key serial followed by a newline.
    return keyctl.raw_output.rstrip()
def get_persistent_key(key):
    """Look up the persistent keyring identified by `key`.

    :raises ValueError: if keyctl reports no such persistent key
    """
    assert isinstance(key, six.string_types)
    keyctl = run(['keyctl', 'get_persistent', KEYRING, key],
                 raiseonerr=False, capture_output=True)
    if keyctl.returncode:
        raise ValueError('persistent key %s not found' % key)
    return keyctl.raw_output.rstrip()
def is_persistent_keyring_supported():
    """Return True when the kernel provides a persistent keyring for the
    current effective user, False otherwise.
    """
    try:
        get_persistent_key(str(os.geteuid()))
    except ValueError:
        return False
    return True
def has_key(key):
    """
    Returns True/False whether the key exists in the keyring.
    """
    assert isinstance(key, six.string_types)
    try:
        get_real_key(key)
    except ValueError:
        return False
    return True
def read_key(key):
    """
    Read the keyring and return the value for key.

    Use pipe instead of print here to ensure we always get the raw data.

    :raises ValueError: if the key is missing or keyctl pipe fails
    """
    assert isinstance(key, six.string_types)
    real_key = get_real_key(key)
    keyctl = run(['keyctl', 'pipe', real_key], raiseonerr=False,
                 capture_output=True)
    if keyctl.returncode:
        raise ValueError('keyctl pipe failed: %s' % keyctl.error_log)
    return keyctl.raw_output
def update_key(key, value):
    """
    Update the keyring data. If the key doesn't exist it is created.
    """
    assert isinstance(key, six.string_types)
    assert isinstance(value, bytes)
    if not has_key(key):
        # No existing key: fall back to creation.
        add_key(key, value)
        return
    real_key = get_real_key(key)
    keyctl = run(['keyctl', 'pupdate', real_key], stdin=value,
                 raiseonerr=False)
    if keyctl.returncode:
        raise ValueError('keyctl pupdate failed: %s' % keyctl.error_log)
def add_key(key, value):
    """
    Add a key to the kernel keyring.

    :raises ValueError: if the key already exists or keyctl padd fails
    """
    assert isinstance(key, six.string_types)
    assert isinstance(value, bytes)
    if has_key(key):
        raise ValueError('key %s already exists' % key)
    keyctl = run(['keyctl', 'padd', KEYTYPE, key, KEYRING],
                 stdin=value, raiseonerr=False)
    if keyctl.returncode:
        raise ValueError('keyctl padd failed: %s' % keyctl.error_log)
def del_key(key):
    """
    Remove a key from the keyring

    :raises ValueError: if the key is missing or keyctl unlink fails
    """
    assert isinstance(key, six.string_types)
    keyctl = run(['keyctl', 'unlink', get_real_key(key), KEYRING],
                 raiseonerr=False)
    if keyctl.returncode:
        raise ValueError('keyctl unlink failed: %s' % keyctl.error_log)

0
ipapython/nsslib.py Normal file
View File

View File

@@ -0,0 +1,392 @@
#
# Copyright (C) 2017 FreeIPA Contributors see COPYING for license
#
import ctypes
import sys
KRB5_CC_NOSUPP = -1765328137
if sys.platform == 'darwin':
LIBKRB5_FILENAME = 'libkrb5.dylib'
else:
LIBKRB5_FILENAME = 'libkrb5.so.3'
try:
LIBKRB5 = ctypes.CDLL(LIBKRB5_FILENAME)
except OSError as e: # pragma: no cover
raise ImportError(str(e))
krb5_int32 = ctypes.c_int32
krb5_error_code = krb5_int32
krb5_magic = krb5_error_code
krb5_enctype = krb5_int32
krb5_octet = ctypes.c_uint8
krb5_timestamp = krb5_int32
class _krb5_context(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_context"""
_fields_ = []
class _krb5_ccache(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_ccache"""
_fields_ = []
class _krb5_data(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_data"""
_fields_ = [
("magic", krb5_magic),
("length", ctypes.c_uint),
("data", ctypes.c_char_p),
]
class krb5_principal_data(ctypes.Structure): # noqa
"""krb5/krb5.h struct krb5_principal_data"""
_fields_ = []
class _krb5_keyblock(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_keyblock"""
_fields_ = [
("magic", krb5_magic),
("enctype", krb5_enctype),
("length", ctypes.c_uint),
("contents", ctypes.POINTER(krb5_octet))
]
class _krb5_ticket_times(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_ticket_times"""
_fields_ = [
("authtime", krb5_timestamp),
("starttime", krb5_timestamp),
("endtime", krb5_timestamp),
("renew_till", krb5_timestamp),
]
class _krb5_address(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_address"""
_fields_ = []
class _krb5_authdata(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_authdata"""
_fields_ = []
krb5_principal = ctypes.POINTER(krb5_principal_data)
krb5_keyblock = _krb5_keyblock
krb5_ticket_times = _krb5_ticket_times
krb5_boolean = ctypes.c_uint
krb5_flags = krb5_int32
krb5_data = _krb5_data
krb5_address_p = ctypes.POINTER(_krb5_address)
krb5_authdata_p = ctypes.POINTER(_krb5_authdata)
class _krb5_creds(ctypes.Structure): # noqa
"""krb5/krb5.h struct _krb5_creds"""
_fields_ = [
("magic", krb5_magic),
("client", krb5_principal),
("server", krb5_principal),
("keyblock", krb5_keyblock),
("times", krb5_ticket_times),
("is_skey", krb5_boolean),
("ticket_flags", krb5_flags),
("addresses", ctypes.POINTER(krb5_address_p)),
("ticket", krb5_data),
("second_ticket", krb5_data),
("authdata", ctypes.POINTER(krb5_authdata_p))
]
class KRB5Error(Exception):
    """Raised when a libkrb5 call returns a nonzero error code."""
def krb5_errcheck(result, func, arguments):
    """Error checker for krb5_error return value"""
    # ctypes `errcheck` hook: zero means success; anything else raises
    # with (code, function name, call arguments) for diagnosis.
    if result == 0:
        return None
    raise KRB5Error(result, func.__name__, arguments)
krb5_context = ctypes.POINTER(_krb5_context)
krb5_ccache = ctypes.POINTER(_krb5_ccache)
krb5_data_p = ctypes.POINTER(_krb5_data)
krb5_error = ctypes.c_int32
krb5_creds = _krb5_creds
krb5_pointer = ctypes.c_void_p
krb5_cc_cursor = krb5_pointer
krb5_init_context = LIBKRB5.krb5_init_context
krb5_init_context.argtypes = (ctypes.POINTER(krb5_context), )
krb5_init_context.restype = krb5_error
krb5_init_context.errcheck = krb5_errcheck
krb5_free_context = LIBKRB5.krb5_free_context
krb5_free_context.argtypes = (krb5_context, )
krb5_free_context.restype = None
krb5_free_principal = LIBKRB5.krb5_free_principal
krb5_free_principal.argtypes = (krb5_context, krb5_principal)
krb5_free_principal.restype = None
krb5_free_data_contents = LIBKRB5.krb5_free_data_contents
krb5_free_data_contents.argtypes = (krb5_context, krb5_data_p)
krb5_free_data_contents.restype = None
krb5_cc_default = LIBKRB5.krb5_cc_default
krb5_cc_default.argtypes = (krb5_context, ctypes.POINTER(krb5_ccache), )
krb5_cc_default.restype = krb5_error
krb5_cc_default.errcheck = krb5_errcheck
krb5_cc_close = LIBKRB5.krb5_cc_close
krb5_cc_close.argtypes = (krb5_context, krb5_ccache, )
krb5_cc_close.restype = krb5_error
krb5_cc_close.errcheck = krb5_errcheck
krb5_parse_name = LIBKRB5.krb5_parse_name
krb5_parse_name.argtypes = (krb5_context, ctypes.c_char_p,
ctypes.POINTER(krb5_principal), )
krb5_parse_name.restype = krb5_error
krb5_parse_name.errcheck = krb5_errcheck
krb5_cc_set_config = LIBKRB5.krb5_cc_set_config
krb5_cc_set_config.argtypes = (krb5_context, krb5_ccache, krb5_principal,
ctypes.c_char_p, krb5_data_p, )
krb5_cc_set_config.restype = krb5_error
krb5_cc_set_config.errcheck = krb5_errcheck
krb5_cc_get_principal = LIBKRB5.krb5_cc_get_principal
krb5_cc_get_principal.argtypes = (krb5_context, krb5_ccache,
ctypes.POINTER(krb5_principal), )
krb5_cc_get_principal.restype = krb5_error
krb5_cc_get_principal.errcheck = krb5_errcheck
# krb5_build_principal is a variadic function but that can't be expressed
# in a ctypes argtypes definition, so I explicitly listed the number of
# arguments we actually use through the code for type checking purposes
krb5_build_principal = LIBKRB5.krb5_build_principal
krb5_build_principal.argtypes = (krb5_context, ctypes.POINTER(krb5_principal),
ctypes.c_uint, ctypes.c_char_p,
ctypes.c_char_p, ctypes.c_char_p,
ctypes.c_char_p, ctypes.c_char_p, )
krb5_build_principal.restype = krb5_error
krb5_build_principal.errcheck = krb5_errcheck
krb5_cc_start_seq_get = LIBKRB5.krb5_cc_start_seq_get
krb5_cc_start_seq_get.argtypes = (krb5_context, krb5_ccache,
ctypes.POINTER(krb5_cc_cursor), )
krb5_cc_start_seq_get.restype = krb5_error
krb5_cc_start_seq_get.errcheck = krb5_errcheck
krb5_cc_next_cred = LIBKRB5.krb5_cc_next_cred
krb5_cc_next_cred.argtypes = (krb5_context, krb5_ccache,
ctypes.POINTER(krb5_cc_cursor),
ctypes.POINTER(krb5_creds), )
krb5_cc_next_cred.restype = krb5_error
krb5_cc_next_cred.errcheck = krb5_errcheck
krb5_cc_end_seq_get = LIBKRB5.krb5_cc_end_seq_get
krb5_cc_end_seq_get.argtypes = (krb5_context, krb5_ccache,
ctypes.POINTER(krb5_cc_cursor), )
krb5_cc_end_seq_get.restype = krb5_error
krb5_cc_end_seq_get.errcheck = krb5_errcheck
krb5_free_cred_contents = LIBKRB5.krb5_free_cred_contents
krb5_free_cred_contents.argtypes = (krb5_context, ctypes.POINTER(krb5_creds))
krb5_free_cred_contents.restype = krb5_error
krb5_free_cred_contents.errcheck = krb5_errcheck
krb5_principal_compare = LIBKRB5.krb5_principal_compare
krb5_principal_compare.argtypes = (krb5_context, krb5_principal,
krb5_principal, )
krb5_principal_compare.restype = krb5_boolean
krb5_unparse_name = LIBKRB5.krb5_unparse_name
krb5_unparse_name.argtypes = (krb5_context, krb5_principal,
ctypes.POINTER(ctypes.c_char_p), )
krb5_unparse_name.restype = krb5_error
krb5_unparse_name.errcheck = krb5_errcheck
krb5_free_unparsed_name = LIBKRB5.krb5_free_unparsed_name
krb5_free_unparsed_name.argtypes = (krb5_context, ctypes.c_char_p, )
krb5_free_unparsed_name.restype = None
CONF_REALM = b"X-CACHECONF:"
CONF_NAME = b"krb5_ccache_conf_data"
def store_data(princ_name, key, value):
    """
    Stores the session cookie in a hidden ccache entry.

    The value is written with krb5_cc_set_config() into the default
    credential cache for `princ_name`; text arguments are encoded to
    bytes first.
    """
    if not isinstance(princ_name, bytes):
        princ_name = princ_name.encode('utf-8')
    if not isinstance(key, bytes):
        # Keys are encoded as ASCII, principal/value as UTF-8.
        key = key.encode('ascii')
    if not isinstance(value, bytes):
        value = value.encode('utf-8')
    # FILE ccaches grow every time an entry is stored, so we need
    # to avoid storing the same entry multiple times.
    oldvalue = get_data(princ_name, key)
    if oldvalue == value:
        return
    context = krb5_context()
    principal = krb5_principal()
    ccache = krb5_ccache()
    try:
        krb5_init_context(ctypes.byref(context))
        krb5_parse_name(context, ctypes.c_char_p(princ_name),
                        ctypes.byref(principal))
        krb5_cc_default(context, ctypes.byref(ccache))
        buf = ctypes.create_string_buffer(value)
        data = _krb5_data()
        data.data = buf.value
        # len(buf) includes the trailing NUL added by create_string_buffer.
        data.length = len(buf)
        krb5_cc_set_config(context, ccache, principal, key,
                           ctypes.byref(data))
    finally:
        # Release libkrb5 resources; the truthiness checks keep this safe
        # when an earlier call failed and left a handle NULL.
        if principal:
            krb5_free_principal(context, principal)
        if ccache:
            krb5_cc_close(context, ccache)
        if context:
            krb5_free_context(context)
def get_data(princ_name, key):
    """
    Gets the session cookie in a hidden ccache entry.

    Returns the stored value (bytes) from the LAST matching config entry
    in the default ccache, or None if no matching entry is found.
    """
    if not isinstance(princ_name, bytes):
        princ_name = princ_name.encode('utf-8')
    if not isinstance(key, bytes):
        key = key.encode('utf-8')
    context = krb5_context()
    principal = krb5_principal()
    srv_princ = krb5_principal()
    ccache = krb5_ccache()
    pname_princ = krb5_principal()
    pname = ctypes.c_char_p()
    try:
        krb5_init_context(ctypes.byref(context))
        krb5_cc_default(context, ctypes.byref(ccache))
        krb5_cc_get_principal(context, ccache, ctypes.byref(principal))
        # We need to parse and then unparse the name in case the princ_name
        # passed in comes w/o a realm attached
        krb5_parse_name(context, ctypes.c_char_p(princ_name),
                        ctypes.byref(pname_princ))
        krb5_unparse_name(context, pname_princ, ctypes.byref(pname))
        # Build the pseudo service principal naming the config entry:
        # krb5_ccache_conf_data/<key>/<principal>@X-CACHECONF:
        krb5_build_principal(context, ctypes.byref(srv_princ),
                             len(CONF_REALM), ctypes.c_char_p(CONF_REALM),
                             ctypes.c_char_p(CONF_NAME), ctypes.c_char_p(key),
                             pname, ctypes.c_char_p(None))
        # Unfortunately we can't just use krb5_cc_get_config()
        # because of bugs in some ccache handling code in krb5
        # libraries that would always return the first entry
        # stored and not the last one, which is the one we want.
        cursor = krb5_cc_cursor()
        creds = krb5_creds()
        got_creds = False
        krb5_cc_start_seq_get(context, ccache, ctypes.byref(cursor))
        try:
            while True:
                checkcreds = krb5_creds()
                # the next function will throw an error and break out of the
                # while loop when we try to access past the last cred
                krb5_cc_next_cred(context, ccache, ctypes.byref(cursor),
                                  ctypes.byref(checkcreds))
                if (krb5_principal_compare(context, principal,
                                           checkcreds.client) == 1 and
                    krb5_principal_compare(context, srv_princ,
                                           checkcreds.server) == 1):
                    if got_creds:
                        krb5_free_cred_contents(context, ctypes.byref(creds))
                    creds = checkcreds
                    got_creds = True
                    # We do not stop here, as we want the LAST entry
                    # in the ccache for those ccaches that cannot delete
                    # but only always append, like FILE
                else:
                    krb5_free_cred_contents(context,
                                            ctypes.byref(checkcreds))
        except KRB5Error:
            # End of the credential sequence: krb5_cc_next_cred fails
            # past the last entry (see loop comment above).
            pass
        finally:
            krb5_cc_end_seq_get(context, ccache, ctypes.byref(cursor))
        if got_creds:
            data = creds.ticket.data
            krb5_free_cred_contents(context, ctypes.byref(creds))
            return data
    finally:
        # Release everything that was successfully acquired; NULL handles
        # are skipped via the truthiness checks.
        if principal:
            krb5_free_principal(context, principal)
        if srv_princ:
            krb5_free_principal(context, srv_princ)
        if pname_princ:
            krb5_free_principal(context, pname_princ)
        if pname:
            krb5_free_unparsed_name(context, pname)
        if ccache:
            krb5_cc_close(context, ccache)
        if context:
            krb5_free_context(context)
def remove_data(princ_name, key):
    """
    Removes the hidden ccache entry with the session cookie.

    :param princ_name: principal whose config entry is removed (text is
        encoded as UTF-8)
    :param key: config entry name (text is encoded as UTF-8)
    :raises KRB5Error: on libkrb5 failures other than the ccache type
        not supporting removal (KRB5_CC_NOSUPP), which is ignored
    """
    if not isinstance(princ_name, bytes):
        princ_name = princ_name.encode('utf-8')
    if not isinstance(key, bytes):
        key = key.encode('utf-8')
    context = krb5_context()
    principal = krb5_principal()
    ccache = krb5_ccache()
    try:
        krb5_init_context(ctypes.byref(context))
        krb5_parse_name(context, ctypes.c_char_p(princ_name),
                        ctypes.byref(principal))
        krb5_cc_default(context, ctypes.byref(ccache))
        try:
            # A NULL value removes the config entry.
            krb5_cc_set_config(context, ccache, principal, key, None)
        except KRB5Error as e:
            if e.args[0] != KRB5_CC_NOSUPP:
                # BUG FIX: the original swallowed *every* KRB5Error here;
                # only "removal not supported with this CC type" is
                # expected and safe to ignore.
                raise
    finally:
        if principal:
            krb5_free_principal(context, principal)
        if ccache:
            krb5_cc_close(context, ccache)
        if context:
            krb5_free_context(context)

5
ipapython/setup.cfg Normal file
View File

@@ -0,0 +1,5 @@
[bdist_wheel]
universal = 1
[metadata]
license_file = ../COPYING

55
ipapython/setup.py Executable file
View File

@@ -0,0 +1,55 @@
#!/usr/bin/python2
# Copyright (C) 2007 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""FreeIPA python support library
FreeIPA is a server for identity, policy, and audit.
"""
from os.path import abspath, dirname
import sys

if __name__ == '__main__':
    # include ../ for ipasetup.py
    sys.path.append(dirname(dirname(abspath(__file__))))
    from ipasetup import ipasetup  # noqa: E402

    # Shared FreeIPA packaging helper; forwards to setuptools with
    # project-wide defaults.  The module docstring becomes the package
    # description (doc=__doc__).
    ipasetup(
        name="ipapython",
        doc=__doc__,
        # The package sources live in this very directory.
        package_dir={'ipapython': ''},
        packages=[
            "ipapython",
            "ipapython.install"
        ],
        install_requires=[
            "cffi",
            "cryptography",
            "dnspython",
            "gssapi",
            # "ipalib", # circular dependency
            "ipaplatform",
            "netaddr",
            "netifaces",
            "python-ldap",
            "six",
        ],
        extras_require={
            # enum34 backports the Python 3 'enum' module to Python 2.
            ":python_version<'3'": ["enum34"],
            "install": ["dbus-python"], # for certmonger
        },
    )

216
ipapython/ssh.py Normal file
View File

@@ -0,0 +1,216 @@
# Authors:
# Jan Cholasta <jcholast@redhat.com>
#
# Copyright (C) 2012 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
SSH utilities.
"""
import base64
import re
import struct
from hashlib import sha1
from hashlib import sha256 # pylint: disable=E0611
import six
# Python 3 has no 'unicode' builtin; alias it to str so the
# isinstance() checks below work on both Python 2 and 3.
if six.PY3:
    unicode = str

__all__ = ['SSHPublicKey']

# Matches one OpenSSH public key line without options:
# '<keytype> <base64-key> [comment]'.  NUL, CR and LF are rejected in
# every field; comment is optional and surrounding blanks are trimmed.
OPENSSH_BASE_REGEX = re.compile(r'^[\t ]*(?P<keytype>[^\x00\n\r]+?) [\t ]*(?P<key>[^\x00\n\r]+?)(?:[\t ]+(?P<comment>[^\x00\n\r]*?)[\t ]*)?$')

# Matches a single option at the start of an options list: a name,
# optionally followed by ="value" where the value may contain escaped
# quotes (\") but no NUL, CR, LF or bare quote.
OPENSSH_OPTIONS_REGEX = re.compile(r'(?P<name>[-0-9A-Za-z]+)(?:="(?P<value>(?:\\"|[^\x00\n\r"])*)")?')
class SSHPublicKey(object):
    """
    SSH public key object.

    Accepts a key as a raw RFC 4253 key blob (bytes), a base64-encoded
    blob (text), or an OpenSSH public key line (optionally with a
    leading options list), and normalizes it for re-serialization and
    fingerprinting.
    """

    # _key: raw binary key blob; _keytype: key type string (e.g.
    # 'ssh-rsa'); _comment: trailing comment or None; _options: dict of
    # OpenSSH options (value None for flag-style options).
    __slots__ = ('_key', '_keytype', '_comment', '_options')

    def __init__(self, key, comment=None, options=None, encoding='utf-8'):
        """
        :param key: an SSHPublicKey to copy, or the key material in any
            supported format (bytes or text)
        :param comment: overrides any comment parsed from ``key``
        :param options: overrides any options parsed from ``key``
        :param encoding: used to decode ``key`` when it is bytes that do
            not look like a raw key blob
        :raises TypeError: if ``key`` is not SSHPublicKey, bytes or text
        :raises ValueError: if ``key`` cannot be parsed in any format
        """
        # Copy constructor: duplicate the parsed state of another key.
        if isinstance(key, SSHPublicKey):
            self._key = key._key
            self._keytype = key._keytype
            self._comment = key._comment
            self._options = key._options
            return

        if not isinstance(key, (bytes, unicode)):
            raise TypeError("argument must be bytes or unicode, got %s" % type(key).__name__)

        # All valid public key blobs start with 3 null bytes (see RFC 4253
        # section 6.6, RFC 4251 section 5 and RFC 4250 section 4.6)
        if isinstance(key, bytes) and key[:3] != b'\0\0\0':
            key = key.decode(encoding)

        # Try formats from most to least specific; each parser sets the
        # instance attributes itself on success.
        valid = self._parse_raw(key) or self._parse_base64(key) or self._parse_openssh(key)
        if not valid:
            raise ValueError("not a valid SSH public key")

        # Explicit arguments win over whatever was parsed from the key.
        if comment is not None:
            self._comment = comment
        if options is not None:
            self._options = options

    def _parse_raw(self, key):
        """Parse a raw RFC 4253 key blob; return True on success."""
        if not isinstance(key, bytes):
            return False
        try:
            # The first 4 bytes are the big-endian length of the key
            # type string that follows.
            (ktlen,) = struct.unpack('>I', key[:4])
        except struct.error:
            return False
        if ktlen < 1 or ktlen > len(key) - 4:
            return False
        try:
            keytype = key[4:ktlen+4].decode('ascii')
        except UnicodeDecodeError:
            return False
        self._key = key
        self._keytype = keytype
        self._options = {}
        self._comment = None
        return True

    def _parse_base64(self, key):
        """Parse a base64-encoded key blob (text); return True on success."""
        if not isinstance(key, unicode):
            return False
        try:
            key = base64.b64decode(key)
        except (TypeError, ValueError):
            return False
        return self._parse_raw(key)

    def _parse_openssh_without_options(self, key):
        """Parse '<keytype> <base64-key> [comment]'; True on success."""
        match = OPENSSH_BASE_REGEX.match(key)
        if not match:
            return False
        if not self._parse_base64(match.group('key')):
            return False
        # The declared key type must agree with the type embedded in the
        # decoded blob.
        if self._keytype != match.group('keytype'):
            return False
        self._comment = match.group('comment')
        return True

    def _parse_openssh_with_options(self, key):
        """Parse an OpenSSH line with a leading comma-separated options
        list, then hand the remainder to the option-less parser."""
        key = key.lstrip('\t ')
        options = {}
        while True:
            match = OPENSSH_OPTIONS_REGEX.match(key)
            if not match:
                return False
            name = match.group('name').lower()
            value = match.group('value')
            if value:
                # Unescape quotes inside quoted option values.
                value = value.replace('\\"', '"')
            options[name] = value
            # Consume the matched option plus the following separator
            # character; ',' continues the list, anything else ends it.
            key = key[len(match.group(0)):]
            key0, key = key[:1], key[1:]
            if key0 != ',':
                break
        if not self._parse_openssh_without_options(key):
            return False
        self._options = options
        return True

    def _parse_openssh(self, key):
        """Parse an OpenSSH public key line, with or without options."""
        if not isinstance(key, unicode):
            return False
        if self._parse_openssh_without_options(key):
            return True
        else:
            return self._parse_openssh_with_options(key)

    def keytype(self):
        """Return the SSH key type string, e.g. 'ssh-rsa'."""
        return self._keytype

    def comment(self):
        """Return the key comment, or None if there is none."""
        return self._comment

    def has_options(self):
        """Return True if the key carries any OpenSSH options."""
        return bool(self._options)

    def openssh(self):
        """Serialize the key back to a single OpenSSH public key line:
        '[options ]<keytype> <base64-key>[ comment]'."""
        key = base64.b64encode(self._key).decode('ascii')
        out = u'%s %s' % (self._keytype, key)
        if self._options:
            options = []
            # Sort option names for deterministic output.
            for name in sorted(self._options):
                value = self._options[name]
                if value is None:
                    options.append(name)
                else:
                    # Re-escape quotes that were unescaped when parsing.
                    value = value.replace('"', '\\"')
                    options.append(u'%s="%s"' % (name, value))
            options = u','.join(options)
            out = u'%s %s' % (options, out)
        if self._comment:
            out = u'%s %s' % (out, self._comment)
        return out

    def fingerprint_hex_sha256(self):
        """Return the OpenSSH-style 'SHA256:<base64>' fingerprint."""
        # OpenSSH trims the trailing '=' of base64 sha256 FP representation
        fp = base64.b64encode(sha256(self._key).digest()).rstrip(b'=')
        return u'SHA256:{fp}'.format(fp=fp.decode('utf-8'))

    def _fingerprint_dns(self, fpfunc, fptype):
        """
        Return DNS SSHFP-style record data '<algo> <fptype> <hexdigest>'
        computed with ``fpfunc``, or None for unsupported key types.
        """
        # Algorithm numbers per the SSHFP registry (RFC 4255 and
        # extensions) — NOTE(review): mapping taken from the code; verify
        # against the IANA SSHFP algorithm registry when extending.
        if self._keytype == 'ssh-rsa':
            keytype = 1
        elif self._keytype == 'ssh-dss':
            keytype = 2
        elif self._keytype.startswith('ecdsa-sha2-') and '@' not in self._keytype:
            keytype = 3
        elif self._keytype == 'ssh-ed25519':
            keytype = 4
        else:
            return
        fp = fpfunc(self._key).hexdigest().upper()
        return u'%d %d %s' % (keytype, fptype, fp)

    def fingerprint_dns_sha1(self):
        """Return SHA-1 SSHFP record data, or None if unsupported."""
        return self._fingerprint_dns(sha1, 1)

    def fingerprint_dns_sha256(self):
        """Return SHA-256 SSHFP record data, or None if unsupported."""
        return self._fingerprint_dns(sha256, 2)

619
ipapython/version.py Normal file
View File

@@ -0,0 +1,619 @@
# Authors: Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2007 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# NOTE: this file is generated from version.py.in by the sed rule in
# ipapython/Makefile.am; do not edit this generated copy by hand.

# The full version including strings
VERSION="4.6.2"

# A fuller version including the vendor tag (e.g. 3.3.3-34.fc20)
VENDOR_VERSION="4.6.2"

# Just the numeric portion of the version so one can do direct numeric
# comparisons to see if the API is compatible.
#
# How NUM_VERSION was generated changed over time:
# Before IPA 3.1.3, it was simply concatenated decimal numbers:
#  IPA 2.2.2: NUM_VERSION=222
#  IPA 2.2.99: NUM_VERSION=2299 (development version)
#  IPA 3.1.0: NUM_VERSION=310
#  IPA 3.1.3: NUM_VERSION=313
# In IPA 3.1.4 and 3.2.0, the version was taken as an octal number due to a bug
# (https://fedorahosted.org/freeipa/ticket/3622):
#  IPA 3.1.4: NUM_VERSION=12356 (octal 030104)
#  IPA 3.2.0: NUM_VERSION=12416 (octal 030200)
# After IPA 3.2.0, it is decimal number where each part has two digits:
#  IPA 3.2.1: NUM_VERSION=30201
#  IPA 3.2.99: NUM_VERSION=30299 (development version)
#  IPA 3.3.0: NUM_VERSION=30300
NUM_VERSION=40602

# The version of the API.
API_VERSION=u'2.229'
DEFAULT_PLUGINS = frozenset(l.strip() for l in """
aci/1
aci_add/1
aci_del/1
aci_find/1
aci_mod/1
aci_rename/1
aci_show/1
adtrust_is_enabled/1
automember/1
automember_add/1
automember_add_condition/1
automember_default_group/1
automember_default_group_remove/1
automember_default_group_set/1
automember_default_group_show/1
automember_del/1
automember_find/1
automember_mod/1
automember_rebuild/1
automember_remove_condition/1
automember_show/1
automember_task/1
automountkey/1
automountkey_add/1
automountkey_del/1
automountkey_find/1
automountkey_mod/1
automountkey_show/1
automountlocation/1
automountlocation_add/1
automountlocation_del/1
automountlocation_find/1
automountlocation_show/1
automountlocation_tofiles/1
automountmap/1
automountmap_add/1
automountmap_add_indirect/1
automountmap_del/1
automountmap_find/1
automountmap_mod/1
automountmap_show/1
batch/1
ca/1
ca_add/1
ca_del/1
ca_disable/1
ca_enable/1
ca_find/1
ca_is_enabled/1
ca_mod/1
ca_show/1
caacl/1
caacl_add/1
caacl_add_ca/1
caacl_add_host/1
caacl_add_profile/1
caacl_add_service/1
caacl_add_user/1
caacl_del/1
caacl_disable/1
caacl_enable/1
caacl_find/1
caacl_mod/1
caacl_remove_ca/1
caacl_remove_host/1
caacl_remove_profile/1
caacl_remove_service/1
caacl_remove_user/1
caacl_show/1
cert/1
cert_find/1
cert_remove_hold/1
cert_request/1
cert_revoke/1
cert_show/1
cert_status/1
certmap/1
certmap_match/1
certmapconfig/1
certmapconfig_mod/1
certmapconfig_show/1
certmaprule/1
certmaprule_add/1
certmaprule_del/1
certmaprule_disable/1
certmaprule_enable/1
certmaprule_find/1
certmaprule_mod/1
certmaprule_show/1
certprofile/1
certprofile_del/1
certprofile_find/1
certprofile_import/1
certprofile_mod/1
certprofile_show/1
certreq/1
class/1
class_find/1
class_show/1
command/1
command_defaults/1
command_find/1
command_show/1
compat_is_enabled/1
config/1
config_mod/1
config_show/1
cosentry/1
cosentry_add/1
cosentry_del/1
cosentry_find/1
cosentry_mod/1
cosentry_show/1
delegation/1
delegation_add/1
delegation_del/1
delegation_find/1
delegation_mod/1
delegation_show/1
dns_is_enabled/1
dns_resolve/1
dns_system_records/1
dns_update_system_records/1
dnsa6record/1
dnsaaaarecord/1
dnsafsdbrecord/1
dnsaplrecord/1
dnsarecord/1
dnscertrecord/1
dnscnamerecord/1
dnsconfig/1
dnsconfig_mod/1
dnsconfig_show/1
dnsdhcidrecord/1
dnsdlvrecord/1
dnsdnamerecord/1
dnsdsrecord/1
dnsforwardzone/1
dnsforwardzone_add/1
dnsforwardzone_add_permission/1
dnsforwardzone_del/1
dnsforwardzone_disable/1
dnsforwardzone_enable/1
dnsforwardzone_find/1
dnsforwardzone_mod/1
dnsforwardzone_remove_permission/1
dnsforwardzone_show/1
dnshiprecord/1
dnsipseckeyrecord/1
dnskeyrecord/1
dnskxrecord/1
dnslocrecord/1
dnsmxrecord/1
dnsnaptrrecord/1
dnsnsecrecord/1
dnsnsrecord/1
dnsptrrecord/1
dnsrecord/1
dnsrecord_add/1
dnsrecord_del/1
dnsrecord_delentry/1
dnsrecord_find/1
dnsrecord_mod/1
dnsrecord_show/1
dnsrecord_split_parts/1
dnsrprecord/1
dnsrrsigrecord/1
dnsserver/1
dnsserver_add/1
dnsserver_del/1
dnsserver_find/1
dnsserver_mod/1
dnsserver_show/1
dnssigrecord/1
dnsspfrecord/1
dnssrvrecord/1
dnssshfprecord/1
dnstlsarecord/1
dnstxtrecord/1
dnsurirecord/1
dnszone/1
dnszone_add/1
dnszone_add_permission/1
dnszone_del/1
dnszone_disable/1
dnszone_enable/1
dnszone_find/1
dnszone_mod/1
dnszone_remove_permission/1
dnszone_show/1
domainlevel_get/1
domainlevel_set/1
env/1
group/1
group_add/1
group_add_member/1
group_del/1
group_detach/1
group_find/1
group_mod/1
group_remove_member/1
group_show/1
hbacrule/1
hbacrule_add/1
hbacrule_add_host/1
hbacrule_add_service/1
hbacrule_add_sourcehost/1
hbacrule_add_user/1
hbacrule_del/1
hbacrule_disable/1
hbacrule_enable/1
hbacrule_find/1
hbacrule_mod/1
hbacrule_remove_host/1
hbacrule_remove_service/1
hbacrule_remove_sourcehost/1
hbacrule_remove_user/1
hbacrule_show/1
hbacsvc/1
hbacsvc_add/1
hbacsvc_del/1
hbacsvc_find/1
hbacsvc_mod/1
hbacsvc_show/1
hbacsvcgroup/1
hbacsvcgroup_add/1
hbacsvcgroup_add_member/1
hbacsvcgroup_del/1
hbacsvcgroup_find/1
hbacsvcgroup_mod/1
hbacsvcgroup_remove_member/1
hbacsvcgroup_show/1
hbactest/1
host/1
host_add/1
host_add_cert/1
host_add_managedby/1
host_add_principal/1
host_allow_create_keytab/1
host_allow_retrieve_keytab/1
host_del/1
host_disable/1
host_disallow_create_keytab/1
host_disallow_retrieve_keytab/1
host_find/1
host_mod/1
host_remove_cert/1
host_remove_managedby/1
host_remove_principal/1
host_show/1
hostgroup/1
hostgroup_add/1
hostgroup_add_member/1
hostgroup_del/1
hostgroup_find/1
hostgroup_mod/1
hostgroup_remove_member/1
hostgroup_show/1
i18n_messages/1
idoverridegroup/1
idoverridegroup_add/1
idoverridegroup_del/1
idoverridegroup_find/1
idoverridegroup_mod/1
idoverridegroup_show/1
idoverrideuser/1
idoverrideuser_add/1
idoverrideuser_add_cert/1
idoverrideuser_del/1
idoverrideuser_find/1
idoverrideuser_mod/1
idoverrideuser_remove_cert/1
idoverrideuser_show/1
idrange/1
idrange_add/1
idrange_del/1
idrange_find/1
idrange_mod/1
idrange_show/1
idview/1
idview_add/1
idview_apply/1
idview_del/1
idview_find/1
idview_mod/1
idview_show/1
idview_unapply/1
join/1
json_metadata/1
kra_is_enabled/1
krbtpolicy/1
krbtpolicy_mod/1
krbtpolicy_reset/1
krbtpolicy_show/1
location/1
location_add/1
location_del/1
location_find/1
location_mod/1
location_show/1
metaobject/1
migrate_ds/1
netgroup/1
netgroup_add/1
netgroup_add_member/1
netgroup_del/1
netgroup_find/1
netgroup_mod/1
netgroup_remove_member/1
netgroup_show/1
otpconfig/1
otpconfig_mod/1
otpconfig_show/1
otptoken/1
otptoken_add/1
otptoken_add_managedby/1
otptoken_del/1
otptoken_find/1
otptoken_mod/1
otptoken_remove_managedby/1
otptoken_show/1
output/1
output_find/1
output_show/1
param/1
param_find/1
param_show/1
passwd/1
permission/1
permission_add/1
permission_add_member/1
permission_add_noaci/1
permission_del/1
permission_find/1
permission_mod/1
permission_remove_member/1
permission_show/1
ping/1
pkinit/1
pkinit_status/1
plugins/1
privilege/1
privilege_add/1
privilege_add_member/1
privilege_add_permission/1
privilege_del/1
privilege_find/1
privilege_mod/1
privilege_remove_member/1
privilege_remove_permission/1
privilege_show/1
pwpolicy/1
pwpolicy_add/1
pwpolicy_del/1
pwpolicy_find/1
pwpolicy_mod/1
pwpolicy_show/1
radiusproxy/1
radiusproxy_add/1
radiusproxy_del/1
radiusproxy_find/1
radiusproxy_mod/1
radiusproxy_show/1
realmdomains/1
realmdomains_mod/1
realmdomains_show/1
role/1
role_add/1
role_add_member/1
role_add_privilege/1
role_del/1
role_find/1
role_mod/1
role_remove_member/1
role_remove_privilege/1
role_show/1
schema/1
selfservice/1
selfservice_add/1
selfservice_del/1
selfservice_find/1
selfservice_mod/1
selfservice_show/1
selinuxusermap/1
selinuxusermap_add/1
selinuxusermap_add_host/1
selinuxusermap_add_user/1
selinuxusermap_del/1
selinuxusermap_disable/1
selinuxusermap_enable/1
selinuxusermap_find/1
selinuxusermap_mod/1
selinuxusermap_remove_host/1
selinuxusermap_remove_user/1
selinuxusermap_show/1
server/1
server_conncheck/1
server_del/1
server_find/1
server_mod/1
server_role/1
server_role_find/1
server_role_show/1
server_show/1
service/1
service_add/1
service_add_cert/1
service_add_host/1
service_add_principal/1
service_allow_create_keytab/1
service_allow_retrieve_keytab/1
service_del/1
service_disable/1
service_disallow_create_keytab/1
service_disallow_retrieve_keytab/1
service_find/1
service_mod/1
service_remove_cert/1
service_remove_host/1
service_remove_principal/1
service_show/1
servicedelegationrule/1
servicedelegationrule_add/1
servicedelegationrule_add_member/1
servicedelegationrule_add_target/1
servicedelegationrule_del/1
servicedelegationrule_find/1
servicedelegationrule_remove_member/1
servicedelegationrule_remove_target/1
servicedelegationrule_show/1
servicedelegationtarget/1
servicedelegationtarget_add/1
servicedelegationtarget_add_member/1
servicedelegationtarget_del/1
servicedelegationtarget_find/1
servicedelegationtarget_remove_member/1
servicedelegationtarget_show/1
servrole/1
session_logout/1
sidgen_was_run/1
stageuser/1
stageuser_activate/1
stageuser_add/1
stageuser_add_cert/1
stageuser_add_certmapdata/1
stageuser_add_manager/1
stageuser_add_principal/1
stageuser_del/1
stageuser_find/1
stageuser_mod/1
stageuser_remove_cert/1
stageuser_remove_certmapdata/1
stageuser_remove_manager/1
stageuser_remove_principal/1
stageuser_show/1
sudocmd/1
sudocmd_add/1
sudocmd_del/1
sudocmd_find/1
sudocmd_mod/1
sudocmd_show/1
sudocmdgroup/1
sudocmdgroup_add/1
sudocmdgroup_add_member/1
sudocmdgroup_del/1
sudocmdgroup_find/1
sudocmdgroup_mod/1
sudocmdgroup_remove_member/1
sudocmdgroup_show/1
sudorule/1
sudorule_add/1
sudorule_add_allow_command/1
sudorule_add_deny_command/1
sudorule_add_host/1
sudorule_add_option/1
sudorule_add_runasgroup/1
sudorule_add_runasuser/1
sudorule_add_user/1
sudorule_del/1
sudorule_disable/1
sudorule_enable/1
sudorule_find/1
sudorule_mod/1
sudorule_remove_allow_command/1
sudorule_remove_deny_command/1
sudorule_remove_host/1
sudorule_remove_option/1
sudorule_remove_runasgroup/1
sudorule_remove_runasuser/1
sudorule_remove_user/1
sudorule_show/1
topic/1
topic_find/1
topic_show/1
topologysegment/1
topologysegment_add/1
topologysegment_del/1
topologysegment_find/1
topologysegment_mod/1
topologysegment_reinitialize/1
topologysegment_show/1
topologysuffix/1
topologysuffix_add/1
topologysuffix_del/1
topologysuffix_find/1
topologysuffix_mod/1
topologysuffix_show/1
topologysuffix_verify/1
trust/1
trust_add/1
trust_del/1
trust_fetch_domains/1
trust_find/1
trust_mod/1
trust_resolve/1
trust_show/1
trustconfig/1
trustconfig_mod/1
trustconfig_show/1
trustdomain/1
trustdomain_add/1
trustdomain_del/1
trustdomain_disable/1
trustdomain_enable/1
trustdomain_find/1
trustdomain_mod/1
user/1
user_add/1
user_add_cert/1
user_add_certmapdata/1
user_add_manager/1
user_add_principal/1
user_del/1
user_disable/1
user_enable/1
user_find/1
user_mod/1
user_remove_cert/1
user_remove_certmapdata/1
user_remove_manager/1
user_remove_principal/1
user_show/1
user_stage/1
user_status/1
user_undel/1
user_unlock/1
userstatus/1
vault/1
vault_add_internal/1
vault_add_member/1
vault_add_owner/1
vault_archive_internal/1
vault_del/1
vault_find/1
vault_mod_internal/1
vault_remove_member/1
vault_remove_owner/1
vault_retrieve_internal/1
vault_show/1
vaultconfig/1
vaultconfig_show/1
vaultcontainer/1
vaultcontainer_add_owner/1
vaultcontainer_del/1
vaultcontainer_remove_owner/1
vaultcontainer_show/1
whoami/1
""".strip().splitlines())

53
ipapython/version.py.in Normal file
View File

@@ -0,0 +1,53 @@
# Authors: Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2007 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# NOTE: this is a template.  The @TOKEN@ placeholders below are replaced
# by the sed rule in ipapython/Makefile.am to produce version.py; the
# file as-is is not importable Python.

# The full version including strings
VERSION="@VERSION@"

# A fuller version including the vendor tag (e.g. 3.3.3-34.fc20)
VENDOR_VERSION="@VERSION@@VENDOR_SUFFIX@"

# Just the numeric portion of the version so one can do direct numeric
# comparisons to see if the API is compatible.
#
# How NUM_VERSION was generated changed over time:
# Before IPA 3.1.3, it was simply concatenated decimal numbers:
#  IPA 2.2.2: NUM_VERSION=222
#  IPA 2.2.99: NUM_VERSION=2299 (development version)
#  IPA 3.1.0: NUM_VERSION=310
#  IPA 3.1.3: NUM_VERSION=313
# In IPA 3.1.4 and 3.2.0, the version was taken as an octal number due to a bug
# (https://fedorahosted.org/freeipa/ticket/3622):
#  IPA 3.1.4: NUM_VERSION=12356 (octal 030104)
#  IPA 3.2.0: NUM_VERSION=12416 (octal 030200)
# After IPA 3.2.0, it is decimal number where each part has two digits:
#  IPA 3.2.1: NUM_VERSION=30201
#  IPA 3.2.99: NUM_VERSION=30299 (development version)
#  IPA 3.3.0: NUM_VERSION=30300
NUM_VERSION=@NUM_VERSION@

# The version of the API.
API_VERSION=u'@API_VERSION@'

# The @DEFAULT_PLUGINS@ placeholder line is replaced with the contents
# of .DEFAULT_PLUGINS (one plugin name per line, extracted from API.txt)
# by the Makefile sed rule.
DEFAULT_PLUGINS = frozenset(l.strip() for l in """
@DEFAULT_PLUGINS@
""".strip().splitlines())