Imported Debian patch 4.7.2-3

Author: Timo Aaltonen
Date: 2019-05-06 08:43:34 +03:00
Committed by: Mario Fetka
Parent: 27edeba051
Commit: 8bc559c5a1
917 changed files with 1068993 additions and 1184676 deletions

View File

@@ -1,7 +1,7 @@
# Makefile.in generated by automake 1.16.2 from Makefile.am.
# Makefile.in generated by automake 1.16.1 from Makefile.am.
# @configure_input@
# Copyright (C) 1994-2020 Free Software Foundation, Inc.
# Copyright (C) 1994-2018 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
@@ -185,8 +185,6 @@ JSLINT = @JSLINT@
KRAD_LIBS = @KRAD_LIBS@
KRB5KDC_SERVICE = @KRB5KDC_SERVICE@
KRB5_CFLAGS = @KRB5_CFLAGS@
KRB5_GSSAPI_CFLAGS = @KRB5_GSSAPI_CFLAGS@
KRB5_GSSAPI_LIBS = @KRB5_GSSAPI_LIBS@
KRB5_LIBS = @KRB5_LIBS@
LD = @LD@
LDAP_CFLAGS = @LDAP_CFLAGS@
@@ -229,10 +227,11 @@ NM = @NM@
NMEDIT = @NMEDIT@
NSPR_CFLAGS = @NSPR_CFLAGS@
NSPR_LIBS = @NSPR_LIBS@
NSS_CFLAGS = @NSS_CFLAGS@
NSS_LIBS = @NSS_LIBS@
NUM_VERSION = @NUM_VERSION@
OBJDUMP = @OBJDUMP@
OBJEXT = @OBJEXT@
ODS_GROUP = @ODS_GROUP@
ODS_USER = @ODS_USER@
OTOOL = @OTOOL@
OTOOL64 = @OTOOL64@
@@ -253,6 +252,8 @@ POPT_LIBS = @POPT_LIBS@
POSUB = @POSUB@
PYLINT = @PYLINT@
PYTHON = @PYTHON@
PYTHON2 = @PYTHON2@
PYTHON3 = @PYTHON3@
PYTHON_EXEC_PREFIX = @PYTHON_EXEC_PREFIX@
PYTHON_INSTALL_EXTRA_OPTIONS = @PYTHON_INSTALL_EXTRA_OPTIONS@
PYTHON_PLATFORM = @PYTHON_PLATFORM@
@@ -340,9 +341,7 @@ program_transform_name = @program_transform_name@
psdir = @psdir@
pyexecdir = @pyexecdir@
pythondir = @pythondir@
runstatedir = @runstatedir@
sbindir = @sbindir@
selinux_makefile = @selinux_makefile@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@

View File

@@ -28,7 +28,6 @@ import os
import traceback
from optparse import OptionGroup # pylint: disable=deprecated-module
from ipaplatform.osinfo import osinfo
from ipapython import version
from ipapython import config
from ipapython.ipa_log_manager import standard_logging_setup
@@ -54,7 +53,7 @@ class ScriptError(Exception):
return str(self)
class AdminTool:
class AdminTool(object):
"""Base class for command-line admin tools
To run the tool, call the main() classmethod with a list of command-line
@@ -94,6 +93,7 @@ class AdminTool:
log_file_name = None
usage = None
description = None
ignore_return_codes = ()
_option_parsers = dict()
@@ -162,7 +162,6 @@ class AdminTool:
def __init__(self, options, args):
self.options = options
self.args = args
self.log_file_initialized = False
self.safe_options = self.option_parser.get_safe_opts(options)
def execute(self):
@@ -185,7 +184,7 @@ class AdminTool:
return_value = exception.rval # pylint: disable=no-member
traceback = sys.exc_info()[2]
error_message, return_value = self.handle_error(exception)
if return_value:
if return_value and return_value not in self.ignore_return_codes:
self.log_failure(error_message, return_value, exception,
traceback)
return return_value
@@ -249,15 +248,12 @@ class AdminTool:
break
self._setup_logging(log_file_mode=log_file_mode)
if self.log_file_name:
self.log_file_initialized = True
def _setup_logging(self, log_file_mode='w', no_file=False):
if no_file:
log_file_name = None
elif self.options.log_file:
log_file_name = self.options.log_file
self.log_file_name = log_file_name
else:
log_file_name = self.log_file_name
if self.options.verbose:
@@ -284,7 +280,7 @@ class AdminTool:
"""Given an exception, return a message (or None) and process exit code
"""
if isinstance(exception, ScriptError):
return exception.msg, exception.rval
return exception.msg, exception.rval or 1
elif isinstance(exception, SystemExit):
if isinstance(exception.code, int):
return None, exception.code
@@ -305,8 +301,6 @@ class AdminTool:
logger.debug('%s was invoked with arguments %s and options: %s',
self.command_name, self.args, self.safe_options)
logger.debug('IPA version %s', version.VENDOR_VERSION)
logger.debug('IPA platform %s', osinfo.platform)
logger.debug('IPA os-release %s %s', osinfo.name, osinfo.version)
def log_failure(self, error_message, return_value, exception, backtrace):
logger.debug('%s', ''.join(traceback.format_tb(backtrace)))
@@ -314,13 +308,10 @@ class AdminTool:
self.command_name, type(exception).__name__, exception)
if error_message:
logger.error('%s', error_message)
if return_value == 0:
# A script may raise an exception but still want quit gracefully,
# like the case of ipa-client-install called from
# ipa-server-install.
return
message = "The %s command failed." % self.command_name
if self.log_file_initialized and return_value != SERVER_NOT_CONFIGURED:
if self.log_file_name and return_value != 2:
# magic value because this is common between server and client
# but imports are not straigthforward
message += " See %s for more information" % self.log_file_name
logger.error('%s', message)
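
For context, the admintool.py hunks above adjust how AdminTool turns an exception into an exit code and decides whether to log a failure, including the ignore_return_codes class attribute. A minimal standalone sketch of that pattern (simplified, not the FreeIPA implementation; MiniTool and its error message are made up):

    import logging
    import sys

    logger = logging.getLogger(__name__)

    class ScriptError(Exception):
        def __init__(self, msg='', rval=1):
            super().__init__(msg)
            self.msg = msg
            self.rval = rval

    class MiniTool:
        ignore_return_codes = ()   # exit codes that are returned but never logged

        def run(self):
            raise ScriptError('disk is full', rval=2)

        def handle_error(self, exception):
            # Map an exception to (message, exit code); rval 0 still exits with 1.
            if isinstance(exception, ScriptError):
                return exception.msg, exception.rval or 1
            return str(exception), 1

        def execute(self):
            try:
                return self.run() or 0
            except BaseException as exception:
                message, return_value = self.handle_error(exception)
                if return_value and return_value not in self.ignore_return_codes:
                    logger.error('%s', message)
                return return_value

    if __name__ == '__main__':
        sys.exit(MiniTool().execute())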

View File

@@ -19,18 +19,16 @@
from __future__ import absolute_import
import collections
import datetime
import logging
import os
import io
import pwd
import grp
import re
import shutil
import stat
import tempfile
from ctypes.util import find_library
from tempfile import NamedTemporaryFile
import shutil
import cryptography.x509
@@ -74,10 +72,6 @@ TRUSTED_PEER_TRUST_FLAGS = TrustFlags(
)
def nss_supports_dbm():
return bool(find_library("nssdbm3"))
def get_ca_nickname(realm, format=CA_NICKNAME_FMT):
return format % realm
@@ -179,11 +173,6 @@ def unparse_trust_flags(trust_flags):
def verify_kdc_cert_validity(kdc_cert, ca_certs, realm):
"""
Verifies the validity of a kdc_cert, ensuring it is trusted by
the ca_certs chain, has a PKINIT_KDC extended key usage support,
and verify it applies to the given realm.
"""
with NamedTemporaryFile() as kdc_file, NamedTemporaryFile() as ca_file:
kdc_file.write(kdc_cert.public_bytes(x509.Encoding.PEM))
kdc_file.flush()
@@ -242,7 +231,7 @@ class Pkcs12ImportUnknownError(RuntimeError):
"""
class NSSDatabase:
class NSSDatabase(object):
"""A general-purpose wrapper around a NSS cert database
For permanent NSS databases, pass the cert DB directory to __init__
@@ -257,20 +246,14 @@ class NSSDatabase:
# Generic NSS DB code should be moved here.
def __init__(self, nssdir=None, dbtype='auto'):
if nssdir is not None:
self.secdir = nssdir
self._is_temporary = False
if dbtype == "auto":
dbtype = self._detect_dbtype()
if dbtype == "dbm" and not nss_supports_dbm():
raise ValueError(
"NSS is built without support of the legacy database(DBM)"
)
if nssdir is None:
self.secdir = tempfile.mkdtemp()
self._is_temporary = True
else:
self.secdir = nssdir
self._is_temporary = False
if dbtype == 'auto':
dbtype = self._detect_dbtype()
self.pwd_file = os.path.join(self.secdir, 'pwdfile.txt')
self.dbtype = None
@@ -553,20 +536,15 @@ class NSSDatabase:
def get_trust_chain(self, nickname):
"""Return names of certs in a given cert's trust chain
The list starts with root ca, then first intermediate CA, second
intermediate, and so on.
:param nickname: Name of the cert
:return: List of certificate names
"""
root_nicknames = []
result = self.run_certutil(
["-O", "--simple-self-signed", "-n", nickname],
capture_output=True)
result = self.run_certutil(["-O", "-n", nickname], capture_output=True)
chain = result.output.splitlines()
for c in chain:
m = re.match(r'\s*"(.*)" \[.*', c)
m = re.match('\s*"(.*)" \[.*', c)
if m:
root_nicknames.append(m.groups()[0])
@@ -816,8 +794,6 @@ class NSSDatabase:
'-out', out_file.name,
'-passin', 'file:' + self.pwd_file,
'-passout', 'file:' + out_pwdfile.name,
'-certpbe', 'aes-128-cbc',
'-keypbe', 'aes-128-cbc',
]
try:
ipautil.run(args)
@@ -904,58 +880,12 @@ class NSSDatabase:
def delete_cert(self, nick):
self.run_certutil(["-D", "-n", nick])
def delete_key_only(self, nick):
"""Delete the key with provided nick
This commands removes the key but leaves the cert in the DB.
"""
keys = self.list_keys()
# keys is a list of tuple(slot, algo, keyid, nickname)
for (_slot, _algo, keyid, nickname) in keys:
if nickname == nick:
# Key is present in the DB, delete the key
self.run_certutil(["-F", "-k", keyid])
break
def delete_key_and_cert(self, nick):
"""Delete a cert and its key from the DB"""
try:
self.run_certutil(["-F", "-n", nick])
except ipautil.CalledProcessError:
# Using -F -k instead of -F -n because the latter fails if
# the DB contains only the key
self.delete_key_only(nick)
# Check that cert was deleted
for (certname, _flags) in self.list_certs():
if certname == nick:
self.delete_cert(nick)
def _verify_cert_validity(self, cert):
"""Common checks for cert validity
"""
utcnow = datetime.datetime.utcnow()
if cert.not_valid_before > utcnow:
raise ValueError(
f"not valid before {cert.not_valid_before} UTC is in the "
"future."
)
if cert.not_valid_after < utcnow:
raise ValueError(
f"has expired {cert.not_valid_after} UTC"
)
# make sure the cert does not expire during installation
if cert.not_valid_after + datetime.timedelta(hours=1) < utcnow:
raise ValueError(
f"expires in less than one hour ({cert.not_valid_after} UTC)"
)
def verify_server_cert_validity(self, nickname, hostname):
"""Verify a certificate is valid for a SSL server with given hostname
Raises a ValueError if the certificate is invalid.
"""
cert = self.get_cert(nickname)
self._verify_cert_validity(cert)
try:
self.run_certutil(
@@ -977,9 +907,8 @@ class NSSDatabase:
except ValueError:
raise ValueError('invalid for server %s' % hostname)
def verify_ca_cert_validity(self, nickname, minpathlen=None):
def verify_ca_cert_validity(self, nickname):
cert = self.get_cert(nickname)
self._verify_cert_validity(cert)
if not cert.subject:
raise ValueError("has empty subject")
@@ -992,15 +921,6 @@ class NSSDatabase:
if not bc.value.ca:
raise ValueError("not a CA certificate")
if minpathlen is not None:
# path_length is None means no limitation
pl = bc.value.path_length
if pl is not None and pl < minpathlen:
raise ValueError(
"basic contraint pathlen {}, must be at least {}".format(
pl, minpathlen
)
)
try:
ski = cert.extensions.get_extension_for_class(

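The certdb.py hunks above remove, among other things, the _verify_cert_validity helper, which checks a certificate's validity window with python-cryptography. A standalone sketch of that check (only the function; loading the cryptography.x509.Certificate is left out):

    import datetime

    def verify_validity_window(cert):
        """Raise ValueError if a cryptography.x509.Certificate is outside its
        validity period (UTC)."""
        utcnow = datetime.datetime.utcnow()
        if cert.not_valid_before > utcnow:
            raise ValueError("not valid before %s UTC is in the future"
                             % cert.not_valid_before)
        if cert.not_valid_after < utcnow:
            raise ValueError("has expired %s UTC" % cert.not_valid_after)
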
View File

@@ -23,13 +23,15 @@ from optparse import (
Option, Values, OptionParser, IndentedHelpFormatter, OptionValueError)
# pylint: enable=deprecated-module
from copy import copy
from configparser import SafeConfigParser
from urllib.parse import urlsplit
import socket
import functools
from dns.exception import DNSException
import dns.name
# pylint: disable=import-error
from six.moves.configparser import SafeConfigParser
from six.moves.urllib.parse import urlsplit
# pylint: enable=import-error
from ipaplatform.paths import paths
from ipapython.dn import DN
@@ -131,7 +133,7 @@ class IPAOptionParser(OptionParser):
safe_opts_dict = {}
for option, value in opts.__dict__.items():
if not all_opts_dict[option].sensitive:
if all_opts_dict[option].sensitive != True:
safe_opts_dict[option] = value
return Values(safe_opts_dict)
@@ -150,7 +152,7 @@ def verify_args(parser, args, needed_args = None):
parser.error("no %s specified" % needed_list[len_have])
class IPAConfig:
class IPAConfig(object):
def __init__(self):
self.default_realm = None
self.default_server = []
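
The get_safe_opts hunk above filters out options flagged as sensitive before they are logged. A minimal sketch of the same idea using a plain dict of parsed values (the option names marked as sensitive are only examples):

    SENSITIVE_OPTIONS = {'dm_password', 'admin_password'}

    def get_safe_opts(opts):
        """Return a copy of parsed options with sensitive values left out."""
        return {name: value for name, value in opts.items()
                if name not in SENSITIVE_OPTIONS}

    print(get_safe_opts({'realm': 'EXAMPLE.COM', 'dm_password': 'secret'}))
    # {'realm': 'EXAMPLE.COM'}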

View File

@@ -21,8 +21,11 @@ import re
import datetime
import email.utils
from calendar import timegm
from urllib.parse import urlparse
import six
# pylint: disable=import-error
from six.moves.urllib.parse import urlparse
# pylint: enable=import-error
'''
Core Python has two cookie libraries, Cookie.py targeted to server
@@ -63,8 +66,7 @@ escaping and unescapin.
#-------------------------------------------------------------------------------
class Cookie:
class Cookie(object):
'''
A Cookie object has the following attributes:
@@ -219,7 +221,7 @@ class Cookie:
return '/'
if url_path.count('/') <= 1:
return '/'
return'/'
return url_path[:url_path.rindex('/')]
@@ -389,9 +391,9 @@ class Cookie:
self._timestamp = None
elif isinstance(value, datetime.datetime):
self._timestamp = value
elif isinstance(value, (int, float)):
elif isinstance(value, (six.integer_types, float)):
self._timestamp = datetime.datetime.utcfromtimestamp(value)
elif isinstance(value, str):
elif isinstance(value, six.string_types):
self._timestamp = Cookie.parse_datetime(value)
else:
raise TypeError('value must be datetime, int, long, float, basestring or None, not %s' % \
@@ -415,9 +417,9 @@ class Cookie:
self._expires = None
elif isinstance(value, datetime.datetime):
self._expires = value
elif isinstance(value, (int, float)):
elif isinstance(value, (six.integer_types, float)):
self._expires = datetime.datetime.utcfromtimestamp(value)
elif isinstance(value, str):
elif isinstance(value, six.string_types):
self._expires = Cookie.parse_datetime(value)
else:
raise TypeError('value must be datetime, int, long, float, basestring or None, not %s' % \
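
The timestamp and expires setters above accept a datetime, an epoch number, or a date string and normalize all of them to datetime. A standalone sketch of that coercion (email.utils.parsedate_to_datetime stands in for Cookie.parse_datetime):

    import datetime
    from email.utils import parsedate_to_datetime

    def normalize_timestamp(value):
        """Coerce None, datetime, epoch seconds, or an RFC 1123 date string to datetime."""
        if value is None or isinstance(value, datetime.datetime):
            return value
        if isinstance(value, (int, float)):
            return datetime.datetime.utcfromtimestamp(value)
        if isinstance(value, str):
            return parsedate_to_datetime(value)
        raise TypeError('value must be datetime, int, float, str or None, not %s'
                        % type(value).__name__)

    print(normalize_timestamp('Sun, 06 Nov 1994 08:49:37 GMT'))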

View File

@@ -2,6 +2,8 @@
# Copyright (C) 2018 FreeIPA Contributors see COPYING for license
#
import six
import io
import os
import re
@@ -13,7 +15,7 @@ from ipapython.ipautil import unescape_seq, escape_seq
_SENTINEL = object()
class DirectiveSetter:
class DirectiveSetter(object):
"""Safe directive setter
with DirectiveSetter('/path/to/conf') as ds:
@@ -45,7 +47,7 @@ class DirectiveSetter:
fd, name = tempfile.mkstemp(prefix=prefix, dir=directory, text=True)
with io.open(fd, mode='w', closefd=True) as f:
for line in self.lines:
if not isinstance(line, str):
if not isinstance(line, six.text_type):
line = line.decode('utf-8')
f.write(line)
self.lines = None
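
The DirectiveSetter.__exit__ hunk above writes the edited lines to a temporary file in the same directory before swapping it into place. A generic sketch of that safe-rewrite pattern, independent of the DirectiveSetter API:

    import io
    import os
    import tempfile

    def rewrite_file(path, lines):
        """Write lines to a temp file next to path, then atomically replace path."""
        directory = os.path.dirname(os.path.abspath(path))
        fd, name = tempfile.mkstemp(prefix=os.path.basename(path),
                                    dir=directory, text=True)
        with io.open(fd, mode='w', closefd=True) as f:
            for line in lines:
                if not isinstance(line, str):
                    line = line.decode('utf-8')
                f.write(line)
        # mkstemp creates the file with mode 0600; chmod here if other perms are needed
        os.replace(name, path)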

View File

@@ -17,7 +17,7 @@
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
r'''
'''
Goal
----
@@ -423,16 +423,10 @@ import sys
import functools
import cryptography.x509
from ldap.dn import str2dn, dn2str
from ldap import DECODING_ERROR
import six
try:
from ldap import DECODING_ERROR
except ImportError:
from ipapython.dn_ctypes import str2dn, dn2str, DECODING_ERROR
else:
from ldap.dn import str2dn, dn2str
if six.PY3:
unicode = str
@@ -459,8 +453,8 @@ def _adjust_indices(start, end, length):
def _normalize_ava_input(val):
if six.PY3 and isinstance(val, bytes):
raise TypeError('expected str, got bytes: %r' % val)
elif not isinstance(val, str):
val = val_encode(str(val))
elif not isinstance(val, six.string_types):
val = val_encode(six.text_type(val))
elif six.PY2 and isinstance(val, unicode):
val = val.encode('utf-8')
return val
@@ -506,7 +500,7 @@ def get_ava(*args):
if len(arg) != 2:
raise ValueError("tuple or list must be 2-valued, not \"%s\"" % (arg))
ava = [_normalize_ava_input(arg[0]), _normalize_ava_input(arg[1]), 0]
elif isinstance(arg, str):
elif isinstance(arg, six.string_types):
rdn = str2rdn(arg)
if len(rdn) > 1:
raise TypeError("multiple AVA's specified by \"%s\"" % (arg))
@@ -563,7 +557,7 @@ else:
@functools.total_ordering
class AVA:
class AVA(object):
'''
AVA(arg0, ...)
@@ -685,7 +679,7 @@ class AVA:
caseIgnoreMatch.
'''
# Try coercing string to AVA, if successful compare to coerced object
if isinstance(other, str):
if isinstance(other, six.string_types):
try:
other_ava = AVA(other)
return self.__eq__(other_ava)
@@ -712,7 +706,7 @@ class AVA:
@functools.total_ordering
class RDN:
class RDN(object):
'''
RDN(arg0, ...)
@@ -830,7 +824,7 @@ class RDN:
if raw: # fast raw mode
avas = args
elif ava_count == 1 and isinstance(args[0], str):
elif ava_count == 1 and isinstance(args[0], six.string_types):
avas = str2rdn(args[0])
sort = 1
elif ava_count == 1 and isinstance(args[0], RDN):
@@ -866,11 +860,11 @@ class RDN:
return len(self._avas)
def __getitem__(self, key):
if isinstance(key, int):
if isinstance(key, six.integer_types):
return self._get_ava(self._avas[key])
if isinstance(key, slice):
return [self._get_ava(ava) for ava in self._avas[key]]
elif isinstance(key, str):
elif isinstance(key, six.string_types):
for ava in self._avas:
if key == val_decode(ava[0]):
return val_decode(ava[1])
@@ -888,7 +882,7 @@ class RDN:
if len(self._avas) == 0:
raise IndexError("No AVA's in this RDN")
self._avas[0][0] = val_encode(str(new_attr))
self._avas[0][0] = val_encode(six.text_type(new_attr))
attr = property(_get_attr)
@@ -900,7 +894,7 @@ class RDN:
def _set_value(self, new_value):
if len(self._avas) == 0:
raise IndexError("No AVA's in this RDN")
self._avas[0][1] = val_encode(str(new_value))
self._avas[0][1] = val_encode(six.text_type(new_value))
value = property(_get_value)
@@ -915,7 +909,7 @@ class RDN:
def __eq__(self, other):
# Try coercing string to RDN, if successful compare to coerced object
if isinstance(other, str):
if isinstance(other, six.string_types):
try:
other_rdn = RDN(other)
return self.__eq__(other_rdn)
@@ -945,7 +939,7 @@ class RDN:
result._avas.append((ava[0], ava[1], ava[2]))
elif isinstance(other, AVA):
result._avas.append(other.to_openldap())
elif isinstance(other, str):
elif isinstance(other, six.string_types):
rdn = self.__class__(other)
for ava in rdn._avas:
result._avas.append((ava[0], ava[1], ava[2]))
@@ -957,7 +951,7 @@ class RDN:
@functools.total_ordering
class DN:
class DN(object):
'''
DN(arg0, ...)
@@ -1118,9 +1112,9 @@ class DN:
return [[list(a) for a in rdn] for rdn in rdns]
def _rdns_from_value(self, value):
if isinstance(value, str):
if isinstance(value, six.string_types):
try:
if isinstance(value, str):
if isinstance(value, six.text_type):
value = val_encode(value)
rdns = str2dn(value)
except DECODING_ERROR:
@@ -1138,11 +1132,9 @@ class DN:
rdns = list(reversed([
[get_ava(
ATTR_NAME_BY_OID.get(ava.oid, ava.oid.dotted_string),
ava.value) for ava in rdn]
for rdn in value.rdns
ava.value)]
for ava in value
]))
for rdn in rdns:
sort_avas(rdn)
else:
raise TypeError(
"must be str, unicode, tuple, Name, RDN or DN, got %s instead"
@@ -1186,14 +1178,14 @@ class DN:
return len(self.rdns)
def __getitem__(self, key):
if isinstance(key, int):
if isinstance(key, six.integer_types):
return self._get_rdn(self.rdns[key])
if isinstance(key, slice):
cls = self.__class__
new_dn = cls.__new__(cls)
new_dn.rdns = self.rdns[key]
return new_dn
elif isinstance(key, str):
elif isinstance(key, six.string_types):
for rdn in self.rdns:
for ava in rdn:
if key == val_decode(ava[0]):
@@ -1220,7 +1212,7 @@ class DN:
def __eq__(self, other):
# Try coercing to DN, if successful compare to coerced object
if isinstance(other, (str, RDN, AVA)):
if isinstance(other, (six.string_types, RDN, AVA)):
try:
other_dn = DN(other)
return self.__eq__(other_dn)
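
The __eq__ hunks above let AVA, RDN and DN objects be compared directly against strings by first coercing the string, with matching done case-insensitively (caseIgnoreMatch). A short usage sketch, assuming the ipapython package from this tree is importable:

    from ipapython.dn import DN, RDN

    dn = DN(('cn', 'John Doe'), ('dc', 'example'), ('dc', 'com'))
    print(str(dn))                                 # cn=John Doe,dc=example,dc=com
    print(dn == 'cn=john doe,dc=example,dc=com')   # True, comparison is case-insensitive
    print(dn[0] == RDN(('cn', 'John Doe')))        # True, dn[0] is the first RDN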

View File

@@ -1,165 +0,0 @@
#
# Copyright (C) 2019 FreeIPA Contributors see COPYING for license
#
"""ctypes wrapper for libldap_str2dn
"""
from __future__ import absolute_import
import ctypes
import ctypes.util
import six
__all__ = ("str2dn", "dn2str", "DECODING_ERROR", "LDAPError")
# load reentrant ldap client library (libldap_r-*.so.2)
ldap_r_lib = ctypes.util.find_library("ldap_r-2")
if ldap_r_lib is None:
raise ImportError("libldap_r shared library missing")
try:
lib = ctypes.CDLL(ldap_r_lib)
except OSError as e:
raise ImportError(str(e))
# constants
LDAP_AVA_FREE_ATTR = 0x0010
LDAP_AVA_FREE_VALUE = 0x0020
LDAP_DECODING_ERROR = -4
# mask for AVA flags
AVA_MASK = ~(LDAP_AVA_FREE_ATTR | LDAP_AVA_FREE_VALUE)
class berval(ctypes.Structure):
__slots__ = ()
_fields_ = [("bv_len", ctypes.c_ulong), ("bv_value", ctypes.c_char_p)]
def __bytes__(self):
buf = ctypes.create_string_buffer(self.bv_value, self.bv_len)
return buf.raw
def __str__(self):
return self.__bytes__().decode("utf-8")
if six.PY2:
__unicode__ = __str__
__str__ = __bytes__
class LDAPAVA(ctypes.Structure):
__slots__ = ()
_fields_ = [
("la_attr", berval),
("la_value", berval),
("la_flags", ctypes.c_uint16),
]
# typedef LDAPAVA** LDAPRDN;
LDAPRDN = ctypes.POINTER(ctypes.POINTER(LDAPAVA))
# typedef LDAPRDN* LDAPDN;
LDAPDN = ctypes.POINTER(LDAPRDN)
def errcheck(result, func, arguments):
if result != 0:
if result == LDAP_DECODING_ERROR:
raise DECODING_ERROR
else:
msg = ldap_err2string(result)
raise LDAPError(msg.decode("utf-8"))
return result
ldap_str2dn = lib.ldap_str2dn
ldap_str2dn.argtypes = (
ctypes.c_char_p,
ctypes.POINTER(LDAPDN),
ctypes.c_uint16,
)
ldap_str2dn.restype = ctypes.c_int16
ldap_str2dn.errcheck = errcheck
ldap_dnfree = lib.ldap_dnfree
ldap_dnfree.argtypes = (LDAPDN,)
ldap_dnfree.restype = None
ldap_err2string = lib.ldap_err2string
ldap_err2string.argtypes = (ctypes.c_int16,)
ldap_err2string.restype = ctypes.c_char_p
class LDAPError(Exception):
pass
class DECODING_ERROR(LDAPError):
pass
# RFC 4514, 2.4
_ESCAPE_CHARS = {'"', "+", ",", ";", "<", ">", "'", "\x00"}
def _escape_dn(dn):
if not dn:
return ""
result = []
# a space or number sign occurring at the beginning of the string
if dn[0] in {"#", " "}:
result.append("\\")
for c in dn:
if c in _ESCAPE_CHARS:
result.append("\\")
result.append(c)
# a space character occurring at the end of the string
if len(dn) > 1 and result[-1] == " ":
# insert before last entry
result.insert(-1, "\\")
return "".join(result)
def dn2str(dn):
return ",".join(
"+".join(
"=".join((attr, _escape_dn(value))) for attr, value, _flag in rdn
)
for rdn in dn
)
def str2dn(dn, flags=0):
if dn is None:
return []
if isinstance(dn, six.text_type):
dn = dn.encode("utf-8")
ldapdn = LDAPDN()
try:
ldap_str2dn(dn, ctypes.byref(ldapdn), flags)
result = []
if not ldapdn:
# empty DN, str2dn("") == []
return result
for rdn in ldapdn:
if not rdn:
break
avas = []
for ava_p in rdn:
if not ava_p:
break
ava = ava_p[0]
avas.append(
(
six.text_type(ava.la_attr),
six.text_type(ava.la_value),
ava.la_flags & AVA_MASK,
)
)
result.append(avas)
return result
finally:
ldap_dnfree(ldapdn)
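
The dn_ctypes module above is a ctypes fallback for python-ldap's str2dn/dn2str. A short round-trip sketch, assuming the module is importable and the libldap_r shared library is installed:

    from ipapython.dn_ctypes import dn2str, str2dn

    # str2dn returns a list of RDNs, each a list of (attr, value, flags) tuples
    rdns = str2dn("cn=John Doe,dc=example,dc=com")
    print([(attr, value) for rdn in rdns for attr, value, _flags in rdn])
    # [('cn', 'John Doe'), ('dc', 'example'), ('dc', 'com')]
    print(dn2str(rdns))
    # cn=John Doe,dc=example,dc=com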

View File

@@ -27,7 +27,6 @@ import dns.exception
import dns.resolver
import dns.rdataclass
import dns.rdatatype
import dns.reversename
import six
@@ -40,91 +39,6 @@ if six.PY3:
logger = logging.getLogger(__name__)
ipa_resolver = None
def get_ipa_resolver():
global ipa_resolver
if ipa_resolver is None:
ipa_resolver = DNSResolver()
return ipa_resolver
def resolve(*args, **kwargs):
return get_ipa_resolver().resolve(*args, **kwargs)
def resolve_address(*args, **kwargs):
return get_ipa_resolver().resolve_address(*args, **kwargs)
def zone_for_name(*args, **kwargs):
if "resolver" not in kwargs:
kwargs["resolver"] = get_ipa_resolver()
return dns.resolver.zone_for_name(*args, **kwargs)
def reset_default_resolver():
"""Re-initialize ipa resolver.
"""
global ipa_resolver
ipa_resolver = DNSResolver()
class DNSResolver(dns.resolver.Resolver):
"""DNS stub resolver compatible with both dnspython < 2.0.0
and dnspython >= 2.0.0.
Set `use_search_by_default` attribute to `True`, which
determines the default for whether the search list configured
in the system's resolver configuration is used for relative
names, and whether the resolver's domain may be added to relative
names.
Increase the default lifetime which determines the number of seconds
to spend trying to get an answer to the question. dnspython 2.0.0
changes this to 5sec, while the previous one was 30sec.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.reset_ipa_defaults()
self.resolve = getattr(super(), "resolve", self.query)
self.resolve_address = getattr(
super(),
"resolve_address",
self._resolve_address
)
def reset_ipa_defaults(self):
self.use_search_by_default = True
# the default is 5sec
self.lifetime = 15
def reset(self):
super().reset()
self.reset_ipa_defaults()
def _resolve_address(self, ip_address, *args, **kwargs):
"""Query nameservers for PTR records.
:param ip_address: IPv4 or IPv6 address
:type ip_address: str
"""
return self.resolve(
dns.reversename.from_address(ip_address),
rdtype=dns.rdatatype.PTR,
*args,
**kwargs,
)
class DNSZoneAlreadyExists(dns.exception.DNSException):
supp_kwargs = {'zone', 'ns'}
fmt = (u"DNS zone {zone} already exists in DNS "
"and is handled by server(s): {ns}")
@six.python_2_unicode_compatible
class DNSName(dns.name.Name):
labels = None # make pylint happy
@@ -135,7 +49,7 @@ class DNSName(dns.name.Name):
def __init__(self, labels, origin=None):
try:
if isinstance(labels, str):
if isinstance(labels, six.string_types):
#pylint: disable=E1101
labels = dns.name.from_text(unicode(labels), origin).labels
elif isinstance(labels, dns.name.Name):
@@ -401,7 +315,7 @@ def resolve_rrsets(fqdn, rdtypes):
rrsets = []
for rdtype in rdtypes:
try:
answer = resolve(fqdn, rdtype)
answer = dns.resolver.query(fqdn, rdtype)
logger.debug('found %d %s records for %s: %s',
len(answer),
rdtype,
@@ -443,7 +357,7 @@ def check_zone_overlap(zone, raise_on_error=True):
return
try:
containing_zone = zone_for_name(zone)
containing_zone = dns.resolver.zone_for_name(zone)
except dns.exception.DNSException as e:
msg = ("DNS check for domain %s failed: %s." % (zone, e))
if raise_on_error:
@@ -454,13 +368,16 @@ def check_zone_overlap(zone, raise_on_error=True):
if containing_zone == zone:
try:
ns = [ans.to_text() for ans in resolve(zone, 'NS')]
ns = [ans.to_text() for ans in dns.resolver.query(zone, 'NS')]
except dns.exception.DNSException as e:
logger.debug("Failed to resolve nameserver(s) for domain %s: %s",
zone, e)
ns = []
raise DNSZoneAlreadyExists(zone=zone.to_text(), ns=ns)
msg = u"DNS zone {0} already exists in DNS".format(zone)
if ns:
msg += u" and is handled by server(s): {0}".format(', '.join(ns))
raise ValueError(msg)
def _mix_weight(records):
@@ -543,8 +460,6 @@ def query_srv(qname, resolver=None, **kwargs):
:return: list of dns.rdtypes.IN.SRV.SRV instances
"""
if resolver is None:
resolve_f = resolve
else:
resolve_f = getattr(resolver, "resolve", resolver.query)
answer = resolve_f(qname, rdtype=dns.rdatatype.SRV, **kwargs)
resolver = dns.resolver
answer = resolver.query(qname, rdtype=dns.rdatatype.SRV, **kwargs)
return sort_prio_weight(answer)
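
The DNSResolver class removed above papers over the dnspython 1.x/2.x API split by aliasing resolve() to query() when only the latter exists. A minimal sketch of that trick (the final lookup needs working DNS, so treat it as illustrative):

    import dns.rdatatype
    import dns.resolver
    import dns.reversename

    class CompatResolver(dns.resolver.Resolver):
        def __init__(self, *args, **kwargs):
            super().__init__(*args, **kwargs)
            # dnspython >= 2.0.0 provides resolve(); fall back to query() on 1.x
            self.resolve = getattr(super(), "resolve", self.query)

        def resolve_ptr(self, ip_address, **kwargs):
            """Resolve PTR records for an IPv4 or IPv6 address."""
            return self.resolve(dns.reversename.from_address(ip_address),
                                rdtype=dns.rdatatype.PTR, **kwargs)

    answers = CompatResolver().resolve("example.com", rdtype=dns.rdatatype.A)
    print([rr.address for rr in answers])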

View File

@@ -21,11 +21,13 @@ import collections
import gzip
import io
import logging
from urllib.parse import urlencode
import xml.dom.minidom
import zlib
import six
# pylint: disable=import-error
from six.moves.urllib.parse import urlencode
# pylint: enable=import-error
# pylint: disable=ipa-forbidden-import
from ipalib import api, errors
@@ -227,7 +229,7 @@ def _httplib_request(
try:
conn = connection_factory(host, port, **connection_options)
conn.request(method, path, body=request_body, headers=headers)
conn.request(method, uri, body=request_body, headers=headers)
res = conn.getresponse()
http_status = res.status

View File

@@ -4,7 +4,7 @@
from collections import deque
class Graph:
class Graph(object):
"""
Simple oriented graph structure

View File

@@ -58,7 +58,8 @@ def _get_usage(configurable_class):
def install_tool(configurable_class, command_name, log_file_name,
debug_option=False, verbose=False, console_format=None,
use_private_ccache=True, uninstall_log_file_name=None):
use_private_ccache=True, uninstall_log_file_name=None,
ignore_return_codes=()):
"""
Some commands represent multiple related tools, e.g.
``ipa-server-install`` and ``ipa-server-install --uninstall`` would be
@@ -72,6 +73,8 @@ def install_tool(configurable_class, command_name, log_file_name,
:param console_format: logging format for stderr
:param use_private_ccache: a temporary ccache is created and used
:param uninstall_log_file_name: if not None the log for uninstall
:param ignore_return_codes: tuple of error codes to not log errors
for. Let the caller do it if it wants.
"""
if uninstall_log_file_name is not None:
uninstall_kwargs = dict(
@@ -81,6 +84,7 @@ def install_tool(configurable_class, command_name, log_file_name,
debug_option=debug_option,
verbose=verbose,
console_format=console_format,
ignore_return_codes=ignore_return_codes,
)
else:
uninstall_kwargs = None
@@ -98,12 +102,14 @@ def install_tool(configurable_class, command_name, log_file_name,
console_format=console_format,
uninstall_kwargs=uninstall_kwargs,
use_private_ccache=use_private_ccache,
ignore_return_codes=ignore_return_codes,
)
)
def uninstall_tool(configurable_class, command_name, log_file_name,
debug_option=False, verbose=False, console_format=None):
debug_option=False, verbose=False, console_format=None,
ignore_return_codes=()):
return type(
'uninstall_tool({0})'.format(configurable_class.__name__),
(UninstallTool,),
@@ -115,6 +121,7 @@ def uninstall_tool(configurable_class, command_name, log_file_name,
debug_option=debug_option,
verbose=verbose,
console_format=console_format,
ignore_return_codes=ignore_return_codes,
)
)

View File

@@ -35,12 +35,12 @@ class Installable(core.Configurable):
def _get_components(self):
components = super(Installable, self)._get_components()
if self.uninstalling: # pylint: disable=using-constant-test
if self.uninstalling:
components = reversed(list(components))
return components
def _configure(self):
if self.uninstalling: # pylint: disable=using-constant-test
if self.uninstalling:
return self._uninstall()
else:
return self._install()

View File

@@ -11,7 +11,7 @@ import sys
import six
class from_:
class from_(object):
"""
Wrapper for delegating to a subgenerator.

View File

@@ -20,9 +20,13 @@ import logging
import os
import re
import time
import warnings
import sys
import six
# Module exports
__all__ = ['standard_logging_setup',
__all__ = ['log_mgr', 'root_logger', 'standard_logging_setup',
'ISO8601_UTC_DATETIME_FMT',
'LOGGING_FORMAT_STDERR', 'LOGGING_FORMAT_STDOUT', 'LOGGING_FORMAT_FILE']
@@ -53,7 +57,83 @@ LOGGING_FORMAT_STANDARD_CONSOLE = '%(name)-12s: %(levelname)-8s %(message)s'
LOGGING_FORMAT_STANDARD_FILE = '%(asctime)s %(levelname)s %(message)s'
class Filter:
class _DeprecatedLogger(object):
def __init__(self, logger, name):
self._logger = logger
self._name = name
def _warn(self):
warnings.warn(
"{} is deprecated, use a module-level logger".format(self._name),
DeprecationWarning)
def debug(self, *args, **kwargs):
self._warn()
self._logger.debug(*args, **kwargs)
def info(self, *args, **kwargs):
self._warn()
self._logger.info(*args, **kwargs)
def warning(self, *args, **kwargs):
self._warn()
self._logger.warning(*args, **kwargs)
def error(self, *args, **kwargs):
self._warn()
self._logger.error(*args, **kwargs)
def critical(self, *args, **kwargs):
self._warn()
self._logger.critical(*args, **kwargs)
def exception(self, *args, **kwargs):
self._warn()
self._logger.exception(*args, **kwargs)
def get_logger(who, bind_logger_names=False):
if isinstance(who, six.string_types):
warnings.warn(
"{}.log_mgr.get_logger is deprecated, use "
"logging.getLogger".format(__name__),
DeprecationWarning)
logger_name = who
else:
caller_globals = sys._getframe(1).f_globals
logger_name = caller_globals.get('__name__', '__main__')
if logger_name == '__main__':
logger_name = caller_globals.get('__file__', logger_name)
logger_name = os.path.basename(logger_name)
logger = logging.getLogger(logger_name)
if not isinstance(who, six.string_types):
obj_name = '%s.%s' % (who.__module__, who.__class__.__name__)
logger = _DeprecatedLogger(logger, obj_name)
if bind_logger_names:
method = 'log'
if hasattr(who, method):
raise ValueError('%s is already bound to %s' % (method, repr(who)))
setattr(who, method, logger)
for method in ('debug',
'info',
'warning',
'error',
'exception',
'critical'):
if hasattr(who, method):
raise ValueError(
'%s is already bound to %s' % (method, repr(who)))
setattr(who, method, getattr(logger, method))
return logger
class Filter(object):
def __init__(self, regexp, level):
self.regexp = re.compile(regexp)
self.level = level
@@ -117,3 +197,10 @@ def convert_log_level(value):
except KeyError:
raise ValueError('unknown log level (%s)' % value)
return level
# Single shared instance of log manager
log_mgr = sys.modules[__name__]
root_logger = _DeprecatedLogger(logging.getLogger(),
'{}.log_mgr.root_logger'.format(__name__))

View File

@@ -1,590 +0,0 @@
#
# ipachangeconf - configuration file manipulation classes and functions
# partially based on authconfig code
# Copyright (c) 1999-2007 Red Hat, Inc.
# Author: Simo Sorce <ssorce@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import fcntl
import logging
import os
import shutil
import six
if six.PY3:
unicode = str
logger = logging.getLogger(__name__)
def openLocked(filename, perms):
fd = -1
try:
fd = os.open(filename, os.O_RDWR | os.O_CREAT, perms)
fcntl.lockf(fd, fcntl.LOCK_EX)
except OSError as e:
if fd != -1:
try:
os.close(fd)
except OSError:
pass
raise IOError(e.errno, e.strerror)
return os.fdopen(fd, "r+")
# TODO: add subsection as a concept
# (ex. REALM.NAME = { foo = x bar = y } )
# TODO: put section delimiters as separating element of the list
# so that we can process multiple sections in one go
# TODO: add a comment all but provided options as a section option
class IPAChangeConf:
def __init__(self, name):
self.progname = name
self.indent = ("", "", "")
self.assign = (" = ", "=")
self.dassign = self.assign[0]
self.comment = ("#",)
self.dcomment = self.comment[0]
self.eol = ("\n",)
self.deol = self.eol[0]
self.sectnamdel = ("[", "]")
self.subsectdel = ("{", "}")
self.case_insensitive_sections = True
def setProgName(self, name):
self.progname = name
def setIndent(self, indent):
if type(indent) is tuple:
self.indent = indent
elif type(indent) is str:
self.indent = (indent, )
else:
raise ValueError('Indent must be a list of strings')
def setOptionAssignment(self, assign):
if type(assign) is tuple:
self.assign = assign
else:
self.assign = (assign, )
self.dassign = self.assign[0]
def setCommentPrefix(self, comment):
if type(comment) is tuple:
self.comment = comment
else:
self.comment = (comment, )
self.dcomment = self.comment[0]
def setEndLine(self, eol):
if type(eol) is tuple:
self.eol = eol
else:
self.eol = (eol, )
self.deol = self.eol[0]
def setSectionNameDelimiters(self, delims):
self.sectnamdel = delims
def setSubSectionDelimiters(self, delims):
self.subsectdel = delims
def matchComment(self, line):
for v in self.comment:
if line.lstrip().startswith(v):
return line.lstrip()[len(v):]
return False
def matchEmpty(self, line):
if line.strip() == "":
return True
return False
def matchSection(self, line):
cl = "".join(line.strip().split())
cl = cl.lower() if self.case_insensitive_sections else cl
if len(self.sectnamdel) != 2:
return False
if not cl.startswith(self.sectnamdel[0]):
return False
if not cl.endswith(self.sectnamdel[1]):
return False
return cl[len(self.sectnamdel[0]):-len(self.sectnamdel[1])]
def matchSubSection(self, line):
if self.matchComment(line):
return False
parts = line.split(self.dassign, 1)
if len(parts) < 2:
return False
if parts[1].strip() == self.subsectdel[0]:
return parts[0].strip()
return False
def matchSubSectionEnd(self, line):
if self.matchComment(line):
return False
if line.strip() == self.subsectdel[1]:
return True
return False
def getSectionLine(self, section):
if len(self.sectnamdel) != 2:
return section
return self._dump_line(self.sectnamdel[0],
section,
self.sectnamdel[1],
self.deol)
def _dump_line(self, *args):
return u"".join(unicode(x) for x in args)
def dump(self, options, level=0):
output = []
if level >= len(self.indent):
level = len(self.indent) - 1
for o in options:
if o['type'] == "section":
output.append(self._dump_line(self.sectnamdel[0],
o['name'],
self.sectnamdel[1]))
output.append(self.dump(o['value'], (level + 1)))
continue
if o['type'] == "subsection":
output.append(self._dump_line(self.indent[level],
o['name'],
self.dassign,
self.subsectdel[0]))
output.append(self.dump(o['value'], (level + 1)))
output.append(self._dump_line(self.indent[level],
self.subsectdel[1]))
continue
if o['type'] == "option":
delim = o.get('delim', self.dassign)
if delim not in self.assign:
raise ValueError(
'Unknown delim "%s" must be one of "%s"' %
(delim, " ".join(list(self.assign)))
)
output.append(self._dump_line(self.indent[level],
o['name'],
delim,
o['value']))
continue
if o['type'] == "comment":
output.append(self._dump_line(self.dcomment, o['value']))
continue
if o['type'] == "empty":
output.append('')
continue
raise SyntaxError('Unknown type: [%s]' % o['type'])
# append an empty string to the output so that we add eol to the end
# of the file contents in a single join()
output.append('')
return self.deol.join(output)
def parseLine(self, line):
if self.matchEmpty(line):
return {'name': 'empty', 'type': 'empty'}
value = self.matchComment(line)
if value:
return {'name': 'comment',
'type': 'comment',
'value': value.rstrip()} # pylint: disable=E1103
o = dict()
parts = line.split(self.dassign, 1)
if len(parts) < 2:
# The default assign didn't match, try the non-default
for d in self.assign[1:]:
parts = line.split(d, 1)
if len(parts) >= 2:
o['delim'] = d
break
if 'delim' not in o:
raise SyntaxError('Syntax Error: Unknown line format')
o.update({'name': parts[0].strip(), 'type': 'option',
'value': parts[1].rstrip()})
return o
def findOpts(self, opts, type, name, exclude_sections=False):
num = 0
for o in opts:
if o['type'] == type and o['name'] == name:
return (num, o)
if exclude_sections and (o['type'] == "section" or
o['type'] == "subsection"):
return (num, None)
num += 1
return (num, None)
def commentOpts(self, inopts, level=0):
opts = []
if level >= len(self.indent):
level = len(self.indent) - 1
for o in inopts:
if o['type'] == 'section':
no = self.commentOpts(o['value'], (level + 1))
val = self._dump_line(self.dcomment,
self.sectnamdel[0],
o['name'],
self.sectnamdel[1])
opts.append({'name': 'comment',
'type': 'comment',
'value': val})
for n in no:
opts.append(n)
continue
if o['type'] == 'subsection':
no = self.commentOpts(o['value'], (level + 1))
val = self._dump_line(self.indent[level],
o['name'],
self.dassign,
self.subsectdel[0])
opts.append({'name': 'comment',
'type': 'comment',
'value': val})
opts.extend(no)
val = self._dump_line(self.indent[level], self.subsectdel[1])
opts.append({'name': 'comment',
'type': 'comment',
'value': val})
continue
if o['type'] == 'option':
delim = o.get('delim', self.dassign)
if delim not in self.assign:
val = self._dump_line(self.indent[level],
o['name'],
delim,
o['value'])
opts.append({'name': 'comment', 'type': 'comment',
'value': val})
continue
if o['type'] == 'comment':
opts.append(o)
continue
if o['type'] == 'empty':
opts.append({'name': 'comment',
'type': 'comment',
'value': ''})
continue
raise SyntaxError('Unknown type: [%s]' % o['type'])
return opts
def mergeOld(self, oldopts, newopts):
opts = []
for o in oldopts:
if o['type'] == "section" or o['type'] == "subsection":
_num, no = self.findOpts(newopts, o['type'], o['name'])
if not no:
opts.append(o)
continue
if no['action'] == "set":
mo = self.mergeOld(o['value'], no['value'])
opts.append({'name': o['name'],
'type': o['type'],
'value': mo})
continue
if no['action'] == "comment":
co = self.commentOpts(o['value'])
for c in co:
opts.append(c)
continue
if no['action'] == "remove":
continue
raise SyntaxError('Unknown action: [%s]' % no['action'])
if o['type'] == "comment" or o['type'] == "empty":
opts.append(o)
continue
if o['type'] == "option":
_num, no = self.findOpts(newopts, 'option', o['name'], True)
if not no:
opts.append(o)
continue
if no['action'] == 'comment' or no['action'] == 'remove':
if (no['value'] is not None and
o['value'] is not no['value']):
opts.append(o)
continue
if no['action'] == 'comment':
value = self._dump_line(self.dcomment,
o['name'],
self.dassign,
o['value'])
opts.append({'name': 'comment',
'type': 'comment',
'value': value})
continue
if no['action'] == 'set':
opts.append(no)
continue
if no['action'] == 'addifnotset':
opts.append({
'name': 'comment',
'type': 'comment',
'value': self._dump_line(
' ', no['name'], ' modified by IPA'
),
})
opts.append({'name': 'comment', 'type': 'comment',
'value': self._dump_line(no['name'],
self.dassign,
no['value'],
)})
opts.append(o)
continue
raise SyntaxError('Unknown action: [%s]' % no['action'])
raise SyntaxError('Unknown type: [%s]' % o['type'])
return opts
def mergeNew(self, opts, newopts):
cline = 0
for no in newopts:
if no['type'] == "section" or no['type'] == "subsection":
(num, o) = self.findOpts(opts, no['type'], no['name'])
if not o:
if no['action'] == 'set':
opts.append(no)
continue
if no['action'] == "set":
self.mergeNew(o['value'], no['value'])
continue
cline = num + 1
continue
if no['type'] == "option":
(num, o) = self.findOpts(opts, no['type'], no['name'], True)
if not o:
if no['action'] == 'set' or no['action'] == 'addifnotset':
opts.append(no)
continue
cline = num + 1
continue
if no['type'] == "comment" or no['type'] == "empty":
opts.insert(cline, no)
cline += 1
continue
raise SyntaxError('Unknown type: [%s]' % no['type'])
def merge(self, oldopts, newopts):
"""
Uses a two pass strategy:
First we create a new opts tree from oldopts removing/commenting
the options as indicated by the contents of newopts
Second we fill in the new opts tree with options as indicated
in the newopts tree (this is becaus eentire (sub)sections may
in the newopts tree (this is becaus entire (sub)sections may
exist in the newopts that do not exist in oldopts)
"""
opts = self.mergeOld(oldopts, newopts)
self.mergeNew(opts, newopts)
return opts
# TODO: Make parse() recursive?
def parse(self, f):
opts = []
sectopts = []
section = None
subsectopts = []
subsection = None
curopts = opts
fatheropts = opts
# Read in the old file.
for line in f:
# It's a section start.
value = self.matchSection(line)
if value:
if section is not None:
opts.append({'name': section,
'type': 'section',
'value': sectopts})
sectopts = []
curopts = sectopts
fatheropts = sectopts
section = value
continue
# It's a subsection start.
value = self.matchSubSection(line)
if value:
if subsection is not None:
raise SyntaxError('nested subsections are not '
'supported yet')
subsectopts = []
curopts = subsectopts
subsection = value
continue
value = self.matchSubSectionEnd(line)
if value:
if subsection is None:
raise SyntaxError('Unmatched end subsection terminator '
'found')
fatheropts.append({'name': subsection,
'type': 'subsection',
'value': subsectopts})
subsection = None
curopts = fatheropts
continue
# Copy anything else as is.
try:
curopts.append(self.parseLine(line))
except SyntaxError as e:
raise SyntaxError('{error} in file {fname}: [{line}]'.format(
error=e, fname=f.name, line=line.rstrip()))
# Add last section if any
if len(sectopts) != 0:
opts.append({'name': section,
'type': 'section',
'value': sectopts})
return opts
def changeConf(self, file, newopts):
"""
Write settings to configuration file
:param file: path to the file
:param options: set of dictionaries in the form:
{'name': 'foo', 'value': 'bar', 'action': 'set/comment'}
:param section: section name like 'global'
"""
output = ""
f = None
try:
# Do not catch an unexisting file error
# we want to fail in that case
shutil.copy2(file, (file + ".ipabkp"))
f = openLocked(file, 0o644)
oldopts = self.parse(f)
options = self.merge(oldopts, newopts)
output = self.dump(options)
# Write it out and close it.
f.seek(0)
f.truncate(0)
f.write(output)
finally:
try:
if f:
f.close()
except IOError:
pass
logger.debug("Updating configuration file %s", file)
logger.debug(output)
return True
def newConf(self, file, options, file_perms=0o644):
""""
Write settings to a new file, backup the old
:param file: path to the file
:param options: a set of dictionaries in the form:
{'name': 'foo', 'value': 'bar', 'action': 'set/comment'}
:param file_perms: number defining the new file's permissions
"""
output = ""
f = None
try:
try:
shutil.copy2(file, (file + ".ipabkp"))
except IOError as err:
if err.errno == 2:
# The orign file did not exist
pass
f = openLocked(file, file_perms)
# Trunkate
f.seek(0)
f.truncate(0)
output = self.dump(options)
f.write(output)
finally:
try:
if f:
f.close()
except IOError:
pass
logger.debug("Writing configuration file %s", file)
logger.debug(output)
return True
@staticmethod
def setOption(name, value):
return {'name': name,
'type': 'option',
'action': 'set',
'value': value}
@staticmethod
def rmOption(name):
return {'name': name,
'type': 'option',
'action': 'remove',
'value': None}
@staticmethod
def setSection(name, options):
return {'name': name,
'type': 'section',
'action': 'set',
'value': options}
@staticmethod
def emptyLine():
return {'name': 'empty',
'type': 'empty'}
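
IPAChangeConf above represents a configuration file as a tree of option dictionaries. A hedged usage sketch built only from the helpers shown (setSection/setOption/emptyLine and newConf; the path and values are made up):

    conf = IPAChangeConf("example-installer")

    opts = [
        conf.emptyLine(),
        conf.setSection("global", [
            conf.setOption("debug", "True"),
            conf.setOption("basedn", "dc=example,dc=com"),
        ]),
    ]

    # newConf() renders the tree, backs up an existing file as <file>.ipabkp,
    # and writes the result with the requested permissions.
    conf.newConf("/tmp/example.conf", opts, file_perms=0o644)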

View File

@@ -31,6 +31,10 @@ import os
import pwd
import warnings
# pylint: disable=import-error
from six.moves.urllib.parse import urlparse
# pylint: enable=import-error
from cryptography import x509 as crypto_x509
from cryptography.hazmat.primitives import serialization
@@ -38,14 +42,12 @@ import ldap
import ldap.sasl
import ldap.filter
from ldap.controls import SimplePagedResultsControl, GetEffectiveRightsControl
import ldapurl
import six
# pylint: disable=ipa-forbidden-import
from ipalib import errors, x509, _
from ipalib.constants import LDAP_GENERALIZED_TIME_FORMAT
# pylint: enable=ipa-forbidden-import
from ipaplatform.paths import paths
from ipapython.ipautil import format_netloc, CIDict
from ipapython.dn import DN
from ipapython.dnsutil import DNSName
@@ -91,18 +93,6 @@ if six.PY2 and hasattr(ldap, 'LDAPBytesWarning'):
)
def realm_to_serverid(realm_name):
"""Convert Kerberos realm name to 389-DS server id"""
return "-".join(realm_name.split("."))
def realm_to_ldapi_uri(realm_name):
"""Get ldapi:// URI to 389-DS's Unix socket"""
serverid = realm_to_serverid(realm_name)
socketname = paths.SLAPD_INSTANCE_SOCKET_TEMPLATE % (serverid,)
return 'ldapi://' + ldapurl.ldapUrlEscape(socketname)
def ldap_initialize(uri, cacertfile=None):
"""Wrapper around ldap.initialize()
@@ -137,7 +127,7 @@ def ldap_initialize(uri, cacertfile=None):
return conn
class _ServerSchema:
class _ServerSchema(object):
'''
Properties of a schema retrieved from an LDAP server.
'''
@@ -148,7 +138,7 @@ class _ServerSchema:
self.retrieve_timestamp = time.time()
class SchemaCache:
class SchemaCache(object):
'''
Cache the schema's from individual LDAP servers.
'''
@@ -383,7 +373,7 @@ class LDAPEntry(MutableMapping):
self._not_list.discard(name)
def _attr_name(self, name):
if not isinstance(name, str):
if not isinstance(name, six.string_types):
raise TypeError(
"attribute name must be unicode or str, got %s object %r" % (
name.__class__.__name__, name))
@@ -649,7 +639,7 @@ class SingleValueLDAPEntryView(LDAPEntryView):
self._entry[name] = [value]
class LDAPClient:
class LDAPClient(object):
"""LDAP backend class
This class abstracts a LDAP connection, providing methods that work with
@@ -752,8 +742,6 @@ class LDAPClient:
'nsslapd-idlistscanlimit': True,
'nsslapd-anonlimitsdn': True,
'nsslapd-minssf-exclude-rootdse': True,
'nsslapd-enable-upgrade-hash': True,
'nsslapd-db-locks': True,
})
time_limit = -1.0 # unlimited
@@ -779,9 +767,15 @@ class LDAPClient:
syntax.
"""
if ldap_uri is not None:
# special case for ldap2 server plugin
self.ldap_uri = ldap_uri
assert self.protocol in {'ldaps', 'ldapi', 'ldap'}
self.host = 'localhost'
self.port = None
url_data = urlparse(ldap_uri)
self._protocol = url_data.scheme
if self._protocol in ('ldap', 'ldaps'):
self.host = url_data.hostname
self.port = url_data.port
self._start_tls = start_tls
self._force_schema_updates = force_schema_updates
self._no_schema = no_schema
@@ -792,50 +786,7 @@ class LDAPClient:
self._has_schema = False
self._schema = None
if ldap_uri is not None:
self._conn = self._connect()
@classmethod
def from_realm(cls, realm_name, **kwargs):
"""Create a LDAPI connection to local 389-DS instance
"""
uri = realm_to_ldapi_uri(realm_name)
return cls(uri, start_tls=False, cacert=None, **kwargs)
@classmethod
def from_hostname_secure(cls, hostname, cacert=paths.IPA_CA_CRT,
start_tls=True, **kwargs):
"""Create LDAP or LDAPS connection to a remote 389-DS instance
This constructor is opinionated and doesn't let you shoot yourself in
the foot. It always creates a secure connection. By default it
returns a LDAP connection to port 389 and performs STARTTLS using the
default CA cert. With start_tls=False, it creates a LDAPS connection
to port 636 instead.
Note: Microsoft AD does not support SASL encryption and integrity
verification with a TLS connection. For AD, use a plain connection
with GSSAPI and a MIN_SSF >= 56. SASL GSSAPI and SASL GSS SPNEGO
ensure data integrity and confidentiality with SSF > 1. Also see
https://msdn.microsoft.com/en-us/library/cc223500.aspx
"""
if start_tls:
uri = 'ldap://%s' % format_netloc(hostname, 389)
else:
uri = 'ldaps://%s' % format_netloc(hostname, 636)
return cls(uri, start_tls=start_tls, cacert=cacert, **kwargs)
@classmethod
def from_hostname_plain(cls, hostname, **kwargs):
"""Create a plain LDAP connection with TLS/SSL
Note: A plain TLS connection should only be used in combination with
GSSAPI bind.
"""
assert 'start_tls' not in kwargs
assert 'cacert' not in kwargs
uri = 'ldap://%s' % format_netloc(hostname, 389)
return cls(uri, **kwargs)
self._conn = self._connect()
def __str__(self):
return self.ldap_uri
@@ -851,13 +802,6 @@ class LDAPClient:
def conn(self):
return self._conn
@property
def protocol(self):
if self.ldap_uri:
return self.ldap_uri.split('://', 1)[0]
else:
return None
def _get_schema(self):
if self._no_schema:
return None
@@ -969,8 +913,9 @@ class LDAPClient:
return b'TRUE'
else:
return b'FALSE'
elif isinstance(val, (unicode, int, Decimal, DN, Principal)):
return str(val).encode('utf-8')
elif isinstance(val, (unicode, six.integer_types, Decimal, DN,
Principal)):
return six.text_type(val).encode('utf-8')
elif isinstance(val, DNSName):
return val.to_text().encode('ascii')
elif isinstance(val, bytes):
@@ -1201,22 +1146,16 @@ class LDAPClient:
if not self._sasl_nocanon:
conn.set_option(ldap.OPT_X_SASL_NOCANON, ldap.OPT_OFF)
if self._start_tls and self.protocol == 'ldap':
# STARTTLS applies only to ldap:// connections
if self._start_tls:
conn.start_tls_s()
return conn
def simple_bind(self, bind_dn, bind_password, server_controls=None,
client_controls=None, insecure_bind=False):
client_controls=None):
"""
Perform simple bind operation.
"""
if (self.protocol == 'ldap' and not self._start_tls and
bind_password and not insecure_bind):
# non-empty bind must use a secure connection unless
# insecure bind is explicitly enabled
raise ValueError('simple_bind over insecure LDAP connection')
with self.error_handler():
self._flush_schema()
assert isinstance(bind_dn, DN)
@@ -1241,7 +1180,7 @@ class LDAPClient:
Perform SASL bind operation using the SASL GSSAPI mechanism.
"""
with self.error_handler():
if self.protocol == 'ldapi':
if self._protocol == 'ldapi':
auth_tokens = SASL_GSS_SPNEGO
else:
auth_tokens = SASL_GSSAPI
@@ -1363,7 +1302,7 @@ class LDAPClient:
value = u'\\'.join(
value[i:i+2] for i in six.moves.range(-2, len(value), 2))
else:
value = str(value)
value = six.text_type(value)
value = ldap.filter.escape_filter_chars(value)
if not exact:
@@ -1569,9 +1508,9 @@ class LDAPClient:
str(base_dn), scope, filter, attrs_list,
serverctrls=sctrls, timeout=time_limit,
sizelimit=size_limit)
except ldap.LDAPError as e2:
except ldap.LDAPError as e:
logger.warning(
"Error cancelling paged search: %s", e2)
"Error cancelling paged search: %s", e)
cookie = ''
try:

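The removed realm_to_serverid and realm_to_ldapi_uri helpers above map a Kerberos realm name to the LDAPI URI of the local 389-DS socket. A sketch with the socket path template written out inline (the template is an assumption here; FreeIPA takes it from ipaplatform.paths):

    import ldapurl

    def realm_to_serverid(realm_name):
        """EXAMPLE.COM -> EXAMPLE-COM (389-DS server id)."""
        return "-".join(realm_name.split("."))

    def realm_to_ldapi_uri(realm_name,
                           socket_template="/var/run/slapd-%s.socket"):  # assumed path
        socketname = socket_template % realm_to_serverid(realm_name)
        return "ldapi://" + ldapurl.ldapUrlEscape(socketname)

    # prints an ldapi URI with the socket path percent-encoded
    print(realm_to_ldapi_uri("EXAMPLE.COM"))
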
View File

@@ -1,6 +1,6 @@
Metadata-Version: 2.1
Name: ipapython
Version: 4.8.10
Version: 4.7.2
Summary: FreeIPA python support library
Home-page: https://www.freeipa.org/
Author: FreeIPA Developers
@@ -16,9 +16,10 @@ Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
Classifier: Programming Language :: C
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Programming Language :: Python :: Implementation :: CPython
Classifier: Operating System :: POSIX
Classifier: Operating System :: POSIX :: Linux
@@ -26,6 +27,5 @@ Classifier: Operating System :: Unix
Classifier: Topic :: Internet :: Name Service (DNS)
Classifier: Topic :: Security
Classifier: Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP
Requires-Python: >=3.6.0
Provides-Extra: ldap
Provides-Extra: netifaces
Requires-Python: >=2.7.5,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*
Provides-Extra: install

View File

@@ -6,13 +6,11 @@ config.py
cookie.py
directivesetter.py
dn.py
dn_ctypes.py
dnsutil.py
dogtag.py
errors.py
graph.py
ipa_log_manager.py
ipachangeconf.py
ipaldap.py
ipautil.py
ipavalidate.py
@@ -24,7 +22,6 @@ setup.cfg
setup.py
ssh.py
version.py
../COPYING
install/__init__.py
install/cli.py
install/common.py

View File

@@ -2,12 +2,14 @@ cffi
cryptography>=1.6
dnspython>=1.15
gssapi>=1.2.0
ipaplatform==4.8.10
ipaplatform==4.7.2
netaddr
netifaces>=0.10.4
python-ldap>=3.0.0
six
[ldap]
python-ldap
[:python_version<'3']
enum34
[netifaces]
netifaces
[install]
dbus-python

View File

@@ -28,31 +28,28 @@ import random
import math
import os
import sys
import errno
import copy
import shutil
import socket
import re
import datetime
import netaddr
import netifaces
import time
import pwd
import grp
from contextlib import contextmanager
import locale
import collections
import urllib
from dns import resolver, reversename
from dns.exception import DNSException
import six
from six.moves import input
try:
import netifaces
except ImportError:
netifaces = None
from six.moves import urllib
from ipapython.dn import DN
from ipaplatform.paths import paths
logger = logging.getLogger(__name__)
@@ -200,8 +197,6 @@ class CheckedIPAddress(UnsafeIPAddress):
:return: InterfaceDetails named tuple or None if no interface has
this address
"""
if netifaces is None:
raise ImportError("netifaces")
logger.debug("Searching for an interface of IP address: %s", self)
if self.version == 4:
family = netifaces.AF_INET
@@ -307,7 +302,7 @@ def template_str(txt, vars):
# eval() is a special string one can insert into a template to have the
# Python interpreter evaluate the string. This is intended to allow
# math to be performed in templates.
pattern = re.compile(r'(eval\s*\(([^()]*)\))')
pattern = re.compile('(eval\s*\(([^()]*)\))')
val = pattern.sub(lambda x: str(eval(x.group(2))), val)
return val
@@ -388,8 +383,7 @@ class CalledProcessError(subprocess.CalledProcessError):
def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
capture_output=False, skip_output=False, cwd=None,
runas=None, suplementary_groups=[],
capture_error=False, encoding=None, redirect_output=False,
umask=None, nolog_output=False, nolog_error=False):
capture_error=False, encoding=None, redirect_output=False, umask=None):
"""
Execute an external command.
@@ -421,8 +415,6 @@ def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
suplementary groups for subporcess.
The option runas must be specified together with this option.
:param capture_error: Capture stderr
:param nolog_output: do not log stdout even if it is being captured
:param nolog_error: do not log stderr even if it is being captured
:param encoding: For Python 3, the encoding to use for output,
error_output, and (if it's not bytes) stdin.
If None, the current encoding according to locale is used.
@@ -455,7 +447,7 @@ def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
p_out = None
p_err = None
if isinstance(nolog, str):
if isinstance(nolog, six.string_types):
# We expect a tuple (or list, or other iterable) of nolog strings.
# Passing just a single string is bad: strings are iterable, so this
# would result in every individual character of that string being
@@ -512,18 +504,14 @@ def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
for group, gid in zip(suplementary_groups, suplementary_gids):
logger.debug('suplementary_group=%s (GID %d)', group, gid)
if runas is not None or umask is not None:
# preexec function is not supported in WSGI environment
def preexec_fn():
if runas is not None:
os.setgroups(suplementary_gids)
os.setregid(pent.pw_gid, pent.pw_gid)
os.setreuid(pent.pw_uid, pent.pw_uid)
def preexec_fn():
if runas is not None:
os.setgroups(suplementary_gids)
os.setregid(pent.pw_gid, pent.pw_gid)
os.setreuid(pent.pw_uid, pent.pw_uid)
if umask is not None:
os.umask(umask)
else:
preexec_fn = None
if umask:
os.umask(umask)
try:
# pylint: disable=subprocess-popen-preexec-fn
@@ -535,7 +523,7 @@ def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
logger.debug('Process interrupted')
p.wait()
raise
except BaseException:
except:
logger.debug('Process execution failed')
raise
finally:
@@ -555,24 +543,15 @@ def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
errors='replace')
else:
output_log = stdout
if six.PY3:
error_log = stderr.decode(locale.getpreferredencoding(),
errors='replace')
else:
error_log = stderr
output_log = nolog_replace(output_log, nolog)
if nolog_output:
logger.debug('stdout=<REDACTED>')
else:
logger.debug('stdout=%s', output_log)
logger.debug('stdout=%s', output_log)
error_log = nolog_replace(error_log, nolog)
if nolog_error:
logger.debug('stderr=<REDACTED>')
else:
logger.debug('stderr=%s', error_log)
logger.debug('stderr=%s', error_log)
if capture_output:
if six.PY2:
@@ -606,7 +585,7 @@ def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
def nolog_replace(string, nolog):
"""Replace occurences of strings given in `nolog` with XXXXXXXX"""
for value in nolog:
if not value or not isinstance(value, str):
if not value or not isinstance(value, six.string_types):
continue
quoted = urllib.parse.quote(value)
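Based on the docstring, the intended behaviour amounts to this (values are made up):
print(nolog_replace('ldap bind password: Secret123', ('Secret123',)))
# -> 'ldap bind password: XXXXXXXX'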
@@ -698,7 +677,7 @@ class CIDict(dict):
if six.PY2:
def has_key(self, key):
# pylint: disable=no-member
return super(CIDict, self).has_key(key.lower()) # noqa
return super(CIDict, self).has_key(key.lower())
# pylint: enable=no-member
def get(self, key, failobj=None):
@@ -975,7 +954,7 @@ def user_input(prompt, default = None, allow_empty = True):
return ''
raise RuntimeError("Failed to get user input")
if isinstance(default, str):
if isinstance(default, six.string_types):
while True:
try:
ret = input("%s [%s]: " % (prompt, default))
@@ -1109,17 +1088,30 @@ def check_port_bindable(port, socket_type=socket.SOCK_STREAM):
s.close()
def config_replace_variables(filepath, replacevars=dict(), appendvars=dict(),
removevars=None):
def reverse_record_exists(ip_address):
"""
Checks if the IP address has a reverse record somewhere.
Does not care where it points.
Returns True/False
"""
reverse = reversename.from_address(str(ip_address))
try:
resolver.query(reverse, "PTR")
except DNSException:
# really don't care what exception, PTR is simply unresolvable
return False
return True
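Typical use, assuming dnspython is importable and a resolver is reachable (the address is illustrative):
if not reverse_record_exists('192.0.2.10'):
    logger.warning('no PTR record found for 192.0.2.10')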
def config_replace_variables(filepath, replacevars=dict(), appendvars=dict()):
"""
Take a key=value based configuration file, and write a new version
with certain values replaced, appended, or removed.
with certain values replaced or appended
All (key, value) pairs from replacevars and appendvars that were not found
in the configuration file will be added there.
All entries in set removevars are removed.
It is the responsibility of the caller to ensure that replacevars and
appendvars do not overlap.
@@ -1130,7 +1122,7 @@ def config_replace_variables(filepath, replacevars=dict(), appendvars=dict(),
One has to run restore_context(filepath) afterwards or the
security context of the file will not be correct after modification
"""
pattern = re.compile(r'''
pattern = re.compile('''
(^
\s*
(?P<option> [^\#;]+?)
@@ -1161,11 +1153,7 @@ $)''', re.VERBOSE)
elif value.find(appendvars[option]) == -1:
new_line = u"%s=%s %s\n" % (option, value, appendvars[option])
old_values[option] = value
if removevars and option in removevars:
old_values[option] = value
new_line = None
if new_line is not None:
new_config.write(new_line)
new_config.write(new_line)
# Now add all options from replacevars and appendvars that were not found in the file
new_vars = replacevars.copy()
new_vars.update(appendvars)
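A hypothetical call matching the docstring (path and keys are made up): existing keys are rewritten in place, missing ones are appended, and the caller keeps replacevars and appendvars disjoint.
config_replace_variables(
    '/etc/example/service.conf',
    replacevars={'LOGLEVEL': 'debug'},
    appendvars={'EXTRA_OPTS': '--foo'},
)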
@@ -1205,7 +1193,7 @@ def inifile_replace_variables(filepath, section, replacevars=dict(), appendvars=
One has to run restore_context(filepath) afterwards or the
security context of the file will not be correct after modification
"""
pattern = re.compile(r'''
pattern = re.compile('''
(^
\[
(?P<section> .+) \]
@@ -1451,14 +1439,14 @@ if six.PY2:
Decode argument using the file system encoding, as returned by
`sys.getfilesystemencoding()`.
"""
if isinstance(value, bytes):
if isinstance(value, six.binary_type):
return value.decode(sys.getfilesystemencoding())
elif isinstance(value, str):
elif isinstance(value, six.text_type):
return value
else:
raise TypeError("expect {0} or {1}, not {2}".format(
bytes.__name__,
str.__name__,
six.binary_type.__name__,
six.text_type.__name__,
type(value).__name__))
else:
fsdecode = os.fsdecode #pylint: disable=no-member
@@ -1534,7 +1522,7 @@ def decode_json(data):
# default
return 'utf-8'
if isinstance(data, str):
if isinstance(data, six.text_type):
return data
return data.decode(detect_encoding(data), 'surrogatepass')
@@ -1561,7 +1549,7 @@ class APIVersion(tuple):
return "<APIVersion('{}.{}')>".format(*self)
def __getnewargs__(self):
return (str(self),)
return str(self)
@property
def major(self):
@@ -1570,61 +1558,3 @@ class APIVersion(tuple):
@property
def minor(self):
return self[1]
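Side note on the __getnewargs__ hunk above: pickle protocol 2+ requires __getnewargs__ to return a tuple of arguments for __new__, so (str(self),) is the correct form; returning the bare string raises a TypeError when the object is pickled or copied. A sketch, assuming APIVersion is constructed from a 'major.minor' string:
import pickle

v = APIVersion('2.239')
assert pickle.loads(pickle.dumps(v)) == v  # works only when a tuple is returned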
def remove_keytab(keytab_path):
"""
Remove Kerberos keytab and issue a warning if the procedure fails
:param keytab_path: path to the keytab file
"""
try:
logger.debug("Removing service keytab: %s", keytab_path)
os.remove(keytab_path)
except OSError as e:
if e.errno != errno.ENOENT:
logger.warning("Failed to remove Kerberos keytab '%s': %s",
keytab_path, e)
logger.warning("You may have to remove it manually")
def remove_ccache(ccache_path=None, run_as=None):
"""
Remove the Kerberos credential cache; essentially a wrapper around kdestroy.
:param ccache_path: path to the ccache file
:param run_as: run kdestroy as this user
"""
logger.debug("Removing service credentials cache")
kdestroy_cmd = [paths.KDESTROY]
if ccache_path is not None:
logger.debug("Ccache path: '%s'", ccache_path)
kdestroy_cmd.extend(['-c', ccache_path])
try:
run(kdestroy_cmd, runas=run_as, env={})
except CalledProcessError as e:
logger.warning(
"Failed to clear Kerberos credentials cache: %s", e)
def remove_file(filename):
"""Remove a file and log any exceptions raised.
"""
try:
os.unlink(filename)
except Exception as e:
# ignore missing file
if getattr(e, 'errno', None) != errno.ENOENT:
logger.error('Error removing %s: %s', filename, str(e))
def rmtree(path):
"""
Remove a directory structure and log any exceptions raised.
"""
try:
if os.path.exists(path):
shutil.rmtree(path)
except Exception as e:
logger.error('Error removing %s: %s', path, str(e))
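These helpers log failures of the expected error types instead of raising, so cleanup code can typically call them unconditionally; an illustrative sequence (all paths made up):
remove_keytab('/etc/example.keytab')
remove_ccache(run_as='exampleservice')
remove_file('/run/example.pid')
rmtree('/var/lib/example-install')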

View File

@@ -61,17 +61,17 @@ def split_principal_name(principal_name):
@six.python_2_unicode_compatible
class Principal:
class Principal(object):
"""
Container for the principal name and realm according to RFC 1510
"""
def __init__(self, components, realm=None):
if isinstance(components, bytes):
if isinstance(components, six.binary_type):
raise TypeError(
"Cannot create a principal object from bytes: {!r}".format(
components)
)
elif isinstance(components, str):
elif isinstance(components, six.string_types):
# parse principal components from realm
self.components, self.realm = self._parse_from_text(
components, realm)
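Illustrative construction following the branches above: a text principal is parsed into components and realm, while bytes are rejected with TypeError (values are made up).
p = Principal('HTTP/server.example.com@EXAMPLE.COM')
# Principal(b'HTTP/server.example.com') would raise TypeError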

View File

@@ -20,10 +20,10 @@
from __future__ import absolute_import
import os
import six
from ipapython.ipautil import run
from ipaplatform.paths import paths
from ipaplatform.tasks import tasks
# NOTE: Absolute path not required for keyctl since we reset the environment
# in ipautil.run.
@@ -51,7 +51,7 @@ def get_real_key(key):
One cannot request a key based on the description it was created with,
so find the one we're looking for.
"""
assert isinstance(key, str)
assert isinstance(key, six.string_types)
result = run([paths.KEYCTL, 'search', KEYRING, KEYTYPE, key],
raiseonerr=False, capture_output=True)
if result.returncode:
@@ -60,13 +60,7 @@ def get_real_key(key):
def get_persistent_key(key):
"""
Fetches the value of a persistent key from storage, trimming
any trailing whitespace.
Assert when key is not a string-type.
"""
assert isinstance(key, str)
assert isinstance(key, six.string_types)
result = run([paths.KEYCTL, 'get_persistent', KEYRING, key],
raiseonerr=False, capture_output=True)
if result.returncode:
@@ -74,14 +68,7 @@ def get_persistent_key(key):
return result.raw_output.rstrip()
def is_persistent_keyring_supported(check_container=True):
"""Returns True if the kernel persistent keyring is supported.
If check_container is True and a containerized environment is detected,
return False. There is no support for keyring namespace isolation yet.
"""
if check_container and tasks.detect_container() is not None:
return False
def is_persistent_keyring_supported():
uid = os.geteuid()
try:
get_persistent_key(str(uid))
@@ -95,7 +82,7 @@ def has_key(key):
"""
Returns True/False whether the key exists in the keyring.
"""
assert isinstance(key, str)
assert isinstance(key, six.string_types)
try:
get_real_key(key)
return True
@@ -109,7 +96,7 @@ def read_key(key):
Use pipe instead of print here to ensure we always get the raw data.
"""
assert isinstance(key, str)
assert isinstance(key, six.string_types)
real_key = get_real_key(key)
result = run([paths.KEYCTL, 'pipe', real_key], raiseonerr=False,
capture_output=True)
@@ -123,7 +110,7 @@ def update_key(key, value):
"""
Update the keyring data. If the key doesn't exist it is created.
"""
assert isinstance(key, str)
assert isinstance(key, six.string_types)
assert isinstance(value, bytes)
if has_key(key):
real_key = get_real_key(key)
@@ -139,7 +126,7 @@ def add_key(key, value):
"""
Add a key to the kernel keyring.
"""
assert isinstance(key, str)
assert isinstance(key, six.string_types)
assert isinstance(value, bytes)
if has_key(key):
raise ValueError('key %s already exists' % key)
@@ -153,7 +140,7 @@ def del_key(key):
"""
Remove a key from the keyring
"""
assert isinstance(key, str)
assert isinstance(key, six.string_types)
real_key = get_real_key(key)
result = run([paths.KEYCTL, 'unlink', real_key, KEYRING],
raiseonerr=False)
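A sketch of the round trip these helpers provide on top of keyctl; the key name is illustrative and values must be bytes:
if is_persistent_keyring_supported():
    update_key('ipa_example_secret', b'hunter2')   # created if missing, else replaced
    print(read_key('ipa_example_secret'))
    del_key('ipa_example_secret')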

View File

@@ -43,11 +43,12 @@ if __name__ == '__main__':
# "ipalib", # circular dependency
"ipaplatform",
"netaddr",
"netifaces",
"python-ldap",
"six",
],
extras_require={
"ldap": ["python-ldap"], # ipapython.ipaldap
# CheckedIPAddress.get_matching_interface
"netifaces": ["netifaces"],
":python_version<'3'": ["enum34"],
"install": ["dbus-python"], # for certmonger
},
)

View File

@@ -38,8 +38,7 @@ __all__ = ['SSHPublicKey']
OPENSSH_BASE_REGEX = re.compile(r'^[\t ]*(?P<keytype>[^\x00\n\r]+?) [\t ]*(?P<key>[^\x00\n\r]+?)(?:[\t ]+(?P<comment>[^\x00\n\r]*?)[\t ]*)?$')
OPENSSH_OPTIONS_REGEX = re.compile(r'(?P<name>[-0-9A-Za-z]+)(?:="(?P<value>(?:\\"|[^\x00\n\r"])*)")?')
class SSHPublicKey:
class SSHPublicKey(object):
"""
SSH public key object.
"""

View File

@@ -18,10 +18,10 @@
#
# The full version including strings
VERSION = "4.8.10"
VERSION="4.7.2"
# A fuller version including the vendor tag (e.g. 3.3.3-34.fc20)
VENDOR_VERSION = "4.8.10"
VENDOR_VERSION="4.7.2"
# Just the numeric portion of the version so one can do direct numeric
@@ -41,11 +41,11 @@ VENDOR_VERSION = "4.8.10"
# IPA 3.2.1: NUM_VERSION=30201
# IPA 3.2.99: NUM_VERSION=30299 (development version)
# IPA 3.3.0: NUM_VERSION=30300
NUM_VERSION = 40810
NUM_VERSION=40702
# The version of the API.
API_VERSION = "2.239"
API_VERSION=u'2.230'
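The NUM_VERSION scheme spelled out in the comment above is simply major*10000 + minor*100 + micro:
def num_version(major, minor, micro):
    return major * 10000 + minor * 100 + micro

assert num_version(3, 2, 1) == 30201
assert num_version(4, 7, 2) == 40702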
DEFAULT_PLUGINS = frozenset(l.strip() for l in """
@@ -246,13 +246,11 @@ env/1
group/1
group_add/1
group_add_member/1
group_add_member_manager/1
group_del/1
group_detach/1
group_find/1
group_mod/1
group_remove_member/1
group_remove_member_manager/1
group_show/1
hbacrule/1
hbacrule_add/1
@@ -305,12 +303,10 @@ host_show/1
hostgroup/1
hostgroup_add/1
hostgroup_add_member/1
hostgroup_add_member_manager/1
hostgroup_del/1
hostgroup_find/1
hostgroup_mod/1
hostgroup_remove_member/1
hostgroup_remove_member_manager/1
hostgroup_show/1
i18n_messages/1
idoverridegroup/1
@@ -458,13 +454,11 @@ server_role/1
server_role_find/1
server_role_show/1
server_show/1
server_state/1
service/1
service_add/1
service_add_cert/1
service_add_host/1
service_add_principal/1
service_add_smb/1
service_allow_create_keytab/1
service_allow_retrieve_keytab/1
service_del/1
@@ -567,7 +561,6 @@ topologysuffix_verify/1
trust/1
trust_add/1
trust_del/1
trust_enable_agent/1
trust_fetch_domains/1
trust_find/1
trust_mod/1

View File

@@ -18,10 +18,10 @@
#
# The full version including strings
VERSION = "@VERSION@"
VERSION="@VERSION@"
# A fuller version including the vendor tag (e.g. 3.3.3-34.fc20)
VENDOR_VERSION = "@VERSION@@VENDOR_SUFFIX@"
VENDOR_VERSION="@VERSION@@VENDOR_SUFFIX@"
# Just the numeric portion of the version so one can do direct numeric
@@ -41,11 +41,11 @@ VENDOR_VERSION = "@VERSION@@VENDOR_SUFFIX@"
# IPA 3.2.1: NUM_VERSION=30201
# IPA 3.2.99: NUM_VERSION=30299 (development version)
# IPA 3.3.0: NUM_VERSION=30300
NUM_VERSION = @NUM_VERSION@
NUM_VERSION=@NUM_VERSION@
# The version of the API.
API_VERSION = "@API_VERSION@"
API_VERSION=u'@API_VERSION@'
DEFAULT_PLUGINS = frozenset(l.strip() for l in """