Imported Debian patch 4.0.5-6~numeezy

This commit is contained in:
Alexandre Ellert
2016-02-17 15:07:45 +01:00
committed by Mario Fetka
parent c44de33144
commit 10dfc9587b
1203 changed files with 53869 additions and 241462 deletions

View File

@@ -1,26 +1,27 @@
PYTHON ?= /usr/bin/python2
PYTHONLIBDIR ?= $(shell $(PYTHON) -c "from distutils.sysconfig import *; print(get_python_lib())")
PYTHONLIBDIR ?= $(shell python2 -c "from distutils.sysconfig import *; print get_python_lib()")
PACKAGEDIR ?= $(DESTDIR)/$(PYTHONLIBDIR)/ipa
CONFIGDIR ?= $(DESTDIR)/etc/ipa
TESTS = $(wildcard test/*.py)
SUBDIRS = py_default_encoding
all:
@for subdir in $(SUBDIRS); do \
(cd $$subdir && $(MAKE) $@) || exit 1; \
done
check:
.PHONY: install
install:
if [ "$(DESTDIR)" = "" ]; then \
$(PYTHON) setup.py install; \
python2 setup.py install; \
else \
$(PYTHON) setup.py install --root $(DESTDIR); \
python2 setup.py install --root $(DESTDIR); \
fi
@for subdir in $(SUBDIRS); do \
(cd $$subdir && $(MAKE) $@) || exit 1; \
done
clean:
rm -f *~ *.pyc __pycache__/
rm -f *~ *.pyc
@for subdir in $(SUBDIRS); do \
(cd $$subdir && $(MAKE) $@) || exit 1; \
done
@@ -36,3 +37,9 @@ maintainer-clean: distclean
@for subdir in $(SUBDIRS); do \
(cd $$subdir && $(MAKE) $@) || exit 1; \
done
.PHONY: test
test: $(subst .py,.tst,$(TESTS))
%.tst: %.py
python2 $<

Binary file not shown.

View File

@@ -32,7 +32,7 @@ from ipapython import config
from ipapython import ipa_log_manager
class ScriptError(Exception):
class ScriptError(StandardError):
"""An exception that records an error message and a return value
"""
def __init__(self, msg='', rval=1):
@@ -169,7 +169,7 @@ class AdminTool(object):
self.ask_for_options()
self.setup_logging()
return_value = self.run()
except BaseException as exception:
except BaseException, exception:
traceback = sys.exc_info()[2]
error_message, return_value = self.handle_error(exception)
if return_value:
@@ -291,10 +291,6 @@ class AdminTool(object):
self.command_name, type(exception).__name__, exception)
if error_message:
self.log.error(error_message)
message = "The %s command failed." % self.command_name
if self.log_file_name:
message += " See %s for more information" % self.log_file_name
self.log.error(message)
def log_success(self):
self.log.info('The %s command was successful', self.command_name)

View File

@@ -17,530 +17,6 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import re
import tempfile
import shutil
import base64
from nss import nss
from nss.error import NSPRError
from ipaplatform.paths import paths
from ipapython.ipa_log_manager import root_logger
from ipapython import ipautil
from ipalib import x509
CA_NICKNAME_FMT = "%s IPA CA"
def get_ca_nickname(realm, format=CA_NICKNAME_FMT):
return format % realm
def create_ipa_nssdb():
db = NSSDatabase(paths.IPA_NSSDB_DIR)
pwdfile = os.path.join(db.secdir, 'pwdfile.txt')
ipautil.backup_file(pwdfile)
ipautil.backup_file(os.path.join(db.secdir, 'cert8.db'))
ipautil.backup_file(os.path.join(db.secdir, 'key3.db'))
ipautil.backup_file(os.path.join(db.secdir, 'secmod.db'))
with open(pwdfile, 'w') as f:
f.write(ipautil.ipa_generate_password(pwd_len=40))
os.chmod(pwdfile, 0o600)
db.create_db(pwdfile)
os.chmod(os.path.join(db.secdir, 'cert8.db'), 0o644)
os.chmod(os.path.join(db.secdir, 'key3.db'), 0o644)
os.chmod(os.path.join(db.secdir, 'secmod.db'), 0o644)
def update_ipa_nssdb():
ipa_db = NSSDatabase(paths.IPA_NSSDB_DIR)
sys_db = NSSDatabase(paths.NSS_DB_DIR)
if not os.path.exists(os.path.join(ipa_db.secdir, 'cert8.db')):
create_ipa_nssdb()
for nickname, trust_flags in (('IPA CA', 'CT,C,C'),
('External CA cert', 'C,,')):
try:
cert = sys_db.get_cert(nickname)
except RuntimeError:
continue
try:
ipa_db.add_cert(cert, nickname, trust_flags)
except ipautil.CalledProcessError as e:
raise RuntimeError("Failed to add %s to %s: %s" %
(nickname, ipa_db.secdir, e))
# Remove IPA certs from /etc/pki/nssdb
for nickname, trust_flags in ipa_db.list_certs():
while sys_db.has_nickname(nickname):
try:
sys_db.delete_cert(nickname)
except ipautil.CalledProcessError as e:
raise RuntimeError("Failed to remove %s from %s: %s" %
(nickname, sys_db.secdir, e))
def find_cert_from_txt(cert, start=0):
"""
Given a cert blob (str) which may or may not contian leading and
trailing text, pull out just the certificate part. This will return
the FIRST cert in a stream of data.
Returns a tuple (certificate, last position in cert)
"""
s = cert.find('-----BEGIN CERTIFICATE-----', start)
e = cert.find('-----END CERTIFICATE-----', s)
if e > 0:
e = e + 25
if s < 0 or e < 0:
raise RuntimeError("Unable to find certificate")
cert = cert[s:e]
return (cert, e)
class NSSDatabase(object):
"""A general-purpose wrapper around a NSS cert database
For permanent NSS databases, pass the cert DB directory to __init__
For temporary databases, do not pass nssdir, and call close() when done
to remove the DB. Alternatively, a NSSDatabase can be used as a
context manager that calls close() automatically.
"""
# Traditionally, we used CertDB for our NSS DB operations, but that class
# got too tied to IPA server details, killing reusability.
# BaseCertDB is a class that knows nothing about IPA.
# Generic NSS DB code should be moved here.
def __init__(self, nssdir=None):
if nssdir is None:
self.secdir = tempfile.mkdtemp()
self._is_temporary = True
else:
self.secdir = nssdir
self._is_temporary = False
def close(self):
if self._is_temporary:
shutil.rmtree(self.secdir)
def __enter__(self):
return self
def __exit__(self, type, value, tb):
self.close()
def run_certutil(self, args, stdin=None, **kwargs):
new_args = [paths.CERTUTIL, "-d", self.secdir]
new_args = new_args + args
return ipautil.run(new_args, stdin, **kwargs)
def create_db(self, password_filename):
"""Create cert DB
:param password_filename: Name of file containing the database password
"""
self.run_certutil(["-N", "-f", password_filename])
def list_certs(self):
"""Return nicknames and cert flags for all certs in the database
:return: List of (name, trust_flags) tuples
"""
result = self.run_certutil(["-L"], capture_output=True)
certs = result.output.splitlines()
# FIXME, this relies on NSS never changing the formatting of certutil
certlist = []
for cert in certs:
match = re.match(r'^(.+?)\s+(\w*,\w*,\w*)\s*$', cert)
if match:
certlist.append(match.groups())
return tuple(certlist)
def find_server_certs(self):
"""Return nicknames and cert flags for server certs in the database
Server certs have an "u" character in the trust flags.
:return: List of (name, trust_flags) tuples
"""
server_certs = []
for name, flags in self.list_certs():
if 'u' in flags:
server_certs.append((name, flags))
return server_certs
def get_trust_chain(self, nickname):
"""Return names of certs in a given cert's trust chain
:param nickname: Name of the cert
:return: List of certificate names
"""
root_nicknames = []
result = self.run_certutil(["-O", "-n", nickname], capture_output=True)
chain = result.output.splitlines()
for c in chain:
m = re.match('\s*"(.*)" \[.*', c)
if m:
root_nicknames.append(m.groups()[0])
return root_nicknames
def import_pkcs12(self, pkcs12_filename, db_password_filename,
pkcs12_passwd=None):
args = [paths.PK12UTIL, "-d", self.secdir,
"-i", pkcs12_filename,
"-k", db_password_filename, '-v']
if pkcs12_passwd is not None:
pkcs12_passwd = pkcs12_passwd + '\n'
args = args + ["-w", paths.DEV_STDIN]
try:
ipautil.run(args, stdin=pkcs12_passwd)
except ipautil.CalledProcessError as e:
if e.returncode == 17:
raise RuntimeError("incorrect password for pkcs#12 file %s" %
pkcs12_filename)
elif e.returncode == 10:
raise RuntimeError("Failed to open %s" % pkcs12_filename)
else:
raise RuntimeError("unknown error import pkcs#12 file %s" %
pkcs12_filename)
def import_files(self, files, db_password_filename, import_keys=False,
key_password=None, key_nickname=None):
"""
Import certificates and a single private key from multiple files
The files may be in PEM and DER certificate, PKCS#7 certificate chain,
PKCS#8 and raw private key and PKCS#12 formats.
:param files: Names of files to import
:param db_password_filename: Name of file containing the database
password
:param import_keys: Whether to import private keys
:param key_password: Password to decrypt private keys
:param key_nickname: Nickname of the private key to import from PKCS#12
files
"""
key_file = None
extracted_key = None
extracted_certs = ''
for filename in files:
try:
with open(filename, 'rb') as f:
data = f.read()
except IOError as e:
raise RuntimeError(
"Failed to open %s: %s" % (filename, e.strerror))
# Try to parse the file as PEM file
matches = list(re.finditer(
r'-----BEGIN (.+?)-----(.*?)-----END \1-----', data, re.DOTALL))
if matches:
loaded = False
for match in matches:
body = match.group()
label = match.group(1)
line = len(data[:match.start() + 1].splitlines())
if label in ('CERTIFICATE', 'X509 CERTIFICATE',
'X.509 CERTIFICATE'):
try:
x509.load_certificate(match.group(2))
except NSPRError as e:
if label != 'CERTIFICATE':
root_logger.warning(
"Skipping certificate in %s at line %s: %s",
filename, line, e)
continue
else:
extracted_certs += body + '\n'
loaded = True
continue
if label in ('PKCS7', 'PKCS #7 SIGNED DATA', 'CERTIFICATE'):
args = [
paths.OPENSSL, 'pkcs7',
'-print_certs',
]
try:
result = ipautil.run(
args, stdin=body, capture_output=True)
except ipautil.CalledProcessError as e:
if label == 'CERTIFICATE':
root_logger.warning(
"Skipping certificate in %s at line %s: %s",
filename, line, e)
else:
root_logger.warning(
"Skipping PKCS#7 in %s at line %s: %s",
filename, line, e)
continue
else:
extracted_certs += result.output + '\n'
loaded = True
continue
if label in ('PRIVATE KEY', 'ENCRYPTED PRIVATE KEY',
'RSA PRIVATE KEY', 'DSA PRIVATE KEY',
'EC PRIVATE KEY'):
if not import_keys:
continue
if key_file:
raise RuntimeError(
"Can't load private key from both %s and %s" %
(key_file, filename))
args = [
paths.OPENSSL, 'pkcs8',
'-topk8',
'-passout', 'file:' + db_password_filename,
]
if ((label != 'PRIVATE KEY' and key_password) or
label == 'ENCRYPTED PRIVATE KEY'):
key_pwdfile = ipautil.write_tmp_file(key_password)
args += [
'-passin', 'file:' + key_pwdfile.name,
]
try:
result = ipautil.run(
args, stdin=body, capture_output=True)
except ipautil.CalledProcessError as e:
root_logger.warning(
"Skipping private key in %s at line %s: %s",
filename, line, e)
continue
else:
extracted_key = result.output
key_file = filename
loaded = True
continue
if loaded:
continue
raise RuntimeError("Failed to load %s" % filename)
# Try to load the file as DER certificate
try:
x509.load_certificate(data, x509.DER)
except NSPRError:
pass
else:
data = x509.make_pem(base64.b64encode(data))
extracted_certs += data + '\n'
continue
# Try to import the file as PKCS#12 file
if import_keys:
try:
self.import_pkcs12(
filename, db_password_filename, key_password)
except RuntimeError:
pass
else:
if key_file:
raise RuntimeError(
"Can't load private key from both %s and %s" %
(key_file, filename))
key_file = filename
server_certs = self.find_server_certs()
if key_nickname:
for nickname, trust_flags in server_certs:
if nickname == key_nickname:
break
else:
raise RuntimeError(
"Server certificate \"%s\" not found in %s" %
(key_nickname, filename))
else:
if len(server_certs) > 1:
raise RuntimeError(
"%s server certificates found in %s, "
"expecting only one" %
(len(server_certs), filename))
continue
raise RuntimeError("Failed to load %s" % filename)
if import_keys and not key_file:
raise RuntimeError(
"No server certificates found in %s" % (', '.join(files)))
nss_certs = x509.load_certificate_list(extracted_certs)
nss_cert = None
for nss_cert in nss_certs:
nickname = str(nss_cert.subject)
self.add_cert(nss_cert.der_data, nickname, ',,')
del nss_certs, nss_cert
if extracted_key:
in_file = ipautil.write_tmp_file(extracted_certs + extracted_key)
out_file = tempfile.NamedTemporaryFile()
out_password = ipautil.ipa_generate_password()
out_pwdfile = ipautil.write_tmp_file(out_password)
args = [
paths.OPENSSL, 'pkcs12',
'-export',
'-in', in_file.name,
'-out', out_file.name,
'-passin', 'file:' + db_password_filename,
'-passout', 'file:' + out_pwdfile.name,
]
try:
ipautil.run(args)
except ipautil.CalledProcessError as e:
raise RuntimeError(
"No matching certificate found for private key from %s" %
key_file)
self.import_pkcs12(out_file.name, db_password_filename,
out_password)
def trust_root_cert(self, root_nickname, trust_flags=None):
if root_nickname[:7] == "Builtin":
root_logger.debug(
"No need to add trust for built-in root CAs, skipping %s" %
root_nickname)
else:
if trust_flags is None:
trust_flags = 'C,,'
try:
self.run_certutil(["-M", "-n", root_nickname,
"-t", trust_flags])
except ipautil.CalledProcessError as e:
raise RuntimeError(
"Setting trust on %s failed" % root_nickname)
def get_cert(self, nickname, pem=False):
args = ['-L', '-n', nickname, '-a']
try:
result = self.run_certutil(args, capture_output=True)
except ipautil.CalledProcessError:
raise RuntimeError("Failed to get %s" % nickname)
cert = result.output
if not pem:
(cert, start) = find_cert_from_txt(cert, start=0)
cert = x509.strip_header(cert)
cert = base64.b64decode(cert)
return cert
def has_nickname(self, nickname):
try:
self.get_cert(nickname)
except RuntimeError:
# This might be error other than "nickname not found". Beware.
return False
else:
return True
def export_pem_cert(self, nickname, location):
"""Export the given cert to PEM file in the given location"""
cert = self.get_cert(nickname, pem=True)
with open(location, "w+") as fd:
fd.write(cert)
os.chmod(location, 0o444)
def import_pem_cert(self, nickname, flags, location):
"""Import a cert form the given PEM file.
The file must contain exactly one certificate.
"""
try:
with open(location) as fd:
certs = fd.read()
except IOError as e:
raise RuntimeError(
"Failed to open %s: %s" % (location, e.strerror)
)
cert, st = find_cert_from_txt(certs)
self.add_cert(cert, nickname, flags, pem=True)
try:
find_cert_from_txt(certs, st)
except RuntimeError:
pass
else:
raise ValueError('%s contains more than one certificate' %
location)
def add_cert(self, cert, nick, flags, pem=False):
args = ["-A", "-n", nick, "-t", flags]
if pem:
args.append("-a")
self.run_certutil(args, stdin=cert)
def delete_cert(self, nick):
self.run_certutil(["-D", "-n", nick])
def verify_server_cert_validity(self, nickname, hostname):
"""Verify a certificate is valid for a SSL server with given hostname
Raises a ValueError if the certificate is invalid.
"""
certdb = cert = None
if nss.nss_is_initialized():
nss.nss_shutdown()
nss.nss_init(self.secdir)
try:
certdb = nss.get_default_certdb()
cert = nss.find_cert_from_nickname(nickname)
intended_usage = nss.certificateUsageSSLServer
try:
approved_usage = cert.verify_now(certdb, True, intended_usage)
except NSPRError as e:
if e.errno != -8102:
raise ValueError(e.strerror)
approved_usage = 0
if not approved_usage & intended_usage:
raise ValueError('invalid for a SSL server')
if not cert.verify_hostname(hostname):
raise ValueError('invalid for server %s' % hostname)
finally:
del certdb, cert
nss.nss_shutdown()
return None
def verify_ca_cert_validity(self, nickname):
certdb = cert = None
if nss.nss_is_initialized():
nss.nss_shutdown()
nss.nss_init(self.secdir)
try:
certdb = nss.get_default_certdb()
cert = nss.find_cert_from_nickname(nickname)
if not cert.subject:
raise ValueError("has empty subject")
try:
bc = cert.get_extension(nss.SEC_OID_X509_BASIC_CONSTRAINTS)
except KeyError:
raise ValueError("missing basic constraints")
bc = nss.BasicConstraints(bc.value)
if not bc.is_ca:
raise ValueError("not a CA certificate")
intended_usage = nss.certificateUsageSSLCA
try:
approved_usage = cert.verify_now(certdb, True, intended_usage)
except NSPRError as e:
if e.errno != -8102: # SEC_ERROR_INADEQUATE_KEY_USAGE
raise ValueError(e.strerror)
approved_usage = 0
if approved_usage & intended_usage != intended_usage:
raise ValueError('invalid for a CA')
finally:
del certdb, cert
nss.nss_shutdown()

View File

@@ -22,22 +22,22 @@
# This is used so we can add tracking to the Apache and 389-ds
# server certificates created during the IPA server installation.
from __future__ import print_function
import os
import sys
import time
import dbus
import shlex
import subprocess
import tempfile
from ipapython import ipautil
from ipapython.ipa_log_manager import *
from ipapython import dogtag
from ipaplatform.paths import paths
from ipaplatform import services
from ipapython.ipa_log_manager import root_logger
REQUEST_DIR = paths.CERTMONGER_REQUESTS_DIR
CA_DIR = paths.CERTMONGER_CAS_DIR
DBUS_CM_PATH = '/org/fedorahosted/certmonger'
DBUS_CM_IF = 'org.fedorahosted.certmonger'
DBUS_CM_NAME = 'org.fedorahosted.certmonger'
DBUS_CM_REQUEST_IF = 'org.fedorahosted.certmonger.request'
DBUS_CM_CA_IF = 'org.fedorahosted.certmonger.ca'
DBUS_PROPERTY_IF = 'org.freedesktop.DBus.Properties'
@@ -47,7 +47,7 @@ class _cm_dbus_object(object):
"""
Auxiliary class for convenient DBus object handling.
"""
def __init__(self, bus, parent, object_path, object_dbus_interface,
def __init__(self, bus, object_path, object_dbus_interface,
parent_dbus_interface=None, property_interface=False):
"""
bus - DBus bus object, result of dbus.SystemBus() or dbus.SessionBus()
@@ -63,7 +63,6 @@ class _cm_dbus_object(object):
if parent_dbus_interface is None:
parent_dbus_interface = object_dbus_interface
self.bus = bus
self.parent = parent
self.path = object_path
self.obj_dbus_if = object_dbus_interface
self.parent_dbus_if = parent_dbus_interface
@@ -73,83 +72,36 @@ class _cm_dbus_object(object):
self.prop_if = dbus.Interface(self.obj, DBUS_PROPERTY_IF)
class _certmonger(_cm_dbus_object):
def _start_certmonger():
"""
Create a connection to certmonger.
By default use SystemBus. When not available use private connection
over Unix socket.
This solution is really ugly and should be removed as soon as DBus
SystemBus is available at system install time.
Start certmonger daemon. If it's already running systemctl just ignores
the command.
"""
timeout = 300
def _start_private_conn(self):
sock_filename = os.path.join(tempfile.mkdtemp(), 'certmonger')
self._proc = subprocess.Popen([paths.CERTMONGER, '-n', '-L', '-P',
sock_filename])
for t in range(0, self.timeout, 5):
if os.path.exists(sock_filename):
return "unix:path=%s" % sock_filename
time.sleep(5)
self._stop_private_conn()
raise RuntimeError("Failed to start certmonger: Timed out")
def _stop_private_conn(self):
if self._proc:
retcode = self._proc.poll()
if retcode is not None:
return
self._proc.terminate()
for t in range(0, self.timeout, 5):
retcode = self._proc.poll()
if retcode is not None:
return
time.sleep(5)
root_logger.error("Failed to stop certmonger.")
def __del__(self):
self._stop_private_conn()
def __init__(self):
self._proc = None
self._bus = None
if not services.knownservices.certmonger.is_running():
try:
self._bus = dbus.SystemBus()
except dbus.DBusException as e:
err_name = e.get_dbus_name()
if err_name not in ['org.freedesktop.DBus.Error.NoServer',
'org.freedesktop.DBus.Error.FileNotFound']:
root_logger.error("Failed to connect to certmonger over "
"SystemBus: %s" % e)
raise
try:
self._private_sock = self._start_private_conn()
self._bus = dbus.connection.Connection(self._private_sock)
except dbus.DBusException as e:
root_logger.error("Failed to connect to certmonger over "
"private socket: %s" % e)
raise
else:
try:
self._bus.get_name_owner(DBUS_CM_NAME)
except dbus.DBusException:
try:
services.knownservices.certmonger.start()
except Exception as e:
root_logger.error("Failed to start certmonger: %s" % e)
raise
services.knownservices.certmonger.start()
except Exception, e:
root_logger.error('Failed to start certmonger: %s' % e)
raise
for t in range(0, self.timeout, 5):
try:
self._bus.get_name_owner(DBUS_CM_NAME)
break
except dbus.DBusException:
pass
time.sleep(5)
raise RuntimeError('Failed to start certmonger')
super(_certmonger, self).__init__(self._bus, None, DBUS_CM_PATH,
DBUS_CM_IF)
def _connect_to_certmonger():
"""
Start certmonger daemon and connect to it via DBus.
"""
try:
_start_certmonger()
except (KeyboardInterrupt, OSError), e:
root_logger.error('Failed to start certmonger: %s' % e)
raise
try:
bus = dbus.SystemBus()
cm = _cm_dbus_object(bus, DBUS_CM_PATH, DBUS_CM_IF)
except dbus.DBusException, e:
root_logger.error("Failed to access certmonger over DBus: %s", e)
raise
return cm
def _get_requests(criteria=dict()):
@@ -159,7 +111,7 @@ def _get_requests(criteria=dict()):
if not isinstance(criteria, dict):
raise TypeError('"criteria" must be dict.')
cm = _certmonger()
cm = _connect_to_certmonger()
requests = []
requests_paths = []
if 'nickname' in criteria:
@@ -170,12 +122,12 @@ def _get_requests(criteria=dict()):
requests_paths = cm.obj_if.get_requests()
for request_path in requests_paths:
request = _cm_dbus_object(cm.bus, cm, request_path, DBUS_CM_REQUEST_IF,
request = _cm_dbus_object(cm.bus, request_path, DBUS_CM_REQUEST_IF,
DBUS_CM_IF, True)
for criterion in criteria:
if criterion == 'ca-name':
ca_path = request.obj_if.get_ca()
ca = _cm_dbus_object(cm.bus, cm, ca_path, DBUS_CM_CA_IF,
ca = _cm_dbus_object(cm.bus, ca_path, DBUS_CM_CA_IF,
DBUS_CM_IF)
value = ca.obj_if.get_nickname()
else:
@@ -184,7 +136,6 @@ def _get_requests(criteria=dict()):
break
else:
requests.append(request)
return requests
@@ -212,13 +163,13 @@ def get_request_value(request_id, directive):
"""
try:
request = _get_request(dict(nickname=request_id))
except RuntimeError as e:
except RuntimeError, e:
root_logger.error('Failed to get request: %s' % e)
raise
if request:
if directive == 'ca-name':
ca_path = request.obj_if.get_ca()
ca = _cm_dbus_object(request.bus, request, ca_path, DBUS_CM_CA_IF,
ca = _cm_dbus_object(request.bus, ca_path, DBUS_CM_CA_IF,
DBUS_CM_IF)
return ca.obj_if.get_nickname()
else:
@@ -240,9 +191,10 @@ def get_request_id(criteria):
"""
try:
request = _get_request(criteria)
except RuntimeError as e:
except RuntimeError, e:
root_logger.error('Failed to get request: %s' % e)
raise
if request:
return request.prop_if.Get(DBUS_CM_REQUEST_IF, 'nickname')
else:
@@ -270,7 +222,7 @@ def add_request_value(request_id, directive, value):
"""
try:
request = _get_request({'nickname': request_id})
except RuntimeError as e:
except RuntimeError, e:
root_logger.error('Failed to get request: %s' % e)
raise
if request:
@@ -302,27 +254,19 @@ def request_cert(nssdb, nickname, subject, principal, passwd_fname=None):
"""
Execute certmonger to request a server certificate.
"""
cm = _certmonger()
ca_path = cm.obj_if.find_ca_by_nickname('IPA')
if not ca_path:
raise RuntimeError('IPA CA not found')
cm = _connect_to_certmonger()
request_parameters = dict(KEY_STORAGE='NSSDB', CERT_STORAGE='NSSDB',
CERT_LOCATION=nssdb, CERT_NICKNAME=nickname,
KEY_LOCATION=nssdb, KEY_NICKNAME=nickname,
SUBJECT=subject, PRINCIPAL=[principal],
CA=ca_path)
SUBJECT=subject, PRINCIPAL=principal,)
if passwd_fname:
request_parameters['KEY_PIN_FILE'] = passwd_fname
result = cm.obj_if.add_request(request_parameters)
try:
if result[0]:
request = _cm_dbus_object(cm.bus, cm, result[1], DBUS_CM_REQUEST_IF,
request = _cm_dbus_object(cm.bus, result[1], DBUS_CM_REQUEST_IF,
DBUS_CM_IF, True)
else:
raise RuntimeError('add_request() returned False')
except Exception as e:
root_logger.error('Failed to create a new request: {error}'
.format(error=e))
except TypeError:
root_logger.error('Failed to get create new request.')
raise
return request.obj_if.get_nickname()
@@ -338,7 +282,7 @@ def start_tracking(nickname, secdir, password_file=None, command=None):
Returns certificate nickname.
"""
cm = _certmonger()
cm = _connect_to_certmonger()
params = {'TRACK': True}
params['cert-nickname'] = nickname
params['cert-database'] = os.path.abspath(secdir)
@@ -357,13 +301,10 @@ def start_tracking(nickname, secdir, password_file=None, command=None):
result = cm.obj_if.add_request(params)
try:
if result[0]:
request = _cm_dbus_object(cm.bus, cm, result[1], DBUS_CM_REQUEST_IF,
request = _cm_dbus_object(cm.bus, result[1], DBUS_CM_REQUEST_IF,
DBUS_CM_IF, True)
else:
raise RuntimeError('add_request() returned False')
except Exception as e:
root_logger.error('Failed to add new request: {error}'
.format(error=e))
except TypeError, e:
root_logger.error('Failed to add new request.')
raise
return request.prop_if.Get(DBUS_CM_REQUEST_IF, 'nickname')
@@ -384,26 +325,12 @@ def stop_tracking(secdir, request_id=None, nickname=None):
criteria['cert-nickname'] = nickname
try:
request = _get_request(criteria)
except RuntimeError as e:
except RuntimeError, e:
root_logger.error('Failed to get request: %s' % e)
raise
if request:
request.parent.obj_if.remove_request(request.path)
def modify(request_id, profile=None):
if profile:
request = _get_request({'nickname': request_id})
if request:
request.obj_if.modify({'template-profile': profile})
def resubmit_request(request_id, profile=None):
request = _get_request({'nickname': request_id})
if request:
if profile:
request.obj_if.modify({'template-profile': profile})
request.obj_if.resubmit()
cm = _connect_to_certmonger()
cm.obj_if.remove_request(request.path)
def _find_IPA_ca():
@@ -414,9 +341,9 @@ def _find_IPA_ca():
We can use find_request_value because the ca files have the
same file format.
"""
cm = _certmonger()
cm = _connect_to_certmonger()
ca_path = cm.obj_if.find_ca_by_nickname('IPA')
return _cm_dbus_object(cm.bus, cm, ca_path, DBUS_CM_CA_IF, DBUS_CM_IF, True)
return _cm_dbus_object(cm.bus, ca_path, DBUS_CM_CA_IF, DBUS_CM_IF, True)
def add_principal_to_cas(principal):
@@ -448,13 +375,15 @@ def remove_principal_from_cas():
ca.prop_if.Set(DBUS_CM_CA_IF, 'external-helper', ext_helper)
def get_pin(token):
def get_pin(token, dogtag_constants=None):
"""
Dogtag stores its NSS pin in a file formatted as token:PIN.
The caller is expected to handle any exceptions raised.
"""
with open(paths.PKI_TOMCAT_PASSWORD_CONF, 'r') as f:
if dogtag_constants is None:
dogtag_constants = dogtag.configured_constants()
with open(dogtag_constants.PASSWORD_CONF_PATH, 'r') as f:
for line in f:
(tok, pin) = line.split('=', 1)
if token == tok:
@@ -478,7 +407,7 @@ def dogtag_start_tracking(ca, nickname, pin, pinfile, secdir, pre_command,
Both commands can be None.
"""
cm = _certmonger()
cm = _connect_to_certmonger()
certmonger_cmd_template = paths.CERTMONGER_COMMAND_TEMPLATE
params = {'TRACK': True}
@@ -497,11 +426,19 @@ def dogtag_start_tracking(ca, nickname, pin, pinfile, secdir, pre_command,
params['KEY_PIN_FILE'] = os.path.abspath(pinfile)
if pre_command:
if not os.path.isabs(pre_command):
pre_command = certmonger_cmd_template % (pre_command)
if sys.maxsize > 2**32L:
libpath = 'lib64'
else:
libpath = 'lib'
pre_command = certmonger_cmd_template % (libpath, pre_command)
params['cert-presave-command'] = pre_command
if post_command:
if not os.path.isabs(post_command):
post_command = certmonger_cmd_template % (post_command)
if sys.maxsize > 2**32L:
libpath = 'lib64'
else:
libpath = 'lib'
post_command = certmonger_cmd_template % (libpath, post_command)
params['cert-postsave-command'] = post_command
if profile:
params['ca-profile'] = profile
@@ -526,23 +463,10 @@ def check_state(dirs):
return reqids
def wait_for_request(request_id, timeout=120):
for i in range(0, timeout, 5):
state = get_request_value(request_id, 'status')
root_logger.debug("certmonger request is in state %r", state)
if state in ('CA_REJECTED', 'CA_UNREACHABLE', 'CA_UNCONFIGURED',
'NEED_GUIDANCE', 'NEED_CA', 'MONITORING'):
break
time.sleep(5)
else:
raise RuntimeError("request timed out")
return state
if __name__ == '__main__':
request_id = request_cert(paths.HTTPD_ALIAS_DIR, "Test",
"cn=tiger.example.com,O=IPA",
"HTTP/tiger.example.com@EXAMPLE.COM")
csr = get_request_value(request_id, 'csr')
print(csr)
print csr
stop_tracking(request_id)

View File

@@ -17,20 +17,18 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import ConfigParser
from optparse import Option, Values, OptionParser, IndentedHelpFormatter, OptionValueError
from copy import copy
from dns import resolver, rdatatype
from dns.exception import DNSException
from six.moves.configparser import SafeConfigParser
from ipapython.dn import DN
from ipaplatform.paths import paths
import dns.name
from six.moves.urllib.parse import urlsplit
import socket
import re
import urlparse
class IPAConfigError(Exception):
def __init__(self, msg=''):
@@ -66,7 +64,7 @@ def check_ip_option(option, opt, value):
def check_dn_option(option, opt, value):
try:
return DN(value)
except Exception as e:
except Exception, e:
raise OptionValueError("option %s: invalid DN: %s" % (opt, e))
class IPAOption(Option):
@@ -107,7 +105,7 @@ class IPAOptionParser(OptionParser):
all_opts_dict = dict([ (o.dest, o) for o in self._get_all_options() if hasattr(o, 'sensitive') ])
safe_opts_dict = {}
for option, value in opts.__dict__.items():
for option, value in opts.__dict__.iteritems():
if all_opts_dict[option].sensitive != True:
safe_opts_dict[option] = value
@@ -154,7 +152,7 @@ class IPAConfig:
config = IPAConfig()
def __parse_config(discover_server = True):
p = SafeConfigParser()
p = ConfigParser.SafeConfigParser()
p.read(paths.IPA_DEFAULT_CONF)
try:
@@ -165,7 +163,7 @@ def __parse_config(discover_server = True):
if discover_server:
try:
s = p.get("global", "xmlrpc_uri")
server = urlsplit(s)
server = urlparse.urlsplit(s)
config.default_server.append(server.netloc)
except:
pass
@@ -178,6 +176,17 @@ def __parse_config(discover_server = True):
def __discover_config(discover_server = True):
servers = []
try:
if not config.default_realm:
try:
# only import krbV when we need it
import krbV
krbctx = krbV.default_context()
config.default_realm = krbctx.default_realm
except ImportError:
pass
if not config.default_realm:
return False
if not config.default_domain:
# try once with REALM -> domain
domain = str(config.default_realm).lower()

Binary file not shown.

View File

@@ -21,11 +21,8 @@ import re
import time
import datetime
import email.utils
from urllib2 import urlparse
from calendar import timegm
import six
from six.moves.urllib.parse import urlparse
from ipapython.ipa_log_manager import log_mgr
'''
@@ -49,7 +46,7 @@ the relevant RFC's as well as actual practice in the field. However
cookielib.py is tighly integrated with urllib2 and it's not possible
to use most of the features of cookielib without simultaneously using
urllib2. Unfortunataely we only use httplib because of our dependency
on xmlrpc.client. Without urllib2 cookielib is a non-starter.
on xmlrpclib. Without urllib2 cookielib is a non-starter.
This module is a minimal implementation of Netscape cookies which
works equally well on either the client or server side. It's API is
@@ -188,7 +185,7 @@ class Cookie(object):
try:
dt = datetime.datetime(*email.utils.parsedate(s)[0:6])
except Exception as e:
except Exception, e:
raise ValueError("unable to parse expires datetime '%s': %s" % (s, e))
return dt
@@ -273,9 +270,8 @@ class Cookie(object):
if match:
key = match.group(1)
value = match.group(2)
# Double quoted value?
if value and value[0] == '"':
if value[0] == '"':
if value[-1] == '"':
value = value[1:-1]
else:
@@ -392,9 +388,9 @@ class Cookie(object):
self._timestamp = None
elif isinstance(value, datetime.datetime):
self._timestamp = value
elif isinstance(value, (six.integer_types, float)):
elif isinstance(value, (int, long, float)):
self._timestamp = datetime.datetime.utcfromtimestamp(value)
elif isinstance(value, six.string_types):
elif isinstance(value, basestring):
self._timestamp = Cookie.parse_datetime(value)
else:
raise TypeError('value must be datetime, int, long, float, basestring or None, not %s' % \
@@ -418,9 +414,9 @@ class Cookie(object):
self._expires = None
elif isinstance(value, datetime.datetime):
self._expires = value
elif isinstance(value, (six.integer_types, float)):
elif isinstance(value, (int, long, float)):
self._expires = datetime.datetime.utcfromtimestamp(value)
elif isinstance(value, six.string_types):
elif isinstance(value, basestring):
self._expires = Cookie.parse_datetime(value)
else:
raise TypeError('value must be datetime, int, long, float, basestring or None, not %s' % \
@@ -541,7 +537,7 @@ class Cookie(object):
received from.
'''
scheme, domain, path, params, query, fragment = urlparse(url)
scheme, domain, path, params, query, fragment = urlparse.urlparse(url)
if self.domain is None:
self.domain = domain.lower()
@@ -598,7 +594,7 @@ class Cookie(object):
from ipalib.util import validate_domain_name
try:
validate_domain_name(url_domain)
except Exception as e:
except Exception, e:
return False
if cookie_domain is None:
@@ -643,7 +639,7 @@ class Cookie(object):
cookie_name = self.key
url_scheme, url_domain, url_path, url_params, url_query, url_fragment = urlparse(url)
url_scheme, url_domain, url_path, url_params, url_query, url_fragment = urlparse.urlparse(url)
cookie_expiration = self.get_expiration()
if cookie_expiration is not None:

Binary file not shown.

File diff suppressed because it is too large Load Diff

Binary file not shown.

View File

@@ -1,184 +0,0 @@
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#
from ipapython import p11helper as _ipap11helper
attrs_id2name = {
#_ipap11helper.CKA_ALLOWED_MECHANISMS: 'ipk11allowedmechanisms',
_ipap11helper.CKA_ALWAYS_AUTHENTICATE: 'ipk11alwaysauthenticate',
_ipap11helper.CKA_ALWAYS_SENSITIVE: 'ipk11alwayssensitive',
#_ipap11helper.CKA_CHECK_VALUE: 'ipk11checkvalue',
_ipap11helper.CKA_COPYABLE: 'ipk11copyable',
_ipap11helper.CKA_DECRYPT: 'ipk11decrypt',
_ipap11helper.CKA_DERIVE: 'ipk11derive',
#_ipap11helper.CKA_DESTROYABLE: 'ipk11destroyable',
_ipap11helper.CKA_ENCRYPT: 'ipk11encrypt',
#_ipap11helper.CKA_END_DATE: 'ipk11enddate',
_ipap11helper.CKA_EXTRACTABLE: 'ipk11extractable',
_ipap11helper.CKA_ID: 'ipk11id',
#_ipap11helper.CKA_KEY_GEN_MECHANISM: 'ipk11keygenmechanism',
_ipap11helper.CKA_KEY_TYPE: 'ipk11keytype',
_ipap11helper.CKA_LABEL: 'ipk11label',
_ipap11helper.CKA_LOCAL: 'ipk11local',
_ipap11helper.CKA_MODIFIABLE: 'ipk11modifiable',
_ipap11helper.CKA_NEVER_EXTRACTABLE: 'ipk11neverextractable',
_ipap11helper.CKA_PRIVATE: 'ipk11private',
#_ipap11helper.CKA_PUBLIC_KEY_INFO: 'ipapublickey',
#_ipap11helper.CKA_PUBLIC_KEY_INFO: 'ipk11publickeyinfo',
_ipap11helper.CKA_SENSITIVE: 'ipk11sensitive',
_ipap11helper.CKA_SIGN: 'ipk11sign',
_ipap11helper.CKA_SIGN_RECOVER: 'ipk11signrecover',
#_ipap11helper.CKA_START_DATE: 'ipk11startdate',
#_ipap11helper.CKA_SUBJECT: 'ipk11subject',
_ipap11helper.CKA_TRUSTED: 'ipk11trusted',
_ipap11helper.CKA_UNWRAP: 'ipk11unwrap',
#_ipap11helper.CKA_UNWRAP_TEMPLATE: 'ipk11unwraptemplate',
_ipap11helper.CKA_VERIFY: 'ipk11verify',
_ipap11helper.CKA_VERIFY_RECOVER: 'ipk11verifyrecover',
_ipap11helper.CKA_WRAP: 'ipk11wrap',
#_ipap11helper.CKA_WRAP_TEMPLATE: 'ipk11wraptemplate',
_ipap11helper.CKA_WRAP_WITH_TRUSTED: 'ipk11wrapwithtrusted',
}
attrs_name2id = {v: k for k, v in attrs_id2name.items()}
# attribute:
# http://www.freeipa.org/page/V4/PKCS11_in_LDAP/Schema#ipk11KeyType
#
# mapping table:
# http://www.freeipa.org/page/V4/PKCS11_in_LDAP/Schema#CK_MECHANISM_TYPE
keytype_name2id = {
"rsa": _ipap11helper.KEY_TYPE_RSA,
"aes": _ipap11helper.KEY_TYPE_AES,
}
keytype_id2name = {v: k for k, v in keytype_name2id.items()}
wrappingmech_name2id = {
"rsaPkcs": _ipap11helper.MECH_RSA_PKCS,
"rsaPkcsOaep": _ipap11helper.MECH_RSA_PKCS_OAEP,
"aesKeyWrap": _ipap11helper.MECH_AES_KEY_WRAP,
"aesKeyWrapPad": _ipap11helper.MECH_AES_KEY_WRAP_PAD
}
wrappingmech_id2name = {v: k for k, v in wrappingmech_name2id.items()}
bool_attr_names = set([
'ipk11alwaysauthenticate',
'ipk11alwayssensitive',
'ipk11copyable',
'ipk11decrypt',
'ipk11derive',
'ipk11encrypt',
'ipk11extractable',
'ipk11local',
'ipk11modifiable',
'ipk11neverextractable',
'ipk11private',
'ipk11sensitive',
'ipk11sign',
'ipk11signrecover',
'ipk11trusted',
'ipk11unwrap',
'ipk11verify',
'ipk11verifyrecover',
'ipk11wrap',
'ipk11wrapwithtrusted',
])
modifiable_attrs_id2name = {
_ipap11helper.CKA_DECRYPT: 'ipk11decrypt',
_ipap11helper.CKA_DERIVE: 'ipk11derive',
_ipap11helper.CKA_ENCRYPT: 'ipk11encrypt',
_ipap11helper.CKA_EXTRACTABLE: 'ipk11extractable',
_ipap11helper.CKA_ID: 'ipk11id',
_ipap11helper.CKA_LABEL: 'ipk11label',
_ipap11helper.CKA_SENSITIVE: 'ipk11sensitive',
_ipap11helper.CKA_SIGN: 'ipk11sign',
_ipap11helper.CKA_SIGN_RECOVER: 'ipk11signrecover',
_ipap11helper.CKA_UNWRAP: 'ipk11unwrap',
_ipap11helper.CKA_VERIFY: 'ipk11verify',
_ipap11helper.CKA_VERIFY_RECOVER: 'ipk11verifyrecover',
_ipap11helper.CKA_WRAP: 'ipk11wrap',
}
modifiable_attrs_name2id = {v: k for k, v in modifiable_attrs_id2name.items()}
def sync_pkcs11_metadata(log, source, target):
"""sync ipk11 metadata from source object to target object"""
# iterate over list of modifiable PKCS#11 attributes - this prevents us
# from attempting to set read-only attributes like CKA_LOCAL
for attr in modifiable_attrs_name2id:
if attr in source:
if source[attr] != target[attr]:
log.debug('Updating attribute %s from "%s" to "%s"', attr, repr(source[attr]), repr(target[attr]))
target[attr] = source[attr]
def populate_pkcs11_metadata(source, target):
"""populate all ipk11 metadata attributes in target object from source object"""
for attr in attrs_name2id:
if attr in source:
target[attr] = source[attr]
def ldap2p11helper_api_params(ldap_key):
"""prepare dict with metadata parameters suitable for key unwrapping"""
unwrap_params = {}
# some attributes are just renamed
direct_param_map = {
"ipk11label": "label",
"ipk11id": "id",
"ipk11copyable": "cka_copyable",
"ipk11decrypt": "cka_decrypt",
"ipk11derive": "cka_derive",
"ipk11encrypt": "cka_encrypt",
"ipk11extractable": "cka_extractable",
"ipk11modifiable": "cka_modifiable",
"ipk11private": "cka_private",
"ipk11sensitive": "cka_sensitive",
"ipk11sign": "cka_sign",
"ipk11unwrap": "cka_unwrap",
"ipk11verify": "cka_verify",
"ipk11wrap": "cka_wrap",
"ipk11wrapwithtrusted": "cka_wrap_with_trusted"
}
for ldap_name, p11h_name in direct_param_map.items():
if ldap_name in ldap_key:
unwrap_params[p11h_name] = ldap_key[ldap_name]
# and some others needs conversion
indirect_param_map = {
"ipk11keytype": ("key_type", keytype_name2id),
"ipawrappingmech": ("wrapping_mech", wrappingmech_name2id),
}
for ldap_name, rules in indirect_param_map.items():
p11h_name, mapping = rules
if ldap_name in ldap_key:
unwrap_params[p11h_name] = mapping[ldap_key[ldap_name]]
return unwrap_params
class AbstractHSM(object):
def _filter_replica_keys(self, all_keys):
replica_keys = {}
for key_id, key in all_keys.items():
if not key['ipk11label'].startswith('dnssec-replica:'):
continue
replica_keys[key_id] = key
return replica_keys
def _filter_zone_keys(self, all_keys):
zone_keys = {}
for key_id, key in all_keys.items():
if key['ipk11label'] == u'dnssec-master' \
or key['ipk11label'].startswith('dnssec-replica:'):
continue
zone_keys[key_id] = key
return zone_keys

View File

@@ -1,220 +0,0 @@
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#
from datetime import datetime
import dns.name
import errno
import os
import logging
import shutil
import stat
import subprocess
from ipalib import api
import ipalib.constants
from ipapython.dn import DN
from ipapython import ipa_log_manager, ipautil
from ipaplatform.paths import paths
from ipapython.dnssec.temp import TemporaryDirectory
time_bindfmt = '%Y%m%d%H%M%S'
# this daemon should run under ods:named user:group
# user has to be ods because ODSMgr.py sends signal to ods-enforcerd
FILE_PERM = (stat.S_IRUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IWUSR)
DIR_PERM = (stat.S_IRWXU | stat.S_IRWXG)
class BINDMgr(object):
"""BIND key manager. It does LDAP->BIND key files synchronization.
One LDAP object with idnsSecKey object class will produce
single pair of BIND key files.
"""
def __init__(self, api):
self.api = api
self.log = ipa_log_manager.log_mgr.get_logger(self)
self.ldap_keys = {}
self.modified_zones = set()
def notify_zone(self, zone):
cmd = ['rndc', 'sign', zone.to_text()]
result = ipautil.run(cmd, capture_output=True)
self.log.info('%s', result.output_log)
def dn2zone_name(self, dn):
"""cn=KSK-20140813162153Z-cede9e182fc4af76c4bddbc19123a565,cn=keys,idnsname=test,cn=dns,dc=ipa,dc=example"""
# verify that metadata object is under DNS sub-tree
dn = DN(dn)
container = DN(self.api.env.container_dns, self.api.env.basedn)
idx = dn.rfind(container)
assert idx != -1, 'Metadata object %s is not inside %s' % (dn, container)
assert len(dn[idx - 1]) == 1, 'Multi-valued RDN as zone name is not supported'
return dns.name.from_text(dn[idx - 1]['idnsname'])
def time_ldap2bindfmt(self, str_val):
dt = datetime.strptime(str_val, ipalib.constants.LDAP_GENERALIZED_TIME_FORMAT)
return dt.strftime(time_bindfmt)
def dates2params(self, ldap_attrs):
"""Convert LDAP timestamps to list of parameters suitable
for dnssec-keyfromlabel utility"""
attr2param = {'idnsseckeypublish': '-P',
'idnsseckeyactivate': '-A',
'idnsseckeyinactive': '-I',
'idnsseckeydelete': '-D'}
params = []
for attr, param in attr2param.items():
params.append(param)
if attr in ldap_attrs:
assert len(ldap_attrs[attr]) == 1, 'Timestamp %s is expected to be single-valued' % attr
params.append(self.time_ldap2bindfmt(ldap_attrs[attr][0]))
else:
params.append('none')
return params
def ldap_event(self, op, uuid, attrs):
"""Record single LDAP event - key addition, deletion or modification.
Change is only recorded to memory.
self.sync() has to be called to synchronize change to BIND."""
assert op == 'add' or op == 'del' or op == 'mod'
zone = self.dn2zone_name(attrs['dn'])
self.modified_zones.add(zone)
zone_keys = self.ldap_keys.setdefault(zone, {})
if op == 'add':
self.log.info('Key metadata %s added to zone %s' % (attrs['dn'], zone))
zone_keys[uuid] = attrs
elif op == 'del':
self.log.info('Key metadata %s deleted from zone %s' % (attrs['dn'], zone))
zone_keys.pop(uuid)
elif op == 'mod':
self.log.info('Key metadata %s updated in zone %s' % (attrs['dn'], zone))
zone_keys[uuid] = attrs
def install_key(self, zone, uuid, attrs, workdir):
"""Run dnssec-keyfromlabel on given LDAP object.
:returns: base file name of output files, e.g. Kaaa.test.+008+19719"""
self.log.info('attrs: %s', attrs)
assert attrs.get('idnsseckeyzone', ['FALSE'])[0] == 'TRUE', \
'object %s is not a DNS zone key' % attrs['dn']
uri = "%s;pin-source=%s" % (attrs['idnsSecKeyRef'][0], paths.DNSSEC_SOFTHSM_PIN)
cmd = [paths.DNSSEC_KEYFROMLABEL, '-K', workdir, '-a', attrs['idnsSecAlgorithm'][0], '-l', uri]
cmd += self.dates2params(attrs)
if attrs.get('idnsSecKeySep', ['FALSE'])[0].upper() == 'TRUE':
cmd += ['-f', 'KSK']
if attrs.get('idnsSecKeyRevoke', ['FALSE'])[0].upper() == 'TRUE':
cmd += ['-R', datetime.now().strftime(time_bindfmt)]
cmd.append(zone.to_text())
# keys has to be readable by ODS & named
result = ipautil.run(cmd, capture_output=True)
basename = result.output.strip()
private_fn = "%s/%s.private" % (workdir, basename)
os.chmod(private_fn, FILE_PERM)
# this is useful mainly for debugging
with open("%s/%s.uuid" % (workdir, basename), 'w') as uuid_file:
uuid_file.write(uuid)
with open("%s/%s.dn" % (workdir, basename), 'w') as dn_file:
dn_file.write(attrs['dn'])
def get_zone_dir_name(self, zone):
"""Escape zone name to form suitable for file-system.
This method has to be equivalent to zr_get_zone_path()
in bind-dyndb-ldap/zone_register.c."""
if zone == dns.name.root:
return "@"
# strip final (empty) label
zone = zone.relativize(dns.name.root)
escaped = ""
for label in zone:
for char in label:
c = ord(char)
if ((c >= 0x30 and c <= 0x39) or # digit
(c >= 0x41 and c <= 0x5A) or # uppercase
(c >= 0x61 and c <= 0x7A) or # lowercase
c == 0x2D or # hyphen
c == 0x5F): # underscore
if (c >= 0x41 and c <= 0x5A): # downcase
c += 0x20
escaped += chr(c)
else:
escaped += "%%%02X" % c
escaped += '.'
# strip trailing period
return escaped[:-1]
def sync_zone(self, zone):
self.log.info('Synchronizing zone %s' % zone)
zone_path = os.path.join(paths.BIND_LDAP_DNS_ZONE_WORKDIR,
self.get_zone_dir_name(zone))
try:
os.makedirs(zone_path)
except OSError as e:
if e.errno != errno.EEXIST:
raise e
# fix HSM permissions
# TODO: move out
for prefix, dirs, files in os.walk(paths.DNSSEC_TOKENS_DIR, topdown=True):
for name in dirs:
fpath = os.path.join(prefix, name)
self.log.debug('Fixing directory permissions: %s', fpath)
os.chmod(fpath, DIR_PERM | stat.S_ISGID)
for name in files:
fpath = os.path.join(prefix, name)
self.log.debug('Fixing file permissions: %s', fpath)
os.chmod(fpath, FILE_PERM)
# TODO: move out
with TemporaryDirectory(zone_path) as tempdir:
for uuid, attrs in self.ldap_keys[zone].items():
self.install_key(zone, uuid, attrs, tempdir)
# keys were generated in a temporary directory, swap directories
target_dir = "%s/keys" % zone_path
try:
shutil.rmtree(target_dir)
except OSError as e:
if e.errno != errno.ENOENT:
raise e
shutil.move(tempdir, target_dir)
os.chmod(target_dir, DIR_PERM)
self.notify_zone(zone)
def sync(self, dnssec_zones):
"""Synchronize list of zones in LDAP with BIND.
dnssec_zones lists zones which should be processed. All other zones
will be ignored even though they were modified using ldap_event().
This filter is useful in cases where LDAP contains DNS zones which
have old metadata objects and DNSSEC disabled. Such zones must be
ignored to prevent errors while calling dnssec-keyfromlabel or rndc.
"""
self.log.debug('Key metadata in LDAP: %s' % self.ldap_keys)
self.log.debug('Zones modified but skipped during bindmgr.sync: %s',
self.modified_zones - dnssec_zones)
for zone in self.modified_zones.intersection(dnssec_zones):
self.sync_zone(zone)
self.modified_zones = set()
def diff_zl(self, s1, s2):
"""Compute zones present in s1 but not present in s2.
Returns: List of (uuid, name) tuples with zones present only in s1."""
s1_extra = s1.uuids - s2.uuids
removed = [(uuid, name) for (uuid, name) in s1.mapping.items()
if uuid in s1_extra]
return removed

View File

@@ -1,192 +0,0 @@
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#
import logging
import ldap.dn
import os
import dns.name
from ipaplatform.paths import paths
from ipapython import ipautil
from ipapython.dnssec.syncrepl import SyncReplConsumer
from ipapython.dnssec.odsmgr import ODSMgr
from ipapython.dnssec.bindmgr import BINDMgr
SIGNING_ATTR = 'idnsSecInlineSigning'
OBJCLASS_ATTR = 'objectClass'
class KeySyncer(SyncReplConsumer):
def __init__(self, *args, **kwargs):
# hack
self.api = kwargs['ipa_api']
del kwargs['ipa_api']
# DNSSEC master should have OpenDNSSEC installed
# TODO: Is this the best way?
if os.environ.get('ISMASTER', '0') == '1':
self.ismaster = True
self.odsmgr = ODSMgr()
else:
self.ismaster = False
self.bindmgr = BINDMgr(self.api)
self.init_done = False
self.dnssec_zones = set()
SyncReplConsumer.__init__(self, *args, **kwargs)
def _get_objclass(self, attrs):
"""Get object class.
Given set of attributes has to have exactly one supported object class.
"""
supported_objclasses = set(['idnszone', 'idnsseckey', 'ipk11publickey'])
present_objclasses = set([o.lower() for o in attrs[OBJCLASS_ATTR]]).intersection(supported_objclasses)
assert len(present_objclasses) == 1, attrs[OBJCLASS_ATTR]
return present_objclasses.pop()
def __get_signing_attr(self, attrs):
"""Get SIGNING_ATTR from dictionary with LDAP zone attributes.
Returned value is normalized to TRUE or FALSE, defaults to FALSE."""
values = attrs.get(SIGNING_ATTR, ['FALSE'])
assert len(values) == 1, '%s is expected to be single-valued' \
% SIGNING_ATTR
return values[0].upper()
def __is_dnssec_enabled(self, attrs):
"""Test if LDAP DNS zone with given attributes is DNSSEC enabled."""
return self.__get_signing_attr(attrs) == 'TRUE'
def __is_replica_pubkey(self, attrs):
vals = attrs.get('ipk11label', [])
if len(vals) != 1:
return False
return vals[0].startswith('dnssec-replica:')
def application_add(self, uuid, dn, newattrs):
objclass = self._get_objclass(newattrs)
if objclass == 'idnszone':
self.zone_add(uuid, dn, newattrs)
elif objclass == 'idnsseckey':
self.key_meta_add(uuid, dn, newattrs)
elif objclass == 'ipk11publickey' and \
self.__is_replica_pubkey(newattrs):
self.hsm_master_sync()
def application_del(self, uuid, dn, oldattrs):
objclass = self._get_objclass(oldattrs)
if objclass == 'idnszone':
self.zone_del(uuid, dn, oldattrs)
elif objclass == 'idnsseckey':
self.key_meta_del(uuid, dn, oldattrs)
elif objclass == 'ipk11publickey' and \
self.__is_replica_pubkey(oldattrs):
self.hsm_master_sync()
def application_sync(self, uuid, dn, newattrs, oldattrs):
objclass = self._get_objclass(oldattrs)
if objclass == 'idnszone':
olddn = ldap.dn.str2dn(oldattrs['dn'])
newdn = ldap.dn.str2dn(newattrs['dn'])
assert olddn == newdn, 'modrdn operation is not supported'
oldval = self.__get_signing_attr(oldattrs)
newval = self.__get_signing_attr(newattrs)
if oldval != newval:
if self.__is_dnssec_enabled(newattrs):
self.zone_add(uuid, olddn, newattrs)
else:
self.zone_del(uuid, olddn, oldattrs)
elif objclass == 'idnsseckey':
self.key_metadata_sync(uuid, dn, oldattrs, newattrs)
elif objclass == 'ipk11publickey' and \
self.__is_replica_pubkey(newattrs):
self.hsm_master_sync()
def syncrepl_refreshdone(self):
self.log.info('Initial LDAP dump is done, sychronizing with ODS and BIND')
self.init_done = True
self.ods_sync()
self.hsm_replica_sync()
self.hsm_master_sync()
self.bindmgr.sync(self.dnssec_zones)
# idnsSecKey wrapper
# Assumption: metadata points to the same key blob all the time,
# i.e. it is not necessary to re-download blobs because of change in DNSSEC
# metadata - DNSSEC flags or timestamps.
def key_meta_add(self, uuid, dn, newattrs):
self.hsm_replica_sync()
self.bindmgr.ldap_event('add', uuid, newattrs)
self.bindmgr_sync(self.dnssec_zones)
def key_meta_del(self, uuid, dn, oldattrs):
self.bindmgr.ldap_event('del', uuid, oldattrs)
self.bindmgr_sync(self.dnssec_zones)
self.hsm_replica_sync()
def key_metadata_sync(self, uuid, dn, oldattrs, newattrs):
self.bindmgr.ldap_event('mod', uuid, newattrs)
self.bindmgr_sync(self.dnssec_zones)
def bindmgr_sync(self, dnssec_zones):
if self.init_done:
self.bindmgr.sync(dnssec_zones)
# idnsZone wrapper
def zone_add(self, uuid, dn, newattrs):
zone = dns.name.from_text(newattrs['idnsname'][0])
if self.__is_dnssec_enabled(newattrs):
self.dnssec_zones.add(zone)
else:
self.dnssec_zones.discard(zone)
if not self.ismaster:
return
if self.__is_dnssec_enabled(newattrs):
self.odsmgr.ldap_event('add', uuid, newattrs)
self.ods_sync()
def zone_del(self, uuid, dn, oldattrs):
zone = dns.name.from_text(oldattrs['idnsname'][0])
self.dnssec_zones.discard(zone)
if not self.ismaster:
return
if self.__is_dnssec_enabled(oldattrs):
self.odsmgr.ldap_event('del', uuid, oldattrs)
self.ods_sync()
def ods_sync(self):
if not self.ismaster:
return
if self.init_done:
self.odsmgr.sync()
# triggered by modification to idnsSecKey objects
def hsm_replica_sync(self):
"""Download keys from LDAP to local HSM."""
if self.ismaster:
return
if not self.init_done:
return
ipautil.run([paths.IPA_DNSKEYSYNCD_REPLICA])
# triggered by modification to ipk11PublicKey objects
def hsm_master_sync(self):
"""Download replica keys from LDAP to local HSM
& upload master and zone keys to LDAP."""
if not self.ismaster:
return
if not self.init_done:
return
ipautil.run([paths.ODS_SIGNER, 'ipa-hsm-update'])

View File

@@ -1,455 +0,0 @@
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#
from __future__ import print_function
from binascii import hexlify
import collections
import logging
from pprint import pprint
import sys
import time
import ipalib
from ipapython.dn import DN
from ipapython import ipaldap
from ipapython import ipautil
from ipapython import ipa_log_manager
from ipaplatform.paths import paths
from ipapython.dnssec.abshsm import (
attrs_name2id,
AbstractHSM,
bool_attr_names,
populate_pkcs11_metadata)
from ipapython import p11helper as _ipap11helper
import uuid
def uri_escape(val):
"""convert val to %-notation suitable for ID component in URI"""
assert len(val) > 0, "zero-length URI component detected"
hexval = hexlify(val)
out = '%'
# pylint: disable=E1127
out += '%'.join(hexval[i:i+2] for i in range(0, len(hexval), 2))
return out
def ldap_bool(val):
if val == 'TRUE' or val is True:
return True
elif val == 'FALSE' or val is False:
return False
else:
raise AssertionError('invalid LDAP boolean "%s"' % val)
def get_default_attrs(object_classes):
# object class -> default attribute values mapping
defaults = {
u'ipk11publickey': {
'ipk11copyable': True,
'ipk11derive': False,
'ipk11encrypt': False,
'ipk11local': True,
'ipk11modifiable': True,
'ipk11private': True,
'ipk11trusted': False,
'ipk11verify': True,
'ipk11verifyrecover': True,
'ipk11wrap': False
},
u'ipk11privatekey': {
'ipk11alwaysauthenticate': False,
'ipk11alwayssensitive': True,
'ipk11copyable': True,
'ipk11decrypt': False,
'ipk11derive': False,
'ipk11extractable': True,
'ipk11local': True,
'ipk11modifiable': True,
'ipk11neverextractable': False,
'ipk11private': True,
'ipk11sensitive': True,
'ipk11sign': True,
'ipk11signrecover': True,
'ipk11unwrap': False,
'ipk11wrapwithtrusted': False
},
u'ipk11secretkey': {
'ipk11alwaysauthenticate': False,
'ipk11alwayssensitive': True,
'ipk11copyable': True,
'ipk11decrypt': False,
'ipk11derive': False,
'ipk11encrypt': False,
'ipk11extractable': True,
'ipk11local': True,
'ipk11modifiable': True,
'ipk11neverextractable': False,
'ipk11private': True,
'ipk11sensitive': True,
'ipk11sign': False,
'ipk11trusted': False,
'ipk11unwrap': True,
'ipk11verify': False,
'ipk11wrap': True,
'ipk11wrapwithtrusted': False
}
}
# get set of supported object classes
present_clss = set()
for cls in object_classes:
present_clss.add(cls.lower())
present_clss.intersection_update(set(defaults.keys()))
if len(present_clss) <= 0:
raise AssertionError('none of "%s" object classes are supported' %
object_classes)
result = {}
for cls in present_clss:
result.update(defaults[cls])
return result
class Key(collections.MutableMapping):
"""abstraction to hide LDAP entry weirdnesses:
- non-normalized attribute names
- boolean attributes returned as strings
- planned entry deletion prevents subsequent use of the instance
"""
def __init__(self, entry, ldap, ldapkeydb):
self.entry = entry
self._delentry = None # indicates that object was deleted
self.ldap = ldap
self.ldapkeydb = ldapkeydb
self.log = ldap.log.getChild(__name__)
def __assert_not_deleted(self):
assert self.entry and not self._delentry, (
"attempt to use to-be-deleted entry %s detected"
% self._delentry.dn)
def __getitem__(self, key):
self.__assert_not_deleted()
val = self.entry.single_value[key]
if key.lower() in bool_attr_names:
val = ldap_bool(val)
return val
def __setitem__(self, key, value):
self.__assert_not_deleted()
self.entry[key] = value
def __delitem__(self, key):
self.__assert_not_deleted()
del self.entry[key]
def __iter__(self):
"""generates list of ipa names of all PKCS#11 attributes present in the object"""
self.__assert_not_deleted()
for ipa_name in list(self.entry.keys()):
lowercase = ipa_name.lower()
if lowercase in attrs_name2id:
yield lowercase
def __len__(self):
self.__assert_not_deleted()
return len(self.entry)
def __repr__(self):
if self._delentry:
return 'deleted entry: %s' % repr(self._delentry)
sanitized = dict(self.entry)
for attr in ['ipaPrivateKey', 'ipaPublicKey', 'ipk11publickeyinfo']:
if attr in sanitized:
del sanitized[attr]
return repr(sanitized)
def _cleanup_key(self):
"""remove default values from LDAP entry"""
default_attrs = get_default_attrs(self.entry['objectclass'])
empty = object()
for attr in default_attrs:
if self.get(attr, empty) == default_attrs[attr]:
del self[attr]
def _update_key(self):
"""remove default values from LDAP entry and write back changes"""
if self._delentry:
self._delete_key()
return
self._cleanup_key()
try:
self.ldap.update_entry(self.entry)
except ipalib.errors.EmptyModlist:
pass
def _delete_key(self):
"""remove key metadata entry from LDAP
After calling this, the python object is no longer valid and all
subsequent method calls on it will fail.
"""
assert not self.entry, (
"Key._delete_key() called before Key.schedule_deletion()")
assert self._delentry, "Key._delete_key() called more than once"
self.log.debug('deleting key id 0x%s DN %s from LDAP',
hexlify(self._delentry.single_value['ipk11id']),
self._delentry.dn)
self.ldap.delete_entry(self._delentry)
self._delentry = None
self.ldap = None
self.ldapkeydb = None
def schedule_deletion(self):
"""schedule key deletion from LDAP
Calling schedule_deletion() will make this object incompatible with
normal Key. After that the object must not be read or modified.
Key metadata will be actually deleted when LdapKeyDB.flush() is called.
"""
assert not self._delentry, (
"Key.schedule_deletion() called more than once")
self._delentry = self.entry
self.entry = None
class ReplicaKey(Key):
# TODO: object class assert
def __init__(self, entry, ldap, ldapkeydb):
super(ReplicaKey, self).__init__(entry, ldap, ldapkeydb)
class MasterKey(Key):
# TODO: object class assert
def __init__(self, entry, ldap, ldapkeydb):
super(MasterKey, self).__init__(entry, ldap, ldapkeydb)
@property
def wrapped_entries(self):
"""LDAP entires with wrapped data
One entry = one blob + ipaWrappingKey pointer to unwrapping key"""
keys = []
if 'ipaSecretKeyRef' not in self.entry:
return keys
for dn in self.entry['ipaSecretKeyRef']:
try:
obj = self.ldap.get_entry(dn)
keys.append(obj)
except ipalib.errors.NotFound:
continue
return keys
def add_wrapped_data(self, data, wrapping_mech, replica_key_id):
wrapping_key_uri = 'pkcs11:id=%s;type=public' \
% uri_escape(replica_key_id)
# TODO: replace this with 'autogenerate' to prevent collisions
uuid_rdn = DN('ipk11UniqueId=%s' % uuid.uuid1())
entry_dn = DN(uuid_rdn, self.ldapkeydb.base_dn)
entry = self.ldap.make_entry(entry_dn,
objectClass=['ipaSecretKeyObject', 'ipk11Object'],
ipaSecretKey=data,
ipaWrappingKey=wrapping_key_uri,
ipaWrappingMech=wrapping_mech)
self.log.info('adding master key 0x%s wrapped with replica key 0x%s to %s',
hexlify(self['ipk11id']),
hexlify(replica_key_id),
entry_dn)
self.ldap.add_entry(entry)
if 'ipaSecretKeyRef' not in self.entry:
self.entry['objectClass'] += ['ipaSecretKeyRefObject']
self.entry.setdefault('ipaSecretKeyRef', []).append(entry_dn)
class LdapKeyDB(AbstractHSM):
def __init__(self, log, ldap, base_dn):
self.ldap = ldap
self.base_dn = base_dn
self.log = log
self.cache_replica_pubkeys_wrap = None
self.cache_masterkeys = None
self.cache_zone_keypairs = None
def _get_key_dict(self, key_type, ldap_filter):
try:
objs = self.ldap.get_entries(base_dn=self.base_dn,
filter=ldap_filter)
except ipalib.errors.NotFound:
return {}
keys = {}
for o in objs:
# add default values not present in LDAP
key = key_type(o, self.ldap, self)
default_attrs = get_default_attrs(key.entry['objectclass'])
for attr in default_attrs:
key.setdefault(attr, default_attrs[attr])
assert 'ipk11id' in key, 'key is missing ipk11Id in %s' % key.entry.dn
key_id = key['ipk11id']
assert key_id not in keys, 'duplicate ipk11Id=0x%s in "%s" and "%s"' % (hexlify(key_id), key.entry.dn, keys[key_id].entry.dn)
assert 'ipk11label' in key, 'key "%s" is missing ipk11Label' % key.entry.dn
assert 'objectclass' in key.entry, 'key "%s" is missing objectClass attribute' % key.entry.dn
keys[key_id] = key
self._update_keys()
return keys
def _update_keys(self):
for cache in [self.cache_masterkeys, self.cache_replica_pubkeys_wrap,
self.cache_zone_keypairs]:
if cache:
for key in cache.values():
key._update_key()
def flush(self):
"""write back content of caches to LDAP"""
self._update_keys()
self.cache_masterkeys = None
self.cache_replica_pubkeys_wrap = None
self.cache_zone_keypairs = None
def _import_keys_metadata(self, source_keys):
"""import key metadata from Key-compatible objects
metadata from multiple source keys can be imported into single LDAP
object
:param: source_keys is iterable of (Key object, PKCS#11 object class)"""
entry_dn = DN('ipk11UniqueId=autogenerate', self.base_dn)
entry = self.ldap.make_entry(entry_dn, objectClass=['ipk11Object'])
new_key = Key(entry, self.ldap, self)
for source_key, pkcs11_class in source_keys:
if pkcs11_class == _ipap11helper.KEY_CLASS_SECRET_KEY:
entry['objectClass'].append('ipk11SecretKey')
elif pkcs11_class == _ipap11helper.KEY_CLASS_PUBLIC_KEY:
entry['objectClass'].append('ipk11PublicKey')
elif pkcs11_class == _ipap11helper.KEY_CLASS_PRIVATE_KEY:
entry['objectClass'].append('ipk11PrivateKey')
else:
raise AssertionError('unsupported object class %s' % pkcs11_class)
populate_pkcs11_metadata(source_key, new_key)
new_key._cleanup_key()
return new_key
def import_master_key(self, mkey):
new_key = self._import_keys_metadata(
[(mkey, _ipap11helper.KEY_CLASS_SECRET_KEY)])
self.ldap.add_entry(new_key.entry)
self.log.debug('imported master key metadata: %s', new_key.entry)
def import_zone_key(self, pubkey, pubkey_data, privkey,
privkey_wrapped_data, wrapping_mech, master_key_id):
new_key = self._import_keys_metadata(
[(pubkey, _ipap11helper.KEY_CLASS_PUBLIC_KEY),
(privkey, _ipap11helper.KEY_CLASS_PRIVATE_KEY)])
new_key.entry['objectClass'].append('ipaPrivateKeyObject')
new_key.entry['ipaPrivateKey'] = privkey_wrapped_data
new_key.entry['ipaWrappingKey'] = 'pkcs11:id=%s;type=secret-key' \
% uri_escape(master_key_id)
new_key.entry['ipaWrappingMech'] = wrapping_mech
new_key.entry['objectClass'].append('ipaPublicKeyObject')
new_key.entry['ipaPublicKey'] = pubkey_data
self.ldap.add_entry(new_key.entry)
self.log.debug('imported zone key id: 0x%s', hexlify(new_key['ipk11id']))
@property
def replica_pubkeys_wrap(self):
if self.cache_replica_pubkeys_wrap:
return self.cache_replica_pubkeys_wrap
keys = self._filter_replica_keys(
self._get_key_dict(ReplicaKey,
'(&(objectClass=ipk11PublicKey)(ipk11Wrap=TRUE)(objectClass=ipaPublicKeyObject))'))
self.cache_replica_pubkeys_wrap = keys
return keys
@property
def master_keys(self):
if self.cache_masterkeys:
return self.cache_masterkeys
keys = self._get_key_dict(MasterKey,
'(&(objectClass=ipk11SecretKey)(|(ipk11UnWrap=TRUE)(!(ipk11UnWrap=*)))(ipk11Label=dnssec-master))')
for key in keys.values():
prefix = 'dnssec-master'
assert key['ipk11label'] == prefix, \
'secret key dn="%s" ipk11id=0x%s ipk11label="%s" with ipk11UnWrap = TRUE does not have '\
'"%s" key label' % (
key.entry.dn,
hexlify(key['ipk11id']),
str(key['ipk11label']),
prefix)
self.cache_masterkeys = keys
return keys
@property
def zone_keypairs(self):
if self.cache_zone_keypairs:
return self.cache_zone_keypairs
self.cache_zone_keypairs = self._filter_zone_keys(
self._get_key_dict(Key,
'(&(objectClass=ipk11PrivateKey)(objectClass=ipaPrivateKeyObject)(objectClass=ipk11PublicKey)(objectClass=ipaPublicKeyObject))'))
return self.cache_zone_keypairs
if __name__ == '__main__':
# this is debugging mode
# print information we think are useful to stdout
# other garbage goes via logger to stderr
ipa_log_manager.standard_logging_setup(debug=True)
log = ipa_log_manager.root_logger
# IPA framework initialization
ipalib.api.bootstrap(in_server=True, log=None) # no logging to file
ipalib.api.finalize()
# LDAP initialization
dns_dn = DN(ipalib.api.env.container_dns, ipalib.api.env.basedn)
ldap = ipaldap.LDAPClient(ipalib.api.env.ldap_uri)
log.debug('Connecting to LDAP')
# GSSAPI will be used, used has to be kinited already
ldap.gssapi_bind()
log.debug('Connected')
ldapkeydb = LdapKeyDB(log, ldap, DN(('cn', 'keys'), ('cn', 'sec'),
ipalib.api.env.container_dns,
ipalib.api.env.basedn))
print('replica public keys: CKA_WRAP = TRUE')
print('====================================')
for pubkey_id, pubkey in ldapkeydb.replica_pubkeys_wrap.items():
print(hexlify(pubkey_id))
pprint(pubkey)
print('')
print('master keys')
print('===========')
for mkey_id, mkey in ldapkeydb.master_keys.items():
print(hexlify(mkey_id))
pprint(mkey)
print('')
print('zone key pairs')
print('==============')
for key_id, key in ldapkeydb.zone_keypairs.items():
print(hexlify(key_id))
pprint(key)

View File

@@ -1,229 +0,0 @@
#!/usr/bin/python2
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#
from __future__ import print_function
from binascii import hexlify
import collections
import logging
import os
from pprint import pprint
import sys
import time
from ipaplatform.paths import paths
from ipapython import p11helper as _ipap11helper
from ipapython.dnssec.abshsm import (attrs_name2id, attrs_id2name, AbstractHSM,
keytype_id2name, keytype_name2id,
ldap2p11helper_api_params)
private_key_api_params = set(["label", "id", "data", "unwrapping_key",
"wrapping_mech", "key_type", "cka_always_authenticate", "cka_copyable",
"cka_decrypt", "cka_derive", "cka_extractable", "cka_modifiable",
"cka_private", "cka_sensitive", "cka_sign", "cka_sign_recover",
"cka_unwrap", "cka_wrap_with_trusted"])
public_key_api_params = set(["label", "id", "data", "cka_copyable",
"cka_derive", "cka_encrypt", "cka_modifiable", "cka_private",
"cka_trusted", "cka_verify", "cka_verify_recover", "cka_wrap"])
class Key(collections.MutableMapping):
    """Read/write dict-like wrapper around one PKCS#11 key object.

    Mapping keys are IPA LDAP attribute names (``ipk11*``); values are
    read from / written to the underlying PKCS#11 object attributes on
    every access (nothing is cached).
    """

    def __init__(self, p11, handle):
        # p11: P11_Helper session used for all attribute access
        # handle: PKCS#11 object handle of the wrapped key
        self.p11 = p11
        self.handle = handle

        # sanity check CKA_ID and CKA_LABEL: both must exist and be
        # non-empty, otherwise the object is unusable for IPA
        try:
            cka_id = self.p11.get_attribute(handle, _ipap11helper.CKA_ID)
            assert len(cka_id) != 0, 'ipk11id length should not be 0'
        except _ipap11helper.NotFound:
            raise _ipap11helper.NotFound('key without ipk11id: handle %s' % handle)

        try:
            cka_label = self.p11.get_attribute(handle, _ipap11helper.CKA_LABEL)
            assert len(cka_label) != 0, 'ipk11label length should not be 0'
        except _ipap11helper.NotFound:
            raise _ipap11helper.NotFound('key without ipk11label: id 0x%s'
                % hexlify(cka_id))

    def __getitem__(self, key):
        """Read one attribute; raises KeyError if the object lacks it."""
        key = key.lower()
        try:
            value = self.p11.get_attribute(self.handle, attrs_name2id[key])
            # translate numeric key-type constant to its symbolic name
            if key == 'ipk11keytype':
                value = keytype_id2name[value]
            return value
        except _ipap11helper.NotFound:
            raise KeyError()

    def __setitem__(self, key, value):
        """Write one attribute; symbolic key type is translated back."""
        key = key.lower()
        if key == 'ipk11keytype':
            value = keytype_name2id[value]

        return self.p11.set_attribute(self.handle, attrs_name2id[key], value)

    def __delitem__(self, key):
        # PKCS#11 object attributes cannot be removed, only overwritten
        raise _ipap11helper.P11HelperException('__delitem__ is not supported')

    def __iter__(self):
        """generates list of ipa names of all attributes present in the object"""
        for pkcs11_id, ipa_name in attrs_id2name.items():
            try:
                self.p11.get_attribute(self.handle, pkcs11_id)
            except _ipap11helper.NotFound:
                continue

            yield ipa_name

    def __len__(self):
        # PKCS#11 has no cheap attribute count: probe each known attribute
        cnt = 0
        for attr in self:
            cnt += 1
        return cnt

    def __str__(self):
        return str(dict(self))

    def __repr__(self):
        return self.__str__()
class LocalHSM(AbstractHSM):
    """Access to DNSSEC keys stored in a local PKCS#11 token (SoftHSM).

    All lookups go through :meth:`find_keys`; the ``@property`` views
    below filter the results for replica keys, master keys and zone keys.
    """

    def __init__(self, library, slot, pin):
        # kept for interface parity with the LDAP-backed store; unused here
        self.cache_replica_pubkeys = None
        self.p11 = _ipap11helper.P11_Helper(slot, pin, library)
        self.log = logging.getLogger()

    def __del__(self):
        # close the PKCS#11 session when the object is collected
        self.p11.finalize()

    def find_keys(self, **kwargs):
        """Return dict with Key objects matching given criteria.

        CKA_ID is used as key so all matching objects have to have unique ID."""
        # this is a hack for old p11-kit URI parser
        # see https://bugs.freedesktop.org/show_bug.cgi?id=85057
        if 'uri' in kwargs:
            kwargs['uri'] = kwargs['uri'].replace('type=', 'object-type=')
        handles = self.p11.find_keys(**kwargs)
        keys = {}
        for h in handles:
            key = Key(self.p11, h)
            o_id = key['ipk11id']
            assert o_id not in keys, 'duplicate ipk11Id = 0x%s; keys = %s' % (
                hexlify(o_id), keys)
            keys[o_id] = key

        return keys

    @property
    def replica_pubkeys(self):
        # all replica public keys, regardless of wrapping capability
        return self._filter_replica_keys(
            self.find_keys(objclass=_ipap11helper.KEY_CLASS_PUBLIC_KEY))

    @property
    def replica_pubkeys_wrap(self):
        # replica public keys usable for key wrapping (CKA_WRAP = TRUE)
        return self._filter_replica_keys(
            self.find_keys(objclass=_ipap11helper.KEY_CLASS_PUBLIC_KEY,
                           cka_wrap=True))

    @property
    def master_keys(self):
        """Get all usable DNSSEC master keys"""
        keys = self.find_keys(objclass=_ipap11helper.KEY_CLASS_SECRET_KEY, label=u'dnssec-master', cka_unwrap=True)

        for key in keys.values():
            prefix = 'dnssec-master'
            assert key['ipk11label'] == prefix, \
                'secret key ipk11id=0x%s ipk11label="%s" with ipk11UnWrap = TRUE does not have '\
                '"%s" key label' % (hexlify(key['ipk11id']),
                                    str(key['ipk11label']), prefix)

        return keys

    @property
    def active_master_key(self):
        """Get one active DNSSEC master key suitable for key wrapping"""
        keys = self.find_keys(objclass=_ipap11helper.KEY_CLASS_SECRET_KEY,
                              label=u'dnssec-master', cka_wrap=True, cka_unwrap=True)
        assert len(keys) > 0, "DNSSEC master key with UN/WRAP = TRUE not found"
        # popitem() picks an arbitrary key when several are active
        return keys.popitem()[1]

    @property
    def zone_pubkeys(self):
        return self._filter_zone_keys(
            self.find_keys(objclass=_ipap11helper.KEY_CLASS_PUBLIC_KEY))

    @property
    def zone_privkeys(self):
        return self._filter_zone_keys(
            self.find_keys(objclass=_ipap11helper.KEY_CLASS_PRIVATE_KEY))

    def import_public_key(self, source, data):
        """Import a public key into the token.

        :param source: mapping of LDAP attributes describing the key
        :param data: raw public key material
        :return: Key wrapper for the newly created object
        """
        params = ldap2p11helper_api_params(source)
        # filter out params inappropriate for public keys
        for par in set(params.keys()).difference(public_key_api_params):
            del params[par]
        params['data'] = data

        h = self.p11.import_public_key(**params)
        return Key(self.p11, h)

    def import_private_key(self, source, data, unwrapping_key):
        """Import a wrapped private key into the token.

        :param source: mapping of LDAP attributes describing the key
        :param data: wrapped private key blob
        :param unwrapping_key: Key whose handle unwraps *data*
        :return: Key wrapper for the newly created object
        """
        params = ldap2p11helper_api_params(source)
        # filter out params inappropriate for private keys
        for par in set(params.keys()).difference(private_key_api_params):
            del params[par]
        params['data'] = data
        params['unwrapping_key'] = unwrapping_key.handle

        h = self.p11.import_wrapped_private_key(**params)
        return Key(self.p11, h)
if __name__ == '__main__':
    # debug helper: dump all keys stored in the local SoftHSM token
    if 'SOFTHSM2_CONF' not in os.environ:
        os.environ['SOFTHSM2_CONF'] = paths.DNSSEC_SOFTHSM2_CONF

    localhsm = LocalHSM(paths.LIBSOFTHSM2_SO, 0,
                        open(paths.DNSSEC_SOFTHSM_PIN).read())

    print('replica public keys: CKA_WRAP = TRUE')
    print('====================================')
    for pubkey_id, pubkey in localhsm.replica_pubkeys_wrap.items():
        print(hexlify(pubkey_id))
        pprint(pubkey)
        print('')

    print('replica public keys: all')
    print('========================')
    for pubkey_id, pubkey in localhsm.replica_pubkeys.items():
        print(hexlify(pubkey_id))
        pprint(pubkey)
        print('')

    print('master keys')
    print('===========')
    for mkey_id, mkey in localhsm.master_keys.items():
        print(hexlify(mkey_id))
        pprint(mkey)
        print('')

    print('zone public keys')
    print('================')
    for key_id, key in localhsm.zone_pubkeys.items():
        print(hexlify(key_id))
        pprint(key)
        print('')

    print('zone private keys')
    print('=================')
    for key_id, key in localhsm.zone_privkeys.items():
        print(hexlify(key_id))
        pprint(key)

View File

@@ -1,204 +0,0 @@
#!/usr/bin/python2
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#
import logging
from lxml import etree
import dns.name
import subprocess
from ipapython import ipa_log_manager, ipautil
# hack: zone object UUID is stored as path to imaginary zone file
ENTRYUUID_PREFIX = "/var/lib/ipa/dns/zone/entryUUID/"
ENTRYUUID_PREFIX_LEN = len(ENTRYUUID_PREFIX)
class ZoneListReader(object):
    """Track a set of DNS zones by name and UUID.

    Invariant: ``names``, ``uuids`` and ``mapping`` stay consistent —
    every (UUID -> name) pair in ``mapping`` has its name in ``names``
    and its UUID in ``uuids``.  Violations are caught with asserts.
    """

    def __init__(self):
        self.names = set()     # set of dns.name.Name objects
        self.uuids = set()     # set of UUID strings
        self.mapping = dict()  # {UUID: dns.name}
        self.log = ipa_log_manager.log_mgr.get_logger(self)

    def _add_zone(self, name, zid):
        """Add zone & UUID to internal structures.

        Zone with given name and UUID must not exist."""
        # detect duplicate zone names
        name = dns.name.from_text(name)
        assert name not in self.names, \
            'duplicate name (%s, %s) vs. %s' % (name, zid, self.mapping)
        # duplicate non-None zid is not allowed
        assert not zid or zid not in self.uuids, \
            'duplicate UUID (%s, %s) vs. %s' % (name, zid, self.mapping)

        self.names.add(name)
        self.uuids.add(zid)
        self.mapping[zid] = name

    def _del_zone(self, name, zid):
        """Remove zone & UUID from internal structures.

        Zone with given name and UUID must exist.
        """
        name = dns.name.from_text(name)
        assert zid is not None
        assert name in self.names, \
            'name (%s, %s) does not exist in %s' % (name, zid, self.mapping)
        assert zid in self.uuids, \
            'UUID (%s, %s) does not exist in %s' % (name, zid, self.mapping)
        assert zid in self.mapping and name == self.mapping[zid], \
            'pair {%s: %s} does not exist in %s' % (zid, name, self.mapping)

        self.names.remove(name)
        self.uuids.remove(zid)
        del self.mapping[zid]
class ODSZoneListReader(ZoneListReader):
    """One-shot parser for ODS zonelist.xml."""

    def __init__(self, zonelist_text):
        """Parse *zonelist_text* (XML string) and populate the base-class
        name/UUID structures."""
        super(ODSZoneListReader, self).__init__()
        xml = etree.fromstring(zonelist_text)
        self._parse_zonelist(xml)

    def _parse_zonelist(self, xml):
        """iterate over Zone elements with attribute 'name' and
        add IPA zones to self.zones"""
        for zone_xml in xml.xpath('/ZoneList/Zone[@name]'):
            name, zid = self._parse_ipa_zone(zone_xml)
            self._add_zone(name, zid)

    def _parse_ipa_zone(self, zone_xml):
        """Extract zone name, input adapter and detect IPA zones.

        IPA zones contain an Adapters/Input/Adapter element with
        attribute type = "File" and with value prefixed with ENTRYUUID_PREFIX.

        Returns:
            tuple (zone name, ID)
        """
        name = zone_xml.get('name')
        in_adapters = zone_xml.xpath(
            'Adapters/Input/Adapter[@type="File" '
            'and starts-with(text(), "%s")]' % ENTRYUUID_PREFIX)
        # non-IPA zones (different adapter) are a hard error by design
        assert len(in_adapters) == 1, 'only IPA zones are supported: %s' \
            % etree.tostring(zone_xml)
        path = in_adapters[0].text
        # strip prefix from path: the remainder is the zone's entryUUID
        zid = path[ENTRYUUID_PREFIX_LEN:]
        return (name, zid)
class LDAPZoneListReader(ZoneListReader):
    """Incremental zone list fed by LDAP zone add/del events."""

    def __init__(self):
        super(LDAPZoneListReader, self).__init__()

    def process_ipa_zone(self, op, uuid, zone_ldap):
        """Apply one LDAP event for zone *uuid*.

        :param op: 'add' or 'del'
        :param uuid: entryUUID of the zone; must not be None
        :param zone_ldap: LDAP attribute dict; must contain exactly one
            idnsName value
        """
        assert (op == 'add' or op == 'del'), 'unsupported op %s' % op
        assert uuid is not None
        assert 'idnsname' in zone_ldap, \
            'LDAP zone UUID %s without idnsName' % uuid
        assert len(zone_ldap['idnsname']) == 1, \
            'LDAP zone UUID %s with len(idnsname) != 1' % uuid

        if op == 'add':
            self._add_zone(zone_ldap['idnsname'][0], uuid)
        elif op == 'del':
            self._del_zone(zone_ldap['idnsname'][0], uuid)
class ODSMgr(object):
    """OpenDNSSEC zone manager. It does LDAP->ODS synchronization.

    Zones with idnsSecInlineSigning attribute = TRUE in LDAP are added
    or deleted from ODS as necessary. ODS->LDAP key synchronization
    has to be solved separately.
    """

    def __init__(self):
        self.log = ipa_log_manager.log_mgr.get_logger(self)
        # in-memory view of IPA zones, updated via ldap_event()
        self.zl_ldap = LDAPZoneListReader()

    def ksmutil(self, params):
        """Call ods-ksmutil with given parameters and return stdout.

        Raises CalledProcessError if returncode != 0.
        """
        cmd = ['ods-ksmutil'] + params
        result = ipautil.run(cmd, capture_output=True)
        return result.output

    def get_ods_zonelist(self):
        """Export the zone list from ODS and return a parsed reader."""
        stdout = self.ksmutil(['zonelist', 'export'])
        reader = ODSZoneListReader(stdout)
        return reader

    def add_ods_zone(self, uuid, name):
        """Add a zone to ODS, wiring its input to the entryUUID path."""
        zone_path = '%s%s' % (ENTRYUUID_PREFIX, uuid)
        cmd = ['zone', 'add', '--zone', str(name), '--input', zone_path]
        output = self.ksmutil(cmd)
        self.log.info(output)
        self.notify_enforcer()

    def del_ods_zone(self, name):
        """Delete a zone from ODS and clean up its signer state."""
        # ods-ksmutil blows up if zone name has period at the end
        name = name.relativize(dns.name.root)
        # detect if name is root zone
        if name == dns.name.empty:
            name = dns.name.root
        cmd = ['zone', 'delete', '--zone', str(name)]
        output = self.ksmutil(cmd)
        self.log.info(output)
        self.notify_enforcer()
        self.cleanup_signer(name)

    def notify_enforcer(self):
        """Tell the ODS enforcer to pick up zone list changes."""
        cmd = ['notify']
        output = self.ksmutil(cmd)
        self.log.info(output)

    def cleanup_signer(self, zone_name):
        """Remove leftover signer state for a deleted zone."""
        cmd = ['ods-signer', 'ldap-cleanup', str(zone_name)]
        output = ipautil.run(cmd, capture_output=True)
        self.log.info(output)

    def ldap_event(self, op, uuid, attrs):
        """Record single LDAP event - zone addition or deletion.

        Change is only recorded to memory.
        self.sync() has to be called to synchronize change to ODS."""
        assert op == 'add' or op == 'del'
        self.zl_ldap.process_ipa_zone(op, uuid, attrs)
        self.log.debug("LDAP zones: %s", self.zl_ldap.mapping)

    def sync(self):
        """Synchronize list of zones in LDAP with ODS."""
        zl_ods = self.get_ods_zonelist()
        self.log.debug("ODS zones: %s", zl_ods.mapping)
        removed = self.diff_zl(zl_ods, self.zl_ldap)
        self.log.info("Zones removed from LDAP: %s", removed)
        added = self.diff_zl(self.zl_ldap, zl_ods)
        self.log.info("Zones added to LDAP: %s", added)
        for (uuid, name) in removed:
            self.del_ods_zone(name)
        for (uuid, name) in added:
            self.add_ods_zone(uuid, name)

    def diff_zl(self, s1, s2):
        """Compute zones present in s1 but not present in s2.

        Returns: List of (uuid, name) tuples with zones present only in s1."""
        s1_extra = s1.uuids - s2.uuids
        removed = [(uuid, name) for (uuid, name) in s1.mapping.items()
                   if uuid in s1_extra]
        return removed
if __name__ == '__main__':
    # debug helper: dump the zone list currently known to OpenDNSSEC
    ipa_log_manager.standard_logging_setup(debug=True)
    ods = ODSMgr()
    reader = ods.get_ods_zonelist()
    ipa_log_manager.root_logger.info('ODS zones: %s', reader.mapping)

View File

@@ -1,122 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#
"""
This script implements a syncrepl consumer which syncs data from server
to a local dict.
"""
# Import the python-ldap modules
import ldap
import ldapurl
# Import specific classes from python-ldap
from ldap.cidict import cidict
from ldap.ldapobject import ReconnectLDAPObject
from ldap.syncrepl import SyncreplConsumer
# Import modules from Python standard lib
import signal
import time
import sys
import logging
from ipapython import ipa_log_manager
class SyncReplConsumer(ReconnectLDAPObject, SyncreplConsumer):
    """
    Syncrepl Consumer interface

    Maintains an in-memory, case-insensitive mirror of server entries
    keyed by entryUUID and dispatches application_add/sync/del hooks,
    which subclasses are expected to override.
    """

    def __init__(self, *args, **kwargs):
        self.log = ipa_log_manager.log_mgr.get_logger(self)
        # Initialise the LDAP Connection first
        ldap.ldapobject.ReconnectLDAPObject.__init__(self, *args, **kwargs)
        # Now prepare the data store
        self.__data = cidict()
        self.__data['uuids'] = cidict()
        # We need this for later internal use (the 'present' phase)
        self.__presentUUIDs = cidict()

    def close_db(self):
        # This is useless for dict
        pass

    def syncrepl_get_cookie(self):
        """Return the last stored sync cookie, or None before first sync."""
        if 'cookie' in self.__data:
            cookie = self.__data['cookie']
            self.log.debug('Current cookie is: %s', cookie)
            return cookie
        else:
            self.log.debug('Current cookie is: None (not received yet)')

    def syncrepl_set_cookie(self, cookie):
        """Store the sync cookie received from the server."""
        self.log.debug('New cookie is: %s', cookie)
        self.__data['cookie'] = cookie

    def syncrepl_entry(self, dn, attributes, uuid):
        """Record an added or modified entry and invoke the app hook."""
        attributes = cidict(attributes)
        # First we determine the type of change we have here
        # (and store away the previous data for later if needed)
        previous_attributes = cidict()
        if uuid in self.__data['uuids']:
            change_type = 'modify'
            previous_attributes = self.__data['uuids'][uuid]
        else:
            change_type = 'add'
        # Now we store our knowledge of the existence of this entry
        # (including the DN as an attribute for convenience)
        attributes['dn'] = dn
        self.__data['uuids'][uuid] = attributes
        # Debugging
        self.log.debug('Detected %s of entry: %s %s', change_type, dn, uuid)
        if change_type == 'modify':
            self.application_sync(uuid, dn, attributes, previous_attributes)
        else:
            self.application_add(uuid, dn, attributes)

    def syncrepl_delete(self, uuids):
        """Forget deleted entries and invoke the app delete hook."""
        # Make sure we know about the UUID being deleted, just in case...
        uuids = [uuid for uuid in uuids if uuid in self.__data['uuids']]
        # Delete all the UUID values we know of
        for uuid in uuids:
            attributes = self.__data['uuids'][uuid]
            dn = attributes['dn']
            self.log.debug('Detected deletion of entry: %s %s', dn, uuid)
            self.application_del(uuid, dn, attributes)
            del self.__data['uuids'][uuid]

    def syncrepl_present(self, uuids, refreshDeletes=False):
        """Track the 'present' phase; drop entries the server omitted."""
        # If we have not been given any UUID values,
        # then we have received all the present controls...
        if uuids is None:
            # We only do things if refreshDeletes is false
            # as the syncrepl extension will call syncrepl_delete instead
            # when it detects a delete notice
            if refreshDeletes is False:
                deletedEntries = [uuid for uuid in self.__data['uuids'].keys()
                                  if uuid not in self.__presentUUIDs]
                self.syncrepl_delete(deletedEntries)
            # Phase is now completed, reset the list
            self.__presentUUIDs = {}
        else:
            # Note down all the UUIDs we have been sent
            for uuid in uuids:
                self.__presentUUIDs[uuid] = True

    def application_add(self, uuid, dn, attributes):
        """Hook called for each new entry; subclasses override."""
        self.log.info('Performing application add for: %s %s', dn, uuid)
        self.log.debug('New attributes: %s', attributes)
        return True

    def application_sync(self, uuid, dn, attributes, previous_attributes):
        """Hook called for each modified entry; subclasses override."""
        self.log.info('Performing application sync for: %s %s', dn, uuid)
        self.log.debug('Old attributes: %s', previous_attributes)
        self.log.debug('New attributes: %s', attributes)
        return True

    def application_del(self, uuid, dn, previous_attributes):
        """Hook called for each deleted entry; subclasses override."""
        self.log.info('Performing application delete for: %s %s', dn, uuid)
        self.log.debug('Old attributes: %s', previous_attributes)
        return True

View File

@@ -1,22 +0,0 @@
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#
import errno
import shutil
import tempfile
class TemporaryDirectory(object):
    """Context manager creating a temporary directory under *root*.

    The directory is created on ``__enter__`` (its path is returned and
    also kept in ``self.name``) and removed on ``__exit__``.  Removal
    tolerates the directory having already disappeared (ENOENT); any
    other OSError propagates.
    """

    def __init__(self, root):
        # Parent directory under which the temporary directory is made.
        self.root = root

    def __enter__(self):
        self.name = tempfile.mkdtemp(dir=self.root)
        return self.name

    def __exit__(self, exc_type, exc_value, traceback):
        try:
            shutil.rmtree(self.name)
        except OSError as err:
            # A vanished directory is fine; anything else is an error.
            if err.errno == errno.ENOENT:
                return
            raise

View File

@@ -21,41 +21,31 @@ import dns.name
import dns.exception
import copy
import six
if six.PY3:
unicode = str
@six.python_2_unicode_compatible
class DNSName(dns.name.Name):
labels = None # make pylint happy
@classmethod
def from_text(cls, labels, origin=None):
return cls(dns.name.from_text(labels, origin))
def __init__(self, labels, origin=None):
if isinstance(labels, str):
#pylint: disable=E1101
labels = dns.name.from_text(labels, origin).labels
elif isinstance(labels, unicode):
#pylint: disable=E1101
labels = dns.name.from_unicode(labels, origin).labels
elif isinstance(labels, dns.name.Name):
labels = labels.labels
try:
if isinstance(labels, six.string_types):
#pylint: disable=E1101
labels = dns.name.from_text(unicode(labels), origin).labels
elif isinstance(labels, dns.name.Name):
labels = labels.labels
super(DNSName, self).__init__(labels)
except UnicodeError as e:
except UnicodeError, e:
# dnspython bug, an invalid domain name returns the UnicodeError
# instead of a dns.exception
raise dns.exception.SyntaxError(e)
def __bool__(self):
def __nonzero__(self):
#dns.name.from_text('@') is represented like empty tuple
#we need to acting '@' as nonzero value
return True
__nonzero__ = __bool__ # for Python 2
def __copy__(self):
return DNSName(self.labels)
@@ -63,14 +53,14 @@ class DNSName(dns.name.Name):
return DNSName(copy.deepcopy(self.labels, memo))
def __str__(self):
return self.to_text()
def __unicode__(self):
return self.to_unicode()
def ToASCII(self):
#method named by RFC 3490 and python standard library
return self.to_text().decode('ascii') # must be unicode string
def canonicalize(self):
return DNSName(super(DNSName, self).canonicalize())
return str(self).decode('ascii') # must be unicode string
def concatenate(self, other):
return DNSName(super(DNSName, self).concatenate(other))

Binary file not shown.

View File

@@ -17,37 +17,132 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import collections
import os
import httplib
import xml.dom.minidom
import ConfigParser
from urllib import urlencode
import nss.nss as nss
import six
from six.moves.urllib.parse import urlencode
from nss.error import NSPRError
from ipalib import api, errors
from ipalib.errors import NetworkError
from ipalib.errors import NetworkError, CertificateOperationError
from ipalib.text import _
from ipapython import nsslib, ipautil
from ipaplatform.paths import paths
from ipapython.ipa_log_manager import *
# Python 3 rename. The package is available in "six.moves.http_client", but
# pylint cannot handle classes from that alias
try:
import httplib
except ImportError:
import http.client as httplib
# IPA can use either Dogtag version 9 or 10.
#
# Install tools should use the constants from install_constants, so that they
# install with version 10 if it is available, and with 9 if not.
# After IPA installation, the Dogtag version used is stored in the
# "dogtag_version" config option. (If that is missing, version 9 is assumed.)
# The configured_constants() function below provides constants relevant to
# the configured version.
if six.PY3:
unicode = str
class Dogtag10Constants(object):
DOGTAG_VERSION = 10
UNSECURE_PORT = 8080
AGENT_SECURE_PORT = 8443
EE_SECURE_PORT = 8443
AJP_PORT = 8009
DS_PORT = 389
DS_SECURE_PORT = 636
Profile = collections.namedtuple('Profile', ['profile_id', 'description', 'store_issued'])
SPAWN_BINARY = paths.PKISPAWN
DESTROY_BINARY = paths.PKIDESTROY
INCLUDED_PROFILES = {
Profile(u'caIPAserviceCert', u'Standard profile for network services', True),
Profile(u'IECUserRoles', u'User profile that includes IECUserRoles extension from request', True),
}
SERVER_ROOT = paths.VAR_LIB_PKI_DIR
PKI_INSTANCE_NAME = 'pki-tomcat'
PKI_ROOT = '%s/%s' % (SERVER_ROOT, PKI_INSTANCE_NAME)
CRL_PUBLISH_PATH = paths.PKI_CA_PUBLISH_DIR
CS_CFG_PATH = '%s/conf/ca/CS.cfg' % PKI_ROOT
PASSWORD_CONF_PATH = '%s/conf/password.conf' % PKI_ROOT
SERVICE_PROFILE_DIR = '%s/ca/profiles/ca' % PKI_ROOT
ALIAS_DIR = paths.PKI_TOMCAT_ALIAS_DIR.rstrip('/')
DEFAULT_PROFILE = u'caIPAserviceCert'
SERVICE_NAME = 'pki_tomcatd'
RACERT_LINE_SEP = '\n'
IPA_SERVICE_PROFILE = '%s/caIPAserviceCert.cfg' % SERVICE_PROFILE_DIR
SIGN_PROFILE = '%s/caJarSigningCert.cfg' % SERVICE_PROFILE_DIR
SHARED_DB = True
DS_USER = "dirsrv"
DS_NAME = "dirsrv"
class Dogtag9Constants(object):
DOGTAG_VERSION = 9
UNSECURE_PORT = 9180
AGENT_SECURE_PORT = 9443
EE_SECURE_PORT = 9444
AJP_PORT = 9447
DS_PORT = 7389
DS_SECURE_PORT = 7636
SPAWN_BINARY = paths.PKICREATE
DESTROY_BINARY = paths.PKISILENT
SERVER_ROOT = paths.VAR_LIB
PKI_INSTANCE_NAME = 'pki-ca'
PKI_ROOT = '%s/%s' % (SERVER_ROOT, PKI_INSTANCE_NAME)
CRL_PUBLISH_PATH = paths.PKI_CA_PUBLISH_DIR
CS_CFG_PATH = '%s/conf/CS.cfg' % PKI_ROOT
PASSWORD_CONF_PATH = '%s/conf/password.conf' % PKI_ROOT
SERVICE_PROFILE_DIR = '%s/profiles/ca' % PKI_ROOT
ALIAS_DIR = '%s/alias' % PKI_ROOT
SERVICE_NAME = 'pki-cad'
RACERT_LINE_SEP = '\r\n'
ADMIN_SECURE_PORT = 9445
EE_CLIENT_AUTH_PORT = 9446
TOMCAT_SERVER_PORT = 9701
IPA_SERVICE_PROFILE = '%s/caIPAserviceCert.cfg' % SERVICE_PROFILE_DIR
SIGN_PROFILE = '%s/caJarSigningCert.cfg' % SERVICE_PROFILE_DIR
SHARED_DB = False
DS_USER = "pkisrv"
DS_NAME = "PKI-IPA"
if os.path.exists(paths.PKISPAWN):
install_constants = Dogtag10Constants
else:
install_constants = Dogtag9Constants
def _get_configured_version(api):
"""Get the version of Dogtag IPA is configured to use
If an API is given, use information in its environment.
Otherwise, use information from the global config file.
"""
if api:
return int(api.env.dogtag_version)
else:
p = ConfigParser.SafeConfigParser()
p.read(paths.IPA_DEFAULT_CONF)
try:
version = p.get('global', 'dogtag_version')
except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
return 9
else:
return int(version)
def configured_constants(api=None):
"""Get the name of the Dogtag CA instance
See get_configured_version
"""
if _get_configured_version(api) >= 10:
return Dogtag10Constants
else:
return Dogtag9Constants
def error_from_xml(doc, message_template):
@@ -55,20 +150,21 @@ def error_from_xml(doc, message_template):
item_node = doc.getElementsByTagName("Error")
reason = item_node[0].childNodes[0].data
return errors.RemoteRetrieveError(reason=reason)
except Exception as e:
except Exception, e:
return errors.RemoteRetrieveError(reason=message_template % e)
def get_ca_certchain(ca_host=None):
def get_ca_certchain(ca_host=None, dogtag_constants=None):
"""
Retrieve the CA Certificate chain from the configured Dogtag server.
"""
if ca_host is None:
ca_host = api.env.ca_host
if dogtag_constants is None:
dogtag_constants = configured_constants()
chain = None
conn = httplib.HTTPConnection(
ca_host,
api.env.ca_install_port or 8080)
conn = httplib.HTTPConnection(ca_host,
api.env.ca_install_port or dogtag_constants.UNSECURE_PORT)
conn.request("GET", "/ca/ee/ca/getCertChain")
res = conn.getresponse()
doc = None
@@ -93,16 +189,6 @@ def get_ca_certchain(ca_host=None):
return chain
def _parse_ca_status(body):
doc = xml.dom.minidom.parseString(body)
try:
item_node = doc.getElementsByTagName("XMLResponse")[0]
item_node = item_node.getElementsByTagName("Status")[0]
return item_node.childNodes[0].data
except IndexError:
raise error_from_xml(doc, _("Retrieving CA status failed: %s"))
def ca_status(ca_host=None, use_proxy=True):
"""Return the status of the CA, and the httpd proxy in front of it
@@ -118,35 +204,35 @@ def ca_status(ca_host=None, use_proxy=True):
ca_port = 443
else:
ca_port = 8443
status, headers, body = unauthenticated_https_request(
status, reason, headers, body = unauthenticated_https_request(
ca_host, ca_port, '/ca/admin/ca/getStatus')
if status == 503:
# Service temporarily unavailable
return status
return reason
elif status != 200:
raise errors.RemoteRetrieveError(
reason=_("Retrieving CA status failed with status %d") % status)
return _parse_ca_status(body)
reason=_("Retrieving CA status failed: %s") % reason)
doc = xml.dom.minidom.parseString(body)
try:
item_node = doc.getElementsByTagName("XMLResponse")[0]
item_node = item_node.getElementsByTagName("Status")[0]
return item_node.childNodes[0].data
except IndexError:
raise error_from_xml(doc, _("Retrieving CA status failed: %s"))
def https_request(host, port, url, secdir, password, nickname,
method='POST', headers=None, body=None, **kw):
def https_request(host, port, url, secdir, password, nickname, **kw):
"""
:param method: HTTP request method (defalut: 'POST')
:param url: The path (not complete URL!) to post to.
:param body: The request body (encodes kw if None)
:param kw: Keyword arguments to encode into POST body.
:return: (http_status, http_headers, http_body)
as (integer, dict, str)
:return: (http_status, http_reason_phrase, http_headers, http_body)
as (integer, unicode, dict, str)
Perform a client authenticated HTTPS request
"""
def connection_factory(host, port):
no_init = secdir == nsslib.current_dbdir
conn = nsslib.NSSConnection(host, port, dbdir=secdir, no_init=no_init,
tls_version_min=api.env.tls_version_min,
tls_version_max=api.env.tls_version_max)
conn = nsslib.NSSConnection(host, port, dbdir=secdir)
conn.set_debuglevel(0)
conn.connect()
conn.sock.set_client_auth_data_callback(
@@ -154,19 +240,17 @@ def https_request(host, port, url, secdir, password, nickname,
nickname, password, nss.get_default_certdb())
return conn
if body is None:
body = urlencode(kw)
body = urlencode(kw)
return _httplib_request(
'https', host, port, url, connection_factory, body,
method=method, headers=headers)
'https', host, port, url, connection_factory, body)
def http_request(host, port, url, **kw):
"""
:param url: The path (not complete URL!) to post to.
:param kw: Keyword arguments to encode into POST body.
:return: (http_status, http_headers, http_body)
as (integer, dict, str)
:return: (http_status, http_reason_phrase, http_headers, http_body)
as (integer, unicode, dict, str)
Perform an HTTP request.
"""
@@ -179,8 +263,8 @@ def unauthenticated_https_request(host, port, url, **kw):
"""
:param url: The path (not complete URL!) to post to.
:param kw: Keyword arguments to encode into POST body.
:return: (http_status, http_headers, http_body)
as (integer, dict, str)
:return: (http_status, http_reason_phrase, http_headers, http_body)
as (integer, unicode, dict, str)
Perform an unauthenticated HTTPS request.
"""
@@ -190,43 +274,38 @@ def unauthenticated_https_request(host, port, url, **kw):
def _httplib_request(
protocol, host, port, path, connection_factory, request_body,
method='POST', headers=None):
protocol, host, port, path, connection_factory, request_body):
"""
:param request_body: Request body
:param connection_factory: Connection class to use. Will be called
with the host and port arguments.
:param method: HTTP request method (default: 'POST')
Perform a HTTP(s) request.
"""
if isinstance(host, unicode):
host = host.encode('utf-8')
uri = '%s://%s%s' % (protocol, ipautil.format_netloc(host, port), path)
root_logger.debug('request %s %s', method, uri)
root_logger.debug('request %r', uri)
root_logger.debug('request body %r', request_body)
headers = headers or {}
if (
method == 'POST'
and 'content-type' not in (str(k).lower() for k in headers)
):
headers['content-type'] = 'application/x-www-form-urlencoded'
try:
conn = connection_factory(host, port)
conn.request(method, uri, body=request_body, headers=headers)
conn.request('POST', uri,
body=request_body,
headers={'Content-type': 'application/x-www-form-urlencoded'},
)
res = conn.getresponse()
http_status = res.status
http_reason_phrase = unicode(res.reason, 'utf-8')
http_headers = res.msg.dict
http_body = res.read()
conn.close()
except Exception as e:
except Exception, e:
raise NetworkError(uri=uri, error=str(e))
root_logger.debug('response status %d', http_status)
root_logger.debug('response headers %s', http_headers)
root_logger.debug('response body %r', http_body)
root_logger.debug('request status %d', http_status)
root_logger.debug('request reason_phrase %r', http_reason_phrase)
root_logger.debug('request headers %s', http_headers)
root_logger.debug('request body %r', http_body)
return http_status, http_headers, http_body
return http_status, http_reason_phrase, http_headers, http_body

Binary file not shown.

View File

@@ -1,47 +0,0 @@
# Authors: Petr Viktorin <pviktori@redhat.com>
#
# Copyright (C) 2014 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
class SetseboolError(Exception):
    """Raised when setting a SELinux boolean fails.

    :param failed: dict mapping boolean names to the values that could
        not be set
    :param command: command the user can run to set the booleans

    Both initializer arguments are stored as attributes of the same name.
    """

    def __init__(self, failed, command):
        pairs = ' '.join('%s=%s' % item for item in failed.items())
        super(SetseboolError, self).__init__(
            "Could not set SELinux booleans: %s" % pairs)
        self.failed = failed
        self.command = command

    def format_service_warning(self, service_name):
        """Return a warning string for display when this error is raised
        from a service installation."""
        template = '\n'.join([
            'WARNING: %(err)s',
            '',
            'The %(service)s may not function correctly until ',
            'the booleans are successfully changed with the command:',
            '    %(cmd)s',
            'Try updating the policycoreutils and selinux-policy packages.'
        ])
        return template % {'err': self,
                           'service': service_name,
                           'cmd': self.command}

View File

@@ -1,80 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
class Graph(object):
    """
    Simple oriented graph structure

    G = (V, E) where G is graph, V set of vertices and E list of edges.
    E = (tail, head) where tail and head are vertices
    """

    def __init__(self):
        self.vertices = set()   # V
        self.edges = []         # E, list of (tail, head) tuples
        self._adj = dict()      # vertex -> list of adjacent head vertices

    def add_vertex(self, vertex):
        """Add *vertex* to the graph and (re)initialize its adjacency list."""
        self.vertices.add(vertex)
        self._adj[vertex] = []

    def add_edge(self, tail, head):
        """Add oriented edge (tail, head); both must already be vertices."""
        if tail not in self.vertices:
            raise ValueError("tail is not a vertex")

        if head not in self.vertices:
            raise ValueError("head is not a vertex")

        self.edges.append((tail, head))
        self._adj[tail].append(head)

    def remove_edge(self, tail, head):
        """Remove edge (tail, head); raise ValueError if it is absent."""
        try:
            self.edges.remove((tail, head))
        except ValueError:
            # BUGFIX: list.remove raises ValueError, not KeyError; the
            # original handler could never fire and the generic list
            # error leaked out instead of this descriptive message.
            raise ValueError(
                "graph does not contain edge: (%s, %s)" % (tail, head))

        self._adj[tail].remove(head)

    def remove_vertex(self, vertex):
        """Remove *vertex* and every edge touching it."""
        try:
            self.vertices.remove(vertex)
        except KeyError:
            raise ValueError("graph does not contain vertex: %s" % vertex)

        # delete _adjacencies
        del self._adj[vertex]

        for key, _adj in self._adj.items():
            _adj[:] = [v for v in _adj if v != vertex]

        # delete edges
        edges = [e for e in self.edges if e[0] != vertex and e[1] != vertex]
        self.edges[:] = edges

    def get_tails(self, head):
        """
        Get list of vertices where a vertex is on the right side of an edge
        """
        return [e[0] for e in self.edges if e[1] == head]

    def get_heads(self, tail):
        """
        Get list of vertices where a vertex is on the left side of an edge
        """
        return [e[1] for e in self.edges if e[0] == tail]

    def bfs(self, start=None):
        """
        Breadth-first search traversal of the graph from `start` vertex.
        Return a set of all visited vertices
        """
        # BUGFIX: test for None explicitly so falsy vertices (0, '', ...)
        # can be used as the start vertex
        if start is None:
            start = list(self.vertices)[0]
        visited = set()
        queue = [start]
        while queue:
            vertex = queue.pop(0)
            if vertex not in visited:
                visited.add(vertex)
                queue.extend(set(self._adj.get(vertex, [])) - visited)
        return visited

Binary file not shown.

View File

@@ -1,7 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Installer framework.
"""

View File

@@ -1,360 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Command line support.
"""
import collections
import optparse
import signal
import six
from ipapython import admintool, ipa_log_manager
from ipapython.ipautil import CheckedIPAddress, private_ccache
from . import core, common
__all__ = ['install_tool', 'uninstall_tool']
if six.PY3:
long = int
def install_tool(configurable_class, command_name, log_file_name,
                 positional_arguments=None, usage=None, debug_option=False,
                 use_private_ccache=True,
                 uninstall_log_file_name=None,
                 uninstall_positional_arguments=None, uninstall_usage=None):
    """Build a concrete InstallTool subclass for *configurable_class*.

    If any of the uninstall_* arguments is given, the returned class also
    knows how to construct a matching uninstall tool (--uninstall option).
    """
    wants_uninstall = any(
        arg is not None
        for arg in (uninstall_log_file_name,
                    uninstall_positional_arguments,
                    uninstall_usage))
    if wants_uninstall:
        uninstall_kwargs = dict(
            configurable_class=configurable_class,
            command_name=command_name,
            log_file_name=uninstall_log_file_name,
            positional_arguments=uninstall_positional_arguments,
            usage=uninstall_usage,
            debug_option=debug_option,
        )
    else:
        uninstall_kwargs = None

    members = dict(
        configurable_class=configurable_class,
        command_name=command_name,
        log_file_name=log_file_name,
        positional_arguments=positional_arguments,
        usage=usage,
        debug_option=debug_option,
        uninstall_kwargs=uninstall_kwargs,
        use_private_ccache=use_private_ccache,
    )
    type_name = 'install_tool({0})'.format(configurable_class.__name__)
    return type(type_name, (InstallTool,), members)
def uninstall_tool(configurable_class, command_name, log_file_name,
                   positional_arguments=None, usage=None, debug_option=False):
    """Build a concrete UninstallTool subclass for *configurable_class*."""
    members = dict(
        configurable_class=configurable_class,
        command_name=command_name,
        log_file_name=log_file_name,
        positional_arguments=positional_arguments,
        usage=usage,
        debug_option=debug_option,
    )
    type_name = 'uninstall_tool({0})'.format(configurable_class.__name__)
    return type(type_name, (UninstallTool,), members)
class ConfigureTool(admintool.AdminTool):
"""
Admin tool that drives a Configurable through CLI parsing and execution.

Subclasses set `configurable_class` and `_transform` (which wraps it as
an installer or uninstaller); the knobs of the transformed class become
command-line options and, optionally, positional arguments.
"""
configurable_class = None
debug_option = False
positional_arguments = None
use_private_ccache = True
@staticmethod
def _transform(configurable_class):
# Map the configurable class to its (un)installer variant;
# supplied by InstallTool / UninstallTool.
raise NotImplementedError
@classmethod
def add_options(cls, parser):
"""Build optparse options from the knobs of the transformed class."""
transformed_cls = cls._transform(cls.configurable_class)
if issubclass(transformed_cls, common.Interactive):
parser.add_option(
'-U', '--unattended',
dest='unattended',
default=False,
action='store_true',
help="unattended (un)installation never prompts the user",
)
basic_group = optparse.OptionGroup(parser, "basic options")
groups = collections.OrderedDict()
groups[None] = basic_group
for owner_cls, name in transformed_cls.knobs():
knob_cls = getattr(owner_cls, name)
# Knobs mapped to positional arguments are not CLI options.
if cls.positional_arguments and name in cls.positional_arguments:
continue
group_cls = owner_cls.group()
try:
opt_group = groups[group_cls]
except KeyError:
opt_group = groups[group_cls] = optparse.OptionGroup(
parser, "{0} options".format(group_cls.description))
kwargs = dict()
# Boolean knobs are flags (no value); everything else parses a string
# through _option_callback, which converts it to the knob's type.
if knob_cls.type is bool:
kwargs['type'] = None
else:
kwargs['type'] = 'string'
kwargs['dest'] = name
kwargs['action'] = 'callback'
kwargs['callback'] = cls._option_callback
kwargs['callback_args'] = (knob_cls,)
if knob_cls.sensitive:
kwargs['sensitive'] = True
if knob_cls.cli_metavar:
kwargs['metavar'] = knob_cls.cli_metavar
if knob_cls.cli_short_name:
short_opt_str = '-{0}'.format(knob_cls.cli_short_name)
else:
short_opt_str = ''
cli_name = knob_cls.cli_name or name.replace('_', '-')
opt_str = '--{0}'.format(cli_name)
if not knob_cls.deprecated:
help = knob_cls.description
else:
help = optparse.SUPPRESS_HELP
opt_group.add_option(
short_opt_str, opt_str,
help=help,
**kwargs
)
# Aliases are accepted but hidden from --help output.
if knob_cls.cli_aliases:
opt_strs = ['--{0}'.format(a) for a in knob_cls.cli_aliases]
opt_group.add_option(
*opt_strs,
help=optparse.SUPPRESS_HELP,
**kwargs
)
for group, opt_group in groups.items():
parser.add_option_group(opt_group)
super(ConfigureTool, cls).add_options(parser,
debug_option=cls.debug_option)
@classmethod
def _option_callback(cls, option, opt_str, value, parser, knob_cls):
# optparse callback: convert the raw string (accumulating for list
# knobs) and surface conversion errors as option errors.
old_value = getattr(parser.values, option.dest, None)
try:
value = cls._parse_knob(knob_cls, old_value, value)
except ValueError as e:
raise optparse.OptionValueError(
"option {0}: {1}".format(opt_str, e))
setattr(parser.values, option.dest, value)
@classmethod
def _parse_knob(cls, knob_cls, old_value, value):
"""Convert a raw CLI string to the knob's declared type.

:param knob_cls: knob class carrying the target `type`
:param old_value: previously accumulated value (for list knobs)
:param value: raw string from the command line (ignored for bool)
:raises ValueError: when the string cannot be converted
"""
if knob_cls.type is bool:
parse = bool
is_list = False
# Flag option: presence means True; no string value to convert.
value = True
else:
# A (list, T) tuple type means the option may repeat and accumulate.
if isinstance(knob_cls.type, tuple):
assert knob_cls.type[0] is list
value_type = knob_cls.type[1]
is_list = True
else:
value_type = knob_cls.type
is_list = False
if value_type is int:
def parse(value):
try:
# base 0: accept decimal, 0x..., 0o... notations
return int(value, 0)
except ValueError:
raise ValueError(
"invalid integer value: {0}".format(repr(value)))
elif value_type is long:
def parse(value):
try:
return long(value, 0)
except ValueError:
raise ValueError(
"invalid long integer value: {0}".format(
repr(value)))
elif value_type == 'ip':
def parse(value):
try:
return CheckedIPAddress(value)
except Exception as e:
raise ValueError("invalid IP address {0}: {1}".format(
value, e))
elif value_type == 'ip-local':
def parse(value):
try:
return CheckedIPAddress(value, match_local=True)
except Exception as e:
raise ValueError("invalid IP address {0}: {1}".format(
value, e))
elif isinstance(value_type, set):
# Enumerated choice: value must be one of the set members.
def parse(value):
if value not in value_type:
raise ValueError(
"invalid choice {0} (choose from {1})".format(
repr(value), ', '.join(
sorted(repr(v) for v in value_type))))
return value
else:
# Any other type is assumed to be a callable converter.
parse = value_type
value = parse(value)
if is_list:
old_value = old_value or []
old_value.append(value)
value = old_value
return value
def validate_options(self, needs_root=True):
"""Validate CLI options and bind positional arguments to knobs."""
super(ConfigureTool, self).validate_options(needs_root=needs_root)
if self.positional_arguments:
if len(self.args) > len(self.positional_arguments):
self.option_parser.error("Too many arguments provided")
index = 0
transformed_cls = self._transform(self.configurable_class)
for owner_cls, name in transformed_cls.knobs():
knob_cls = getattr(owner_cls, name)
if name not in self.positional_arguments:
continue
try:
value = self.args[index]
except IndexError:
# Fewer arguments than knobs: remaining knobs keep defaults.
break
old_value = getattr(self.options, name, None)
try:
value = self._parse_knob(knob_cls, old_value, value)
except ValueError as e:
self.option_parser.error(
"argument {0}: {1}".format(index + 1, e))
setattr(self.options, name, value)
index += 1
def _setup_logging(self, log_file_mode='w', no_file=False):
"""Configure logging to the selected log file (or none)."""
if no_file:
log_file_name = None
elif self.options.log_file:
log_file_name = self.options.log_file
else:
log_file_name = self.log_file_name
ipa_log_manager.standard_logging_setup(log_file_name,
debug=self.options.verbose)
self.log = ipa_log_manager.log_mgr.get_logger(self)
if log_file_name:
self.log.debug('Logging to %s' % log_file_name)
elif not no_file:
self.log.debug('Not logging to a file')
def run(self):
"""Instantiate the transformed configurable from parsed options and run it."""
kwargs = {}
transformed_cls = self._transform(self.configurable_class)
knob_classes = {n: getattr(c, n) for c, n in transformed_cls.knobs()}
for name in knob_classes:
value = getattr(self.options, name, None)
if value is not None:
kwargs[name] = value
if (issubclass(self.configurable_class, common.Interactive) and
not self.options.unattended):
kwargs['interactive'] = True
try:
cfgr = transformed_cls(**kwargs)
except core.KnobValueError as e:
# Report the error against the option or positional argument
# the offending knob came from.
knob_cls = knob_classes[e.name]
try:
if self.positional_arguments is None:
raise IndexError
# NOTE(review): list.index() raises ValueError, not IndexError;
# a knob name absent from positional_arguments would escape
# this except clause — confirm intended.
index = self.positional_arguments.index(e.name)
except IndexError:
cli_name = knob_cls.cli_name or e.name.replace('_', '-')
desc = "option --{0}".format(cli_name)
else:
desc = "argument {0}".format(index + 1)
self.option_parser.error("{0}: {1}".format(desc, e))
except RuntimeError as e:
self.option_parser.error(str(e))
# Translate SIGTERM into KeyboardInterrupt for clean shutdown.
signal.signal(signal.SIGTERM, self.__signal_handler)
if self.use_private_ccache:
with private_ccache():
super(ConfigureTool, self).run()
cfgr.run()
else:
super(ConfigureTool, self).run()
cfgr.run()
@staticmethod
def __signal_handler(signum, frame):
raise KeyboardInterrupt
class InstallTool(ConfigureTool):
"""ConfigureTool variant that installs; optionally exposes --uninstall."""
# kwargs for building the matching uninstall tool, or None when the
# install tool has no uninstall counterpart.
uninstall_kwargs = None
_transform = staticmethod(common.installer)
@classmethod
def add_options(cls, parser):
"""Add install options plus --uninstall when an uninstaller exists."""
super(InstallTool, cls).add_options(parser)
if cls.uninstall_kwargs is not None:
uninstall_group = optparse.OptionGroup(parser, "uninstall options")
uninstall_group.add_option(
'--uninstall',
dest='uninstall',
default=False,
action='store_true',
help=("uninstall an existing installation. The uninstall can "
"be run with --unattended option"),
)
parser.add_option_group(uninstall_group)
@classmethod
def get_command_class(cls, options, args):
"""Dispatch to the uninstall tool when --uninstall was given."""
if cls.uninstall_kwargs is not None and options.uninstall:
uninstall_cls = uninstall_tool(**cls.uninstall_kwargs)
uninstall_cls.option_parser = cls.option_parser
return uninstall_cls
else:
return super(InstallTool, cls).get_command_class(options, args)
class UninstallTool(ConfigureTool):
"""ConfigureTool variant that uninstalls (uses the uninstaller transform)."""
_transform = staticmethod(common.uninstaller)

View File

@@ -1,115 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Common stuff.
"""
import traceback
from . import core
from .util import from_
__all__ = ['step', 'Installable', 'Interactive', 'Continuous', 'installer',
'uninstaller']
def step():
    """Decorator factory turning a generator function into a Step component.

    The decorated function becomes the install body of a new Step
    component class.
    """
    def decorator(func):
        component_cls = core.Component(Step)
        component_cls._installer = staticmethod(func)
        return component_cls
    return decorator
class Installable(core.Configurable):
"""
Configurable which does install or uninstall.
"""
# When True, components run in reverse order and _uninstall() is used.
uninstalling = core.Property(False)
def _get_components(self):
# Uninstall tears components down in the opposite order they were
# set up in.
components = super(Installable, self)._get_components()
if self.uninstalling:
components = reversed(list(components))
return components
def _configure(self):
# Dispatch the configure coroutine to the install or uninstall body.
if self.uninstalling:
return self._uninstall()
else:
return self._install()
def _install(self):
# Default install: delegate to the base configure coroutine.
assert not hasattr(super(Installable, self), '_install')
return super(Installable, self)._configure()
def _uninstall(self):
# Default uninstall: delegate to the base configure coroutine.
assert not hasattr(super(Installable, self), '_uninstall')
return super(Installable, self)._configure()
class Step(Installable):
"""Single installation step whose install/uninstall bodies are generator
functions, installed via the step() decorator and Step.uninstaller."""
@property
def parent(self):
# Overridden by Component binding; a bare Step has no parent.
raise AttributeError('parent')
def _install(self):
# Run the user-supplied installer generator; each of its yields
# delegates once to the superclass install coroutine.
for nothing in self._installer(self.parent):
yield from_(super(Step, self)._install())
@staticmethod
def _installer(obj):
# Default no-op installer (replaced by the step() decorator).
yield
def _uninstall(self):
for nothing in self._uninstaller(self.parent):
yield from_(super(Step, self)._uninstall())
@staticmethod
def _uninstaller(obj):
# Default no-op uninstaller (replaced via Step.uninstaller).
yield
@classmethod
def uninstaller(cls, func):
"""Class decorator registering *func* as this step's uninstall body."""
cls._uninstaller = staticmethod(func)
return cls
class Interactive(core.Configurable):
"""Mixin adding an `interactive` property (defaults to False)."""
interactive = core.Property(False)
class Continuous(core.Configurable):
"""Mixin that logs failures and continues instead of aborting."""
def _handle_exception(self, exc_info):
# Let the normal handler run; if it (re-)raises, log the traceback
# and swallow the error so remaining components still execute.
# NOTE(review): non-Exception BaseExceptions (e.g. KeyboardInterrupt)
# are swallowed here too, just without the error log — confirm intended.
try:
super(Continuous, self)._handle_exception(exc_info)
except BaseException as e:
self.log.debug(traceback.format_exc())
if isinstance(e, Exception):
self.log.error("%s", e)
def installer(cls):
    """Wrap *cls* into an Installable subclass locked to install mode."""
    class Installer(cls, Installable):
        def __init__(self, **kwargs):
            super(Installer, self).__init__(uninstalling=False,
                                            **kwargs)

    Installer.__name__ = 'installer({0})'.format(cls.__name__)
    return Installer
def uninstaller(cls):
    """Wrap *cls* into a Continuous Installable locked to uninstall mode."""
    class Uninstaller(Continuous, cls, Installable):
        def __init__(self, **kwargs):
            super(Uninstaller, self).__init__(uninstalling=True,
                                              **kwargs)

    Uninstaller.__name__ = 'uninstaller({0})'.format(cls.__name__)
    return Uninstaller

View File

@@ -1,595 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
The framework core.
"""
import abc
import collections
import functools
import itertools
import sys
import six
from ipapython.ipa_log_manager import root_logger
from . import util
from .util import from_
__all__ = ['InvalidStateError', 'KnobValueError', 'Property', 'Knob',
'Configurable', 'Group', 'Component', 'Composite']
# Configurable states
_VALIDATE_PENDING = 'VALIDATE_PENDING'
_VALIDATE_RUNNING = 'VALIDATE_RUNNING'
_EXECUTE_PENDING = 'EXECUTE_PENDING'
_EXECUTE_RUNNING = 'EXECUTE_RUNNING'
_STOPPED = 'STOPPED'
_FAILED = 'FAILED'
_CLOSED = 'CLOSED'
_missing = object()
_counter = itertools.count()
@functools.cmp_to_key
def _class_key(a, b):
if a is b:
return 0
elif issubclass(a, b):
return -1
elif issubclass(b, a):
return 1
else:
return 0
class InvalidStateError(Exception):
"""Raised when a Configurable method is called in the wrong lifecycle state."""
pass
class KnobValueError(ValueError):
    """ValueError tagged with the name of the knob that caused it."""

    def __init__(self, name, message):
        # Remember which knob failed so CLI code can point at the
        # offending option or positional argument.
        self.name = name
        super(KnobValueError, self).__init__(message)
class PropertyBase(six.with_metaclass(util.InnerClassMeta, object)):
"""Data descriptor for configurable properties.

Reads walk the owner's fallback chain (``_get_fallback``) before
falling back to the class-level ``default``; writes and deletes act
on the owning instance's __dict__ under the inner-class name.
"""
# shut up pylint
__outer_class__ = None
__outer_name__ = None
_order = None
@property
def default(self):
# No default unless a subclass (made by Property()) provides one.
raise AttributeError('default')
def __init__(self, outer):
pass
def __get__(self, obj, obj_type):
# Search the instance, then each fallback object in turn.
while obj is not None:
try:
return obj.__dict__[self.__outer_name__]
except KeyError:
pass
obj = obj._get_fallback()
try:
return self.default
except AttributeError:
raise AttributeError(self.__outer_name__)
def __set__(self, obj, value):
try:
obj.__dict__[self.__outer_name__] = value
except KeyError:
raise AttributeError(self.__outer_name__)
def __delete__(self, obj):
try:
del obj.__dict__[self.__outer_name__]
except KeyError:
raise AttributeError(self.__outer_name__)
def Property(default=_missing):
    """Create a new property class, optionally carrying a default value."""
    members = {}
    if default is not _missing:
        members['default'] = default
    return util.InnerClassMeta('Property', (PropertyBase,), members)
class KnobBase(PropertyBase):
"""Property that is additionally exposed as a CLI option/argument.

Class attributes describe the CLI surface: value `type`, whether the
value is `sensitive` (hidden from logs), `deprecated` (hidden from
--help), and the various cli_* naming attributes.
"""
type = None
sensitive = False
deprecated = False
description = None
cli_name = None
cli_short_name = None
cli_aliases = None
cli_metavar = None
def __init__(self, outer):
self.outer = outer
def validate(self, value):
# Default: accept any value; extended via the validator decorator.
pass
@classmethod
def default_getter(cls, func):
"""Class decorator installing *func* as a computed `default` property."""
@property
def default(self):
return func(self.outer)
cls.default = default
return cls
@classmethod
def validator(cls, func):
"""Class decorator chaining *func* in front of the inherited validate."""
def validate(self, value):
func(self.outer, value)
super(cls, self).validate(value)
cls.validate = validate
return cls
def Knob(type_or_base, default=_missing, sensitive=_missing,
         deprecated=_missing, description=_missing, cli_name=_missing,
         cli_short_name=_missing, cli_aliases=_missing, cli_metavar=_missing):
    """Create a new knob class.

    *type_or_base* is either an existing knob class to derive from or a
    value type; every other argument overrides the corresponding class
    attribute when explicitly given (the `_missing` sentinel lets callers
    pass None as a real value).
    """
    members = {'_order': next(_counter)}
    if not (isinstance(type_or_base, type) and
            issubclass(type_or_base, KnobBase)):
        # A plain value type was given: record it and derive from the base.
        members['type'] = type_or_base
        type_or_base = KnobBase
    overrides = (
        ('default', default),
        ('sensitive', sensitive),
        ('deprecated', deprecated),
        ('description', description),
        ('cli_name', cli_name),
        ('cli_short_name', cli_short_name),
        ('cli_aliases', cli_aliases),
        ('cli_metavar', cli_metavar),
    )
    for attr, attr_value in overrides:
        if attr_value is not _missing:
            members[attr] = attr_value
    return util.InnerClassMeta('Knob', (type_or_base,), members)
class Configurable(six.with_metaclass(abc.ABCMeta, object)):
"""
Base class of all configurables.
FIXME: details of validate/execute, properties and knobs

Lifecycle: VALIDATE_PENDING -> VALIDATE_RUNNING -> EXECUTE_PENDING ->
EXECUTE_RUNNING -> STOPPED / FAILED / CLOSED, driven by the coroutine
returned from _configure().
"""
@classmethod
def properties(cls):
"""
Iterate over properties defined for the configurable.

Yields (owner_cls, name) pairs in declaration (_order) order,
walking the MRO so subclasses shadow base definitions.
"""
assert not hasattr(super(Configurable, cls), 'properties')
seen = set()
for owner_cls in cls.__mro__:
result = []
for name, prop_cls in owner_cls.__dict__.items():
if name in seen:
continue
seen.add(name)
if not isinstance(prop_cls, type):
continue
if not issubclass(prop_cls, PropertyBase):
continue
result.append((prop_cls._order, owner_cls, name))
result = sorted(result, key=lambda r: r[0])
for order, owner_cls, name in result:
yield owner_cls, name
@classmethod
def knobs(cls):
"""Iterate over the subset of properties that are knobs."""
for owner_cls, name in cls.properties():
prop_cls = getattr(owner_cls, name)
if issubclass(prop_cls, KnobBase):
yield owner_cls, name
@classmethod
def group(cls):
# Default: no CLI option group (knobs land in "basic options").
assert not hasattr(super(Configurable, cls), 'group')
return None
def __init__(self, **kwargs):
"""
Initialize the configurable.

Keyword arguments matching property names are assigned; knob values
are validated; unknown keywords raise TypeError.
"""
self.log = root_logger
cls = self.__class__
for owner_cls, name in cls.properties():
if name.startswith('_'):
continue
prop_cls = getattr(owner_cls, name)
if not isinstance(prop_cls, type):
continue
if not issubclass(prop_cls, PropertyBase):
continue
try:
value = kwargs.pop(name)
except KeyError:
pass
else:
setattr(self, name, value)
for owner_cls, name in cls.knobs():
if name.startswith('_'):
continue
if not isinstance(self, owner_cls):
continue
value = getattr(self, name, None)
if value is None:
continue
prop_cls = getattr(owner_cls, name)
prop = prop_cls(self)
try:
prop.validate(value)
except KnobValueError:
raise
except ValueError as e:
# Normalize plain ValueErrors so callers learn the knob name.
raise KnobValueError(name, str(e))
if kwargs:
extra = sorted(kwargs)
raise TypeError(
"{0}() got {1} unexpected keyword arguments: {2}".format(
type(self).__name__,
len(extra),
', '.join(repr(name) for name in extra)))
self._reset()
def _reset(self):
# (Re)start the lifecycle with a fresh configure coroutine.
assert not hasattr(super(Configurable, self), '_reset')
self.__state = _VALIDATE_PENDING
self.__gen = util.run_generator_with_yield_from(self._configure())
def _get_components(self):
assert not hasattr(super(Configurable, self), '_get_components')
raise TypeError("{0} is not composite".format(self))
def _get_fallback(self):
# No fallback object for property lookup by default.
return None
@abc.abstractmethod
def _configure(self):
"""
Coroutine which defines the logic of the configurable.

The yield below is the boundary between the validation phase and
the execution phase.
"""
assert not hasattr(super(Configurable, self), '_configure')
self.__transition(_VALIDATE_RUNNING, _EXECUTE_PENDING)
while self.__state != _EXECUTE_RUNNING:
yield
def run(self):
"""
Run the configurable.
"""
self.validate()
if self.__state == _EXECUTE_PENDING:
self.execute()
def validate(self):
"""
Run the validation part of the configurable.
"""
for nothing in self._validator():
pass
def _validator(self):
"""
Coroutine which runs the validation part of the configurable.
"""
return self.__runner(_VALIDATE_PENDING, _VALIDATE_RUNNING)
def execute(self):
"""
Run the execution part of the configurable.
"""
for nothing in self._executor():
pass
def _executor(self):
"""
Coroutine which runs the execution part of the configurable.
"""
return self.__runner(_EXECUTE_PENDING, _EXECUTE_RUNNING)
def done(self):
"""
Return True if the configurable has finished.
"""
return self.__state in (_STOPPED, _FAILED, _CLOSED)
def run_until_executing(self, gen):
"""Drive *gen* until this configurable enters the execute phase."""
while self.__state != _EXECUTE_RUNNING:
try:
yield next(gen)
except StopIteration:
break
def __runner(self, pending_state, running_state):
# Drive the configure coroutine one step per yield, translating
# coroutine termination into lifecycle state transitions and routing
# exceptions through _handle_exception (which may suppress them).
self.__transition(pending_state, running_state)
step = lambda: next(self.__gen)
while True:
try:
step()
except StopIteration:
self.__transition(running_state, _STOPPED)
break
except GeneratorExit:
self.__transition(running_state, _CLOSED)
break
except BaseException:
exc_info = sys.exc_info()
try:
self._handle_exception(exc_info)
except BaseException:
raise
else:
break
finally:
self.__transition(running_state, _FAILED)
if self.__state != running_state:
break
try:
yield
except BaseException:
# Exception thrown into us: forward it into the coroutine.
exc_info = sys.exc_info()
step = lambda: self.__gen.throw(*exc_info)
else:
step = lambda: next(self.__gen)
def _handle_exception(self, exc_info):
# Default: re-raise; mixins (e.g. Continuous) may suppress.
assert not hasattr(super(Configurable, self), '_handle_exception')
six.reraise(*exc_info)
def __transition(self, from_state, to_state):
if self.__state != from_state:
raise InvalidStateError(self.__state)
self.__state = to_state
class Group(Configurable):
"""Configurable whose knobs get their own CLI option group (named after
the class's `description`)."""
@classmethod
def group(cls):
return cls
class ComponentMeta(util.InnerClassMeta, abc.ABCMeta):
# Combined metaclass so component classes are both inner classes
# (InnerClassMeta binding) and abstract-base-class aware (ABCMeta).
pass
class ComponentBase(six.with_metaclass(ComponentMeta, Configurable)):
"""Configurable nested inside a Composite; instantiated lazily per
parent instance and falling back to the parent for property lookup."""
# shut up pylint
__outer_class__ = None
__outer_name__ = None
_order = None
@classmethod
def group(cls):
# Prefer the component's own group; otherwise inherit the outer
# (composite) class's group.
result = super(ComponentBase, cls).group()
if result is not None:
return result
else:
return cls.__outer_class__.group()
def __init__(self, parent, **kwargs):
self.__parent = parent
super(ComponentBase, self).__init__(**kwargs)
@property
def parent(self):
return self.__parent
def __get__(self, obj, obj_type):
# Cache this (already created) instance on the owner so subsequent
# attribute access bypasses the descriptor.
obj.__dict__[self.__outer_name__] = self
return self
def _get_fallback(self):
# Unresolved property reads continue on the parent composite.
return self.__parent
def _handle_exception(self, exc_info):
# Give the parent a chance to handle (or suppress) the failure.
try:
super(ComponentBase, self)._handle_exception(exc_info)
except BaseException:
exc_info = sys.exc_info()
self.__parent._handle_exception(exc_info)
def Component(cls):
    """Create a component class embedding *cls*, ordered by creation."""
    return ComponentMeta(
        'Component', (ComponentBase, cls), {'_order': next(_counter)})
class Composite(Configurable):
"""
Configurable composed of any number of components.
Provides knobs of all child components.
"""
@classmethod
def properties(cls):
"""Yield own properties plus knobs aggregated from components,
merging duplicate knob names when one definition subclasses the other."""
name_dict = {}
owner_dict = collections.OrderedDict()
for owner_cls, name in super(Composite, cls).properties():
name_dict[name] = owner_cls
owner_dict.setdefault(owner_cls, []).append(name)
for owner_cls, name in cls.components():
comp_cls = getattr(cls, name)
for owner_cls, name in comp_cls.knobs():
# Attributes defined directly on the composite win outright.
if hasattr(cls, name):
continue
try:
last_owner_cls = name_dict[name]
except KeyError:
name_dict[name] = owner_cls
owner_dict.setdefault(owner_cls, []).append(name)
else:
# Duplicate knob name: the more derived knob class wins;
# unrelated definitions are a programming error.
knob_cls = getattr(owner_cls, name)
last_knob_cls = getattr(last_owner_cls, name)
if issubclass(knob_cls, last_knob_cls):
name_dict[name] = owner_cls
owner_dict[last_owner_cls].remove(name)
owner_dict.setdefault(owner_cls, [])
if name not in owner_dict[owner_cls]:
owner_dict[owner_cls].append(name)
elif not issubclass(last_knob_cls, knob_cls):
raise TypeError("{0}.knobs(): conflicting definitions "
"of '{1}' in {2} and {3}".format(
cls.__name__,
name,
last_owner_cls.__name__,
owner_cls.__name__))
# Yield owners most-derived-first for stable CLI grouping.
for owner_cls in sorted(owner_dict, key=_class_key):
for name in owner_dict[owner_cls]:
yield owner_cls, name
@classmethod
def components(cls):
"""Yield (owner_cls, name) for each component, in declaration order."""
assert not hasattr(super(Composite, cls), 'components')
seen = set()
for owner_cls in cls.__mro__:
result = []
for name, comp_cls in owner_cls.__dict__.items():
if name in seen:
continue
seen.add(name)
if not isinstance(comp_cls, type):
continue
if not issubclass(comp_cls, ComponentBase):
continue
result.append((comp_cls._order, owner_cls, name))
result = sorted(result, key=lambda r: r[0])
for order, owner_cls, name in result:
yield owner_cls, name
def __getattr__(self, name):
# Delegate unknown attribute reads to the component that owns the
# knob of that name.
for owner_cls, knob_name in self.knobs():
if knob_name == name:
break
else:
raise AttributeError(name)
for component in self.__components:
if isinstance(component, owner_cls):
break
else:
raise AttributeError(name)
return getattr(component, name)
def _reset(self):
# Materialize the component instances before (re)starting the
# lifecycle coroutine.
self.__components = list(self._get_components())
super(Composite, self)._reset()
def _get_components(self):
for owner_cls, name in self.components():
yield getattr(self, name)
def _configure(self):
# Phase 1: step every child's validator round-robin until all stop.
validate = [(c, c._validator()) for c in self.__components]
while True:
new_validate = []
for child, validator in validate:
try:
next(validator)
except StopIteration:
pass
else:
new_validate.append((child, validator))
if not new_validate:
break
validate = new_validate
yield
if not self.__components:
return
yield from_(super(Composite, self)._configure())
# Phase 2: step every unfinished child's executor round-robin.
execute = [(c, c._executor()) for c in self.__components
if not c.done()]
while True:
new_execute = []
for child, executor in execute:
try:
next(executor)
except StopIteration:
pass
else:
new_execute.append((child, executor))
if not new_execute:
break
execute = new_execute
yield

View File

@@ -1,164 +0,0 @@
#
# Copyright (C) 2015 FreeIPA Contributors see COPYING for license
#
"""
Utilities.
"""
import sys
import six
class from_(object):
    """
    Marker wrapping a subgenerator that should be delegated to.

    Yielding ``from_(subgen)`` inside a generator driven by
    `run_generator_with_yield_from` behaves like Python 3's
    ``yield from subgen``.
    """
    # Single slot: no per-instance dict, just the wrapped generator.
    __slots__ = ('obj',)

    def __init__(self, obj):
        self.obj = obj
def run_generator_with_yield_from(gen):
"""
Iterate over a generator object with subgenerator delegation.
This implements Python 3's ``yield from`` expressions, using Python 2
syntax:
>>> def subgen():
... yield 'B'
... yield 'C'
...
>>> def gen():
... yield 'A'
... yield from_(subgen())
... yield 'D'
...
>>> list(run_generator_with_yield_from(gen()))
['A', 'B', 'C', 'D']
Returning value from a subgenerator is not supported.
"""
# Trampoline: `stack` holds the chain of nested generators; the top
# one is active. Values and exceptions from the consumer are forwarded
# to it; from_-wrapped values push a new generator instead of yielding.
exc_info = None
value = None
stack = [gen]
while stack:
prev_exc_info, exc_info = exc_info, None
prev_value, value = value, None
gen = stack[-1]
try:
if prev_exc_info is None:
value = gen.send(prev_value)
else:
value = gen.throw(*prev_exc_info)
except StopIteration:
# Active generator finished: resume its parent.
stack.pop()
continue
except BaseException:
# Propagate the failure into the parent generator next round.
exc_info = sys.exc_info()
stack.pop()
continue
else:
if isinstance(value, from_):
# Delegate: the wrapped generator becomes the active one.
stack.append(value.obj)
value = None
continue
try:
value = (yield value)
except BaseException:
# Exception thrown into us by the consumer: route it into
# the active generator on the next iteration.
exc_info = sys.exc_info()
if exc_info is not None:
# The outermost generator died with this exception: re-raise it.
six.reraise(*exc_info)
class InnerClassMeta(type):
"""Metaclass making a class usable as a descriptor on its enclosing
("outer") class: attribute access lazily instantiates the inner class
with the outer instance and delegates to the instance's own
__get__/__set__/__delete__ when present."""
def __new__(cls, name, bases, class_dict):
# Binding attributes are computed per outer class in __bind; drop
# any inherited from the bases.
class_dict.pop('__outer_class__', None)
class_dict.pop('__outer_name__', None)
return super(InnerClassMeta, cls).__new__(cls, name, bases, class_dict)
def __get__(self, obj, obj_type):
outer_class, outer_name = self.__bind(obj_type)
if obj is None:
return self
assert isinstance(obj, outer_class)
try:
return obj.__dict__[outer_name]
except KeyError:
# Instantiate lazily; delegate if the instance is itself a
# descriptor.
inner = self(obj)
try:
getter = inner.__get__
except AttributeError:
return inner
else:
return getter(obj, obj_type)
def __set__(self, obj, value):
outer_class, outer_name = self.__bind(obj.__class__)
assert isinstance(obj, outer_class)
inner = self(obj)
try:
setter = inner.__set__
except AttributeError:
# Data descriptors without __set__ are read-only; plain inner
# objects store the value on the outer instance.
try:
inner.__delete__
except AttributeError:
obj.__dict__[outer_name] = value
else:
raise AttributeError('__set__')
else:
setter(obj, value)
def __delete__(self, obj):
outer_class, outer_name = self.__bind(obj.__class__)
assert isinstance(obj, outer_class)
inner = self(obj)
try:
deleter = inner.__delete__
except AttributeError:
# Mirror __set__: only plain inner objects allow deleting the
# stored value from the outer instance.
try:
inner.__set__
except AttributeError:
try:
del obj.__dict__[outer_name]
except KeyError:
raise AttributeError(outer_name)
else:
raise AttributeError('__delete__')
else:
deleter(obj)
def __bind(self, obj_type):
# Discover (once) which outer class attribute this inner class is
# assigned to by scanning the outer MRO, then cache the result.
try:
cls = self.__dict__['__outer_class__']
name = self.__dict__['__outer_name__']
except KeyError:
cls, name, value = None, None, None
for cls in obj_type.__mro__:
for name, value in six.iteritems(cls.__dict__):
if value is self:
break
if value is self:
break
assert value is self
self.__outer_class__ = cls
self.__outer_name__ = name
self.__name__ = '.'.join((cls.__name__, name))
self.__qualname__ = self.__name__
return cls, name

View File

@@ -29,7 +29,7 @@ import sys
import re
import copy
from ipapython.log_manager import LogManager, parse_log_level
from log_manager import LogManager, parse_log_level
#-------------------------------------------------------------------------------
@@ -180,12 +180,12 @@ def standard_logging_setup(filename=None, verbose=False, debug=False,
file_handler = dict(name='file',
filename=filename,
filemode=filemode,
permission=0o600,
permission=0600,
level='debug',
format=LOGGING_FORMAT_STANDARD_FILE)
handlers.append(file_handler)
if 'console' in log_mgr.handlers:
if log_mgr.handlers.has_key('console'):
log_mgr.remove_handler('console')
level = 'error'
if verbose:

Binary file not shown.

File diff suppressed because it is too large Load Diff

Binary file not shown.

View File

@@ -17,8 +17,6 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import string
import tempfile
import subprocess
@@ -27,41 +25,32 @@ import os, sys, traceback
import copy
import stat
import shutil
import urllib2
import socket
import struct
from types import *
import re
import xmlrpclib
import datetime
import netaddr
import time
import gssapi
import krbV
import pwd
import grp
from contextlib import contextmanager
import locale
import collections
from dns import resolver, rdatatype, reversename
from dns import resolver, rdatatype
from dns.exception import DNSException
import six
from six.moves import input
from six.moves import urllib
from ipapython.ipa_log_manager import *
from ipapython import ipavalidate
from ipapython import config
from ipaplatform.paths import paths
from ipapython.dn import DN
from ipapython.dnsutil import DNSName
SHARE_DIR = paths.USR_SHARE_IPA_DIR
PLUGINS_SHARE_DIR = paths.IPA_PLUGINS
GEN_PWD_LEN = 12
# Having this in krb_utils would cause circular import
KRB5_KDC_UNREACH = 2529639068 # Cannot contact any KDC for requested realm
KRB5KDC_ERR_SVC_UNAVAILABLE = 2529638941 # A service is not available that is
# required to process the request
IPA_BASEDN_INFO = 'ipa v2.0'
try:
from subprocess import CalledProcessError
@@ -121,7 +110,7 @@ class CheckedIPAddress(netaddr.IPAddress):
# netaddr.IPAddress doesn't handle zone indices in textual
# IPv6 addresses. Try removing zone index and parse the
# address again.
if not isinstance(addr, six.string_types):
if not isinstance(addr, basestring):
raise
addr, sep, foo = addr.partition('%')
if sep != '%':
@@ -155,10 +144,8 @@ class CheckedIPAddress(netaddr.IPAddress):
elif addr.version == 6:
family = 'inet6'
result = run(
[paths.IP, '-family', family, '-oneline', 'address', 'show'],
capture_output=True)
lines = result.output.split('\n')
ipresult = run([paths.IP, '-family', family, '-oneline', 'address', 'show'])
lines = ipresult[0].split('\n')
for line in lines:
fields = line.split()
if len(fields) < 4:
@@ -242,6 +229,7 @@ def template_file(infilename, vars):
with open(infilename) as f:
return template_str(f.read(), vars)
def copy_template_file(infilename, outfilename, vars):
"""Copy a file, performing template substitutions"""
txt = template_file(infilename, vars)
@@ -250,42 +238,20 @@ def copy_template_file(infilename, outfilename, vars):
def write_tmp_file(txt):
fd = tempfile.NamedTemporaryFile('w+')
fd = tempfile.NamedTemporaryFile()
fd.write(txt)
fd.flush()
return fd
def shell_quote(string):
if isinstance(string, str):
return "'" + string.replace("'", "'\\''") + "'"
else:
return b"'" + string.replace(b"'", b"'\\''") + b"'"
return "'" + string.replace("'", "'\\''") + "'"
if six.PY3:
def _log_arg(s):
"""Convert string or bytes to a string suitable for logging"""
if isinstance(s, bytes):
return s.decode(locale.getpreferredencoding(),
errors='replace')
else:
return s
else:
_log_arg = str
class _RunResult(collections.namedtuple('_RunResult',
'output error_output returncode')):
"""Result of ipautil.run"""
def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
capture_output=False, skip_output=False, cwd=None,
runas=None, timeout=None, suplementary_groups=[],
capture_error=False, encoding=None, redirect_output=False):
def run(args, stdin=None, raiseonerr=True,
nolog=(), env=None, capture_output=True, skip_output=False, cwd=None,
runas=None):
"""
Execute an external command.
Execute a command and return stdin, stdout and the process return code.
:param args: List of arguments for the command
:param stdin: Optional input to the command
@@ -306,67 +272,24 @@ def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
If a value isn't found in the list it is silently ignored.
:param env: Dictionary of environment variables passed to the command.
When None, current environment is copied
:param capture_output: Capture stdout
:param skip_output: Redirect the output to /dev/null and do not log it
:param capture_output: Capture stderr and stdout
:param skip_output: Redirect the output to /dev/null and do not capture it
:param cwd: Current working directory
:param runas: Name of a user that the command should be run as. The spawned
:param runas: Name of a user that the command shold be run as. The spawned
process will have both real and effective UID and GID set.
:param timeout: Timeout if the command hasn't returned within the specified
number of seconds.
:param suplementary_groups: List of group names that will be used as
suplementary groups for subporcess.
The option runas must be specified together with this option.
:param capture_error: Capture stderr
:param encoding: For Python 3, the encoding to use for output,
error_output, and (if it's not bytes) stdin.
If None, the current encoding according to locale is used.
:param redirect_output: Redirect (error) output to standard (error) output.
:return: An object with these attributes:
`returncode`: The process' exit status
`output` and `error_output`: captured output, as strings. Under
Python 3, these are encoded with the given `encoding`.
None unless `capture_output` or `capture_error`, respectively, are
given
`raw_output`, `raw_error_output`: captured output, as bytes.
`output_log` and `error_log`: The captured output, as strings, with any
unencodable characters discarded. These should only be used
for logging or error messages.
If skip_output is given, all output-related attributes on the result
(that is, all except `returncode`) are None.
For backwards compatibility, the return value can also be used as a
(output, error_output, returncode) triple.
"""
assert isinstance(suplementary_groups, list)
p_in = None
p_out = None
p_err = None
if isinstance(nolog, six.string_types):
if isinstance(nolog, basestring):
# We expect a tuple (or list, or other iterable) of nolog strings.
# Passing just a single string is bad: strings are iterable, so this
# Passing just a single string is bad: strings are also, so this
# would result in every individual character of that string being
# replaced by XXXXXXXX.
# This is a sanity check to prevent that.
raise ValueError('nolog must be a tuple of strings.')
if skip_output and (capture_output or capture_error):
raise ValueError('skip_output is incompatible with '
'capture_output or capture_error')
if redirect_output and (capture_output or capture_error):
raise ValueError('redirect_output is incompatible with '
'capture_output or capture_error')
if skip_output and redirect_output:
raise ValueError('skip_output is incompatible with redirect_output')
if env is None:
# copy default env
env = copy.deepcopy(os.environ)
@@ -375,53 +298,29 @@ def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
p_in = subprocess.PIPE
if skip_output:
p_out = p_err = open(paths.DEV_NULL, 'w')
elif redirect_output:
p_out = sys.stdout
p_err = sys.stderr
else:
elif capture_output:
p_out = subprocess.PIPE
p_err = subprocess.PIPE
if encoding is None:
encoding = locale.getpreferredencoding()
if six.PY3 and isinstance(stdin, str):
stdin = stdin.encode(encoding)
if timeout:
# If a timeout was provided, use the timeout command
# to execute the requested command.
args[0:0] = [paths.BIN_TIMEOUT, str(timeout)]
arg_string = nolog_replace(' '.join(_log_arg(a) for a in args), nolog)
arg_string = nolog_replace(' '.join(shell_quote(a) for a in args), nolog)
root_logger.debug('Starting external process')
root_logger.debug('args=%s' % arg_string)
preexec_fn = None
if runas is not None:
pent = pwd.getpwnam(runas)
suplementary_gids = [
grp.getgrnam(group).gr_gid for group in suplementary_groups
]
root_logger.debug('runas=%s (UID %d, GID %s)', runas,
pent.pw_uid, pent.pw_gid)
if suplementary_groups:
for group, gid in zip(suplementary_groups, suplementary_gids):
root_logger.debug('suplementary_group=%s (GID %d)', group, gid)
preexec_fn = lambda: (
os.setgroups(suplementary_gids),
os.setregid(pent.pw_gid, pent.pw_gid),
os.setreuid(pent.pw_uid, pent.pw_uid),
)
preexec_fn = lambda: (os.setregid(pent.pw_gid, pent.pw_gid),
os.setreuid(pent.pw_uid, pent.pw_uid))
try:
p = subprocess.Popen(args, stdin=p_in, stdout=p_out, stderr=p_err,
close_fds=True, env=env, cwd=cwd,
preexec_fn=preexec_fn)
stdout, stderr = p.communicate(stdin)
stdout,stderr = p.communicate(stdin)
stdout,stderr = str(stdout), str(stderr) # Make pylint happy
except KeyboardInterrupt:
root_logger.debug('Process interrupted')
p.wait()
@@ -433,66 +332,29 @@ def run(args, stdin=None, raiseonerr=True, nolog=(), env=None,
if skip_output:
p_out.close() # pylint: disable=E1103
if timeout and p.returncode == 124:
root_logger.debug('Process did not complete before timeout')
root_logger.debug('Process finished, return code=%s', p.returncode)
# The command and its output may include passwords that we don't want
# to log. Replace those.
if skip_output or redirect_output:
output_log = None
error_log = None
else:
if six.PY3:
output_log = stdout.decode(locale.getpreferredencoding(),
errors='replace')
else:
output_log = stdout
if six.PY3:
error_log = stderr.decode(locale.getpreferredencoding(),
errors='replace')
else:
error_log = stderr
output_log = nolog_replace(output_log, nolog)
root_logger.debug('stdout=%s' % output_log)
error_log = nolog_replace(error_log, nolog)
root_logger.debug('stderr=%s' % error_log)
if capture_output:
if six.PY2:
output = stdout
else:
output = stdout.decode(encoding)
else:
output = None
if capture_error:
if six.PY2:
error_output = stderr
else:
error_output = stderr.decode(encoding)
else:
error_output = None
if capture_output and not skip_output:
stdout = nolog_replace(stdout, nolog)
stderr = nolog_replace(stderr, nolog)
root_logger.debug('stdout=%s' % stdout)
root_logger.debug('stderr=%s' % stderr)
if p.returncode != 0 and raiseonerr:
raise CalledProcessError(p.returncode, arg_string, str(output))
raise CalledProcessError(p.returncode, arg_string, stdout)
result = _RunResult(output, error_output, p.returncode)
result.raw_output = stdout
result.raw_error_output = stderr
result.output_log = output_log
result.error_log = error_log
return result
return (stdout, stderr, p.returncode)
def nolog_replace(string, nolog):
"""Replace occurences of strings given in `nolog` with XXXXXXXX"""
for value in nolog:
if not isinstance(value, six.string_types):
if not isinstance(value, basestring):
continue
quoted = urllib.parse.quote(value)
quoted = urllib2.quote(value)
shquoted = shell_quote(value)
for nolog_value in (shquoted, value, quoted):
string = string.replace(nolog_value, 'XXXXXXXX')
@@ -528,18 +390,18 @@ def backup_file(fname):
if file_exists(fname):
os.rename(fname, fname + ".orig")
def _ensure_nonempty_string(string, message):
    """Raise ValueError with *message* unless *string* is a non-empty str."""
    if not (isinstance(string, str) and string):
        raise ValueError(message)
# uses gpg to compress and encrypt a file
def encrypt_file(source, dest, password, workdir = None):
_ensure_nonempty_string(source, 'Missing Source File')
if type(source) is not StringType or not len(source):
raise ValueError('Missing Source File')
#stat it so that we get back an exception if it does no t exist
os.stat(source)
_ensure_nonempty_string(dest, 'Missing Destination File')
_ensure_nonempty_string(password, 'Missing Password')
if type(dest) is not StringType or not len(dest):
raise ValueError('Missing Destination File')
if type(password) is not StringType or not len(password):
raise ValueError('Missing Password')
#create a tempdir so that we can clean up with easily
tempdir = tempfile.mkdtemp('', 'ipa-', workdir)
@@ -560,12 +422,16 @@ def encrypt_file(source, dest, password, workdir = None):
def decrypt_file(source, dest, password, workdir = None):
_ensure_nonempty_string(source, 'Missing Source File')
if type(source) is not StringType or not len(source):
raise ValueError('Missing Source File')
#stat it so that we get back an exception if it does no t exist
os.stat(source)
_ensure_nonempty_string(dest, 'Missing Destination File')
_ensure_nonempty_string(password, 'Missing Password')
if type(dest) is not StringType or not len(dest):
raise ValueError('Missing Destination File')
if type(password) is not StringType or not len(password):
raise ValueError('Missing Password')
#create a tempdir so that we can clean up with easily
tempdir = tempfile.mkdtemp('', 'ipa-', workdir)
@@ -645,15 +511,14 @@ class CIDict(dict):
for key in keys():
self.__setitem__(key, new[key], seen)
seen = set()
for key, value in kwargs.items():
for key, value in kwargs.iteritems():
self.__setitem__(key, value, seen)
def __contains__(self, key):
return super(CIDict, self).__contains__(key.lower())
if six.PY2:
def has_key(self, key):
return super(CIDict, self).has_key(key.lower())
def has_key(self, key):
return super(CIDict, self).has_key(key.lower())
def get(self, key, failobj=None):
try:
@@ -662,38 +527,29 @@ class CIDict(dict):
return failobj
def __iter__(self):
return six.itervalues(self._keys)
return self._keys.itervalues()
def keys(self):
if six.PY2:
return list(self.iterkeys())
else:
return self.iterkeys()
return list(self.iterkeys())
def items(self):
if six.PY2:
return list(self.iteritems())
else:
return self.iteritems()
return list(self.iteritems())
def values(self):
if six.PY2:
return list(self.itervalues())
else:
return self.itervalues()
return list(self.itervalues())
def copy(self):
"""Returns a shallow copy of this CIDict"""
return CIDict(list(self.items()))
return CIDict(self.items())
def iteritems(self):
return ((k, self[k]) for k in six.itervalues(self._keys))
return ((k, self[k]) for k in self._keys.itervalues())
def iterkeys(self):
return six.itervalues(self._keys)
return self._keys.itervalues()
def itervalues(self):
return (v for k, v in six.iteritems(self))
return (v for k, v in self.iteritems())
def setdefault(self, key, value=None):
try:
@@ -863,57 +719,60 @@ def ipa_generate_password(characters=None,pwd_len=None):
def user_input(prompt, default = None, allow_empty = True):
if default == None:
while True:
try:
ret = input("%s: " % prompt)
if allow_empty or ret.strip():
return ret.strip()
except EOFError:
if allow_empty:
return ''
raise RuntimeError("Failed to get user input")
ret = raw_input("%s: " % prompt)
if allow_empty or ret.strip():
return ret
if isinstance(default, six.string_types):
if isinstance(default, basestring):
while True:
try:
ret = input("%s [%s]: " % (prompt, default))
if not ret and (allow_empty or default):
return default
elif ret.strip():
return ret.strip()
except EOFError:
ret = raw_input("%s [%s]: " % (prompt, default))
if not ret and (allow_empty or default):
return default
elif ret.strip():
return ret
if isinstance(default, bool):
choice = "yes" if default else "no"
if default:
choice = "yes"
else:
choice = "no"
while True:
try:
ret = input("%s [%s]: " % (prompt, choice))
ret = ret.strip()
if not ret:
return default
elif ret.lower()[0] == "y":
return True
elif ret.lower()[0] == "n":
return False
except EOFError:
ret = raw_input("%s [%s]: " % (prompt, choice))
if not ret:
return default
elif ret.lower()[0] == "y":
return True
elif ret.lower()[0] == "n":
return False
if isinstance(default, int):
while True:
try:
ret = input("%s [%s]: " % (prompt, default))
ret = ret.strip()
ret = raw_input("%s [%s]: " % (prompt, default))
if not ret:
return default
ret = int(ret)
except ValueError:
pass
except EOFError:
return default
else:
return ret
def get_gsserror(e):
    """
    Extract the (major, minor) status codes from a GSSError exception.

    A GSSError exception looks differently in python 2.4 than it does
    in python 2.5: newer versions expose the codes as ``e[0]``/``e[1]``,
    older ones nest them inside the first element as ``e[0][0]``/``e[0][1]``.

    :param e: the raised GSSError (any indexable exception object)
    :return: tuple of (major, minor) status codes
    """
    try:
        major = e[0]
        minor = e[1]
    # Narrowed from a bare ``except:`` — only the failures the fallback is
    # meant to handle; anything else (e.g. KeyboardInterrupt) propagates.
    except (IndexError, TypeError):
        # Older layout: both codes nested inside the first element.
        major = e[0][0]
        minor = e[0][1]
    return (major, minor)
def host_port_open(host, port, socket_type=socket.SOCK_STREAM, socket_timeout=None):
for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket_type):
af, socktype, proto, canonname, sa = res
@@ -934,7 +793,7 @@ def host_port_open(host, port, socket_type=socket.SOCK_STREAM, socket_timeout=No
s.recv(512)
return True
except socket.error as e:
except socket.error, e:
pass
finally:
if s:
@@ -955,14 +814,14 @@ def bind_port_responder(port, socket_type=socket.SOCK_STREAM, socket_timeout=Non
try:
addr_infos = socket.getaddrinfo(host, port, family, socket_type, 0,
socket.AI_PASSIVE)
except socket.error as e:
except socket.error, e:
last_socket_error = e
continue
for res in addr_infos:
af, socktype, proto, canonname, sa = res
try:
s = socket.socket(af, socktype, proto)
except socket.error as e:
except socket.error, e:
last_socket_error = e
s = None
continue
@@ -1001,7 +860,7 @@ def bind_port_responder(port, socket_type=socket.SOCK_STREAM, socket_timeout=Non
# Timeout is expectable as it was requested by caller, raise
# the exception back to him
raise
except socket.error as e:
except socket.error, e:
last_socket_error = e
s.close()
s = None
@@ -1013,6 +872,16 @@ def bind_port_responder(port, socket_type=socket.SOCK_STREAM, socket_timeout=Non
if s is None and last_socket_error is not None:
raise last_socket_error # pylint: disable=E0702
def is_host_resolvable(fqdn):
    """Return True if *fqdn* resolves to at least one A or AAAA record."""
    for record_type in (rdatatype.A, rdatatype.AAAA):
        try:
            resolver.query(fqdn, record_type)
        except DNSException:
            # This record type did not resolve; try the next one.
            continue
        return True
    return False
def host_exists(host):
"""
@@ -1027,112 +896,45 @@ def host_exists(host):
else:
return True
def reverse_record_exists(ip_address):
def get_ipa_basedn(conn):
"""
Checks if IP address have some reverse record somewhere.
Does not care where it points.
Get base DN of IPA suffix in given LDAP server.
Returns True/False
None is returned if the suffix is not found
:param conn: Bound LDAPClient that will be used for searching
"""
reverse = reversename.from_address(str(ip_address))
try:
resolver.query(reverse, "PTR")
except DNSException:
# really don't care what exception, PTR is simply unresolvable
return False
return True
entry = conn.get_entry(
DN(), attrs_list=['defaultnamingcontext', 'namingcontexts'])
# FIXME: import ipalib here to prevent import loops
from ipalib import errors
def check_zone_overlap(zone, raise_on_error=True):
root_logger.info("Checking DNS domain %s, please wait ..." % zone)
if not isinstance(zone, DNSName):
zone = DNSName(zone).make_absolute()
# automatic empty zones always exist so checking them is pointless,
# do not report them to avoid meaningless error messages
if is_auto_empty_zone(zone):
return
try:
containing_zone = resolver.zone_for_name(zone)
except DNSException as e:
msg = ("DNS check for domain %s failed: %s." % (zone, e))
if raise_on_error:
raise ValueError(msg)
else:
root_logger.warning(msg)
return
if containing_zone == zone:
contexts = entry['namingcontexts']
if 'defaultnamingcontext' in entry:
# If there is a defaultNamingContext examine that one first
default = entry.single_value['defaultnamingcontext']
if default in contexts:
contexts.remove(default)
contexts.insert(0, default)
for context in contexts:
root_logger.debug("Check if naming context '%s' is for IPA" % context)
try:
ns = [ans.to_text() for ans in resolver.query(zone, 'NS')]
except DNSException as e:
root_logger.debug("Failed to resolve nameserver(s) for domain"
" {0}: {1}".format(zone, e))
ns = []
msg = u"DNS zone {0} already exists in DNS".format(zone)
if ns:
msg += u" and is handled by server(s): {0}".format(', '.join(ns))
raise ValueError(msg)
def is_auto_empty_zone(zone):
    """
    Return True if *zone* is one of the automatic empty zones
    (reverse zones for special-use address ranges that name servers
    answer for locally).

    :param zone: absolute DNSName instance to check
    """
    assert isinstance(zone, DNSName)
    names = [
        # RFC 1918 private address space
        "10.IN-ADDR.ARPA",
        "168.192.IN-ADDR.ARPA",
    ]
    # RFC 1918: 172.16.0.0/12 -> 16.172 ... 31.172
    names += ["%d.172.IN-ADDR.ARPA" % octet for octet in range(16, 32)]
    # RFC 6598 shared address space: 100.64.0.0/10 -> 64.100 ... 127.100
    names += ["%d.100.IN-ADDR.ARPA" % octet for octet in range(64, 128)]
    # RFC 5735 and RFC 5737 special-use IPv4 addresses
    names += [
        "0.IN-ADDR.ARPA", "127.IN-ADDR.ARPA", "254.169.IN-ADDR.ARPA",
        "2.0.192.IN-ADDR.ARPA", "100.51.198.IN-ADDR.ARPA",
        "113.0.203.IN-ADDR.ARPA", "255.255.255.255.IN-ADDR.ARPA",
    ]
    names += [
        # Local IPv6 Unicast Addresses (loopback, unspecified)
        "0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0."
        "IP6.ARPA",
        "1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0."
        "IP6.ARPA",
        # LOCALLY ASSIGNED LOCAL ADDRESS SCOPE
        "D.F.IP6.ARPA", "8.E.F.IP6.ARPA", "9.E.F.IP6.ARPA",
        "A.E.F.IP6.ARPA", "B.E.F.IP6.ARPA",
        # Example Prefix, RFC 3849.
        "8.B.D.0.1.0.0.2.IP6.ARPA",
        # RFC 7534
        "EMPTY.AS112.ARPA",
    ]
    automatic_empty_zones = [DNSName(name).make_absolute() for name in names]
    return zone in automatic_empty_zones
[entry] = conn.get_entries(
DN(context), conn.SCOPE_BASE, "(info=IPA*)")
except errors.NotFound:
root_logger.debug("LDAP server did not return info attribute to "
"check for IPA version")
continue
info = entry.single_value['info'].lower()
if info != IPA_BASEDN_INFO:
root_logger.debug("Detected IPA server version (%s) did not match the client (%s)" \
% (info, IPA_BASEDN_INFO))
continue
root_logger.debug("Naming context '%s' is a valid IPA context" % context)
return DN(context)
return None
def config_replace_variables(filepath, replacevars=dict(), appendvars=dict()):
"""
@@ -1348,7 +1150,7 @@ def wait_for_open_ports(host, ports, timeout=0):
if port_open:
break
if timeout and time.time() > op_timeout: # timeout exceeded
raise socket.timeout("Timeout exceeded")
raise socket.timeout()
time.sleep(1)
def wait_for_open_socket(socket_name, timeout=0):
@@ -1365,7 +1167,7 @@ def wait_for_open_socket(socket_name, timeout=0):
s.connect(socket_name)
s.close()
break
except socket.error as e:
except socket.error, e:
if e.errno in (2,111): # 111: Connection refused, 2: File not found
if timeout and time.time() > op_timeout: # timeout exceeded
raise e
@@ -1373,76 +1175,27 @@ def wait_for_open_socket(socket_name, timeout=0):
else:
raise e
def kinit_keytab(principal, keytab, ccache_name, config=None, attempts=1):
def kinit_hostprincipal(keytab, ccachedir, principal):
"""
Given a ccache_path, keytab file and a principal kinit as that user.
Given a ccache directory and a principal kinit as that user.
The optional parameter 'attempts' specifies how many times the credential
initialization should be attempted in case of non-responsive KDC.
This blindly overwrites the current CCNAME so if you need to save
it do so before calling this function.
Thus far this is used to kinit as the local host.
"""
errors_to_retry = {KRB5KDC_ERR_SVC_UNAVAILABLE,
KRB5_KDC_UNREACH}
root_logger.debug("Initializing principal %s using keytab %s"
% (principal, keytab))
root_logger.debug("using ccache %s" % ccache_name)
for attempt in range(1, attempts + 1):
old_config = os.environ.get('KRB5_CONFIG')
if config is not None:
os.environ['KRB5_CONFIG'] = config
else:
os.environ.pop('KRB5_CONFIG', None)
try:
name = gssapi.Name(principal, gssapi.NameType.kerberos_principal)
store = {'ccache': ccache_name,
'client_keytab': keytab}
cred = gssapi.Credentials(name=name, store=store, usage='initiate')
root_logger.debug("Attempt %d/%d: success"
% (attempt, attempts))
return cred
except gssapi.exceptions.GSSError as e:
if e.min_code not in errors_to_retry: # pylint: disable=no-member
raise
root_logger.debug("Attempt %d/%d: failed: %s"
% (attempt, attempts, e))
if attempt == attempts:
root_logger.debug("Maximum number of attempts (%d) reached"
% attempts)
raise
root_logger.debug("Waiting 5 seconds before next retry")
time.sleep(5)
finally:
if old_config is not None:
os.environ['KRB5_CONFIG'] = old_config
else:
os.environ.pop('KRB5_CONFIG', None)
def kinit_password(principal, password, ccache_name, config=None,
armor_ccache_name=None):
"""
perform interactive kinit as principal using password. If using FAST for
web-based authentication, use armor_ccache_path to specify http service
ccache.
"""
root_logger.debug("Initializing principal %s using password" % principal)
args = [paths.KINIT, principal, '-c', ccache_name]
if armor_ccache_name is not None:
root_logger.debug("Using armor ccache %s for FAST webauth"
% armor_ccache_name)
args.extend(['-T', armor_ccache_name])
env = {'LC_ALL': 'C'}
if config is not None:
env['KRB5_CONFIG'] = config
# this workaround enables us to capture stderr and put it
# into the raised exception in case of unsuccessful authentication
result = run(args, stdin=password, env=env, raiseonerr=False,
capture_error=True)
if result.returncode:
raise RuntimeError(result.error_output)
try:
ccache_file = 'FILE:%s/ccache' % ccachedir
krbcontext = krbV.default_context()
ktab = krbV.Keytab(name=keytab, context=krbcontext)
princ = krbV.Principal(name=principal, context=krbcontext)
os.environ['KRB5CCNAME'] = ccache_file
ccache = krbV.CCache(name=ccache_file, context=krbcontext, primary_principal=princ)
ccache.init(princ)
ccache.init_creds_keytab(keytab=ktab, principal=princ)
return ccache_file
except krbV.Krb5Error, e:
raise StandardError('Error initializing principal %s in %s: %s' % (principal, keytab, str(e)))
def dn_attribute_property(private_name):
'''
@@ -1483,77 +1236,5 @@ def restore_hostname(statestore):
if old_hostname is not None and old_hostname != system_hostname:
try:
run([paths.BIN_HOSTNAME, old_hostname])
except CalledProcessError as e:
print("Failed to set this machine hostname back to %s: %s" % (old_hostname, str(e)), file=sys.stderr)
def posixify(string):
    """
    Convert a string to a stricter alpha-numeric representation.

    - Alpha-numeric, underscore, dot and dash characters are kept
    - Space is converted to underscore
    - Any other character is dropped
    - A leading dash is stripped

    Note: This mapping is not one-to-one and may map different input to the
    same result. When using posixify, make sure the you do not map two
    different entities to one unintentionally.
    """
    extra_allowed = ('_', '.', '-')
    # Replace spaces first, then drop every character that is neither
    # alpha-numeric nor explicitly allowed.
    kept = [ch for ch in string.replace(' ', '_')
            if ch.isalnum() or ch in extra_allowed]
    # Leading dash is not allowed
    return ''.join(kept).lstrip('-')
@contextmanager
def private_ccache(path=None):
    """
    Temporarily point KRB5CCNAME at a private credential cache.

    If *path* is None a temporary directory is created and the ccache is
    placed inside it; the ccache file (and the temporary directory, when
    one was created) is removed on exit.  The previous KRB5CCNAME value
    is restored when the context exits.
    """
    temp_dir = None
    if path is None:
        temp_dir = tempfile.mkdtemp(prefix='krbcc')
        path = os.path.join(temp_dir, 'ccache')

    previous = os.environ.get('KRB5CCNAME', None)
    os.environ['KRB5CCNAME'] = path
    try:
        yield
    finally:
        # Restore the caller's environment exactly as it was.
        if previous is None:
            os.environ.pop('KRB5CCNAME', None)
        else:
            os.environ['KRB5CCNAME'] = previous
        if os.path.exists(path):
            os.remove(path)
        if temp_dir is not None:
            try:
                os.rmdir(temp_dir)
            except OSError:
                # Best effort: leave the directory if it is not empty.
                pass
# Python 2 has no os.fsdecode(); provide a compatible shim there and use
# the stdlib implementation directly on Python 3.
if six.PY2:
    def fsdecode(value):
        """
        Decode argument using the file system encoding, as returned by
        `sys.getfilesystemencoding()`.

        :param value: bytes or text; text is returned unchanged
        :raises TypeError: if *value* is neither bytes nor text
        """
        if isinstance(value, six.binary_type):
            return value.decode(sys.getfilesystemencoding())
        elif isinstance(value, six.text_type):
            return value
        else:
            raise TypeError("expect {0} or {1}, not {2}".format(
                six.binary_type.__name__,
                six.text_type.__name__,
                type(value).__name__))
else:
    fsdecode = os.fsdecode #pylint: disable=no-member
except CalledProcessError, e:
print >>sys.stderr, "Failed to set this machine hostname back to %s: %s" % (old_hostname, str(e))

Binary file not shown.

Binary file not shown.

View File

@@ -36,29 +36,24 @@ def dump_keys():
"""
Dump all keys
"""
result = run(['keyctl', 'list', KEYRING], raiseonerr=False,
capture_output=True)
return result.output
(stdout, stderr, rc) = run(['keyctl', 'list', KEYRING], raiseonerr=False)
return stdout
def get_real_key(key):
"""
One cannot request a key based on the description it was created with
so find the one we're looking for.
"""
assert isinstance(key, str)
result = run(['keyctl', 'search', KEYRING, KEYTYPE, key],
raiseonerr=False, capture_output=True)
if result.returncode:
(stdout, stderr, rc) = run(['keyctl', 'search', KEYRING, KEYTYPE, key], raiseonerr=False)
if rc:
raise ValueError('key %s not found' % key)
return result.raw_output.rstrip()
return stdout.rstrip()
def get_persistent_key(key):
assert isinstance(key, str)
result = run(['keyctl', 'get_persistent', KEYRING, key],
raiseonerr=False, capture_output=True)
if result.returncode:
(stdout, stderr, rc) = run(['keyctl', 'get_persistent', KEYRING, key], raiseonerr=False)
if rc:
raise ValueError('persistent key %s not found' % key)
return result.raw_output.rstrip()
return stdout.rstrip()
def is_persistent_keyring_supported():
uid = os.geteuid()
@@ -73,7 +68,6 @@ def has_key(key):
"""
Returns True/False whether the key exists in the keyring.
"""
assert isinstance(key, str)
try:
get_real_key(key)
return True
@@ -86,27 +80,22 @@ def read_key(key):
Use pipe instead of print here to ensure we always get the raw data.
"""
assert isinstance(key, str)
real_key = get_real_key(key)
result = run(['keyctl', 'pipe', real_key], raiseonerr=False,
capture_output=True)
if result.returncode:
raise ValueError('keyctl pipe failed: %s' % result.error_log)
(stdout, stderr, rc) = run(['keyctl', 'pipe', real_key], raiseonerr=False)
if rc:
raise ValueError('keyctl pipe failed: %s' % stderr)
return result.raw_output
return stdout
def update_key(key, value):
"""
Update the keyring data. If they key doesn't exist it is created.
"""
assert isinstance(key, str)
assert isinstance(value, bytes)
if has_key(key):
real_key = get_real_key(key)
result = run(['keyctl', 'pupdate', real_key], stdin=value,
raiseonerr=False)
if result.returncode:
raise ValueError('keyctl pupdate failed: %s' % result.error_log)
(stdout, stderr, rc) = run(['keyctl', 'pupdate', real_key], stdin=value, raiseonerr=False)
if rc:
raise ValueError('keyctl pupdate failed: %s' % stderr)
else:
add_key(key, value)
@@ -114,22 +103,17 @@ def add_key(key, value):
"""
Add a key to the kernel keyring.
"""
assert isinstance(key, str)
assert isinstance(value, bytes)
if has_key(key):
raise ValueError('key %s already exists' % key)
result = run(['keyctl', 'padd', KEYTYPE, key, KEYRING],
stdin=value, raiseonerr=False)
if result.returncode:
raise ValueError('keyctl padd failed: %s' % result.error_log)
(stdout, stderr, rc) = run(['keyctl', 'padd', KEYTYPE, key, KEYRING], stdin=value, raiseonerr=False)
if rc:
raise ValueError('keyctl padd failed: %s' % stderr)
def del_key(key):
"""
Remove a key from the keyring
"""
assert isinstance(key, str)
real_key = get_real_key(key)
result = run(['keyctl', 'unlink', real_key, KEYRING],
raiseonerr=False)
if result.returncode:
raise ValueError('keyctl unlink failed: %s' % result.error_log)
(stdout, stderr, rc) = run(['keyctl', 'unlink', real_key, KEYRING], raiseonerr=False)
if rc:
raise ValueError('keyctl unlink failed: %s' % stderr)

Binary file not shown.

View File

@@ -172,7 +172,7 @@ FAQ (Frequently Asked Questions)
See above. Logger's will never have a level less than the level of
the handlers visible to the logger. If there are no handlers then
loggers can't output anything so their level is set to maxsize.
loggers can't output anything so their level is set to maxint.
#. **I set the default_level but all the loggers are configured
at INFO or DEBUG, what happened?**
@@ -500,7 +500,6 @@ bewildering difficult to get it do what I wanted.
John Dennis <jdennis@redhat.com>
'''
from __future__ import print_function
#-------------------------------------------------------------------------------
import sys
@@ -510,8 +509,6 @@ import logging
import re
import time
import six
#-------------------------------------------------------------------------------
# Default format
LOGGING_DEFAULT_FORMAT = '%(levelname)s %(message)s'
@@ -547,7 +544,7 @@ def get_unique_levels(iterable):
levels = set()
for obj in iterable:
level = getattr(obj, 'level', sys.maxsize)
level = getattr(obj, 'level', sys.maxint)
if level != logging.NOTSET:
levels.add(level)
levels = list(levels)
@@ -557,7 +554,7 @@ def get_unique_levels(iterable):
def get_minimum_level(iterable):
'''
Given a iterable of objects containing a logging level return the
minimum level. If no levels are defined return maxsize.
minimum level. If no levels are defined return maxint.
set of unique levels.
:parameters:
@@ -566,10 +563,10 @@ def get_minimum_level(iterable):
:returns:
Ordered list (min to max) of unique levels.
'''
min_level = sys.maxsize
min_level = sys.maxint
for obj in iterable:
level = getattr(obj, 'level', sys.maxsize)
level = getattr(obj, 'level', sys.maxint)
if level != logging.NOTSET:
if level < min_level:
min_level = level
@@ -603,7 +600,7 @@ def parse_log_level(level):
'''
# Is it a string representation of an integer?
# If so convert to an int.
if isinstance(level, six.string_types):
if isinstance(level, basestring):
try:
level = int(level)
except:
@@ -611,7 +608,7 @@ def parse_log_level(level):
# If it's a string lookup it's name and map to logging level
# otherwise validate the integer value is in range.
if isinstance(level, six.string_types):
if isinstance(level, basestring):
result = log_level_name_map.get(level.lower()) #pylint: disable=E1103
if result is None:
raise ValueError('unknown log level (%s)' % level)
@@ -939,7 +936,7 @@ class LogManager(object):
if value is not None:
try:
level = parse_log_level(value)
except Exception as e:
except Exception, e:
raise ValueError("could not set %s (%s)" % (attr, e))
setattr(self, attr, level)
@@ -962,7 +959,7 @@ class LogManager(object):
if not isinstance(config, dict):
raise TypeError('expected dict for handler config, got "%s"', type(config))
if isinstance(logger, six.string_types):
if isinstance(logger, basestring):
logger = self.get_logger(logger)
else:
if not isinstance(logger, logging.Logger):
@@ -1151,7 +1148,7 @@ class LogManager(object):
stream = cfg.get("stream")
log_handler = cfg.get("log_handler")
if filename:
if "stream" in cfg:
if cfg.has_key("stream"):
raise ValueError("both filename and stream are specified, must be one or the other, config: %s" % cfg)
path = os.path.abspath(filename)
filemode = cfg.get('filemode', 'a')
@@ -1170,7 +1167,7 @@ class LogManager(object):
user = cfg.get('user')
group = cfg.get('group')
if user is not None:
if isinstance(user, six.string_types):
if isinstance(user, basestring):
pw = pwd.getpwnam(user)
uid = pw.pw_uid
elif isinstance(user, int):
@@ -1178,7 +1175,7 @@ class LogManager(object):
else:
raise TypeError("user (%s) is not int or basestring" % user)
if group is not None:
if isinstance(group, six.string_types):
if isinstance(group, basestring):
pw = pwd.getpwnam(group)
gid = pw.pw_gid
elif isinstance(group, int):
@@ -1219,7 +1216,7 @@ class LogManager(object):
datefmt = cfg.get("datefmt", None)
formatter = logging.Formatter(format, datefmt)
time_zone_converter = cfg.get('time_zone_converter', time.localtime)
if isinstance(time_zone_converter, six.string_types):
if isinstance(time_zone_converter, basestring):
converter = {'local' : time.localtime,
'localtime' : time.localtime,
'gmt' : time.gmtime,
@@ -1242,8 +1239,8 @@ class LogManager(object):
if level is not None:
try:
level = parse_log_level(level)
except Exception as e:
print('could not set handler log level "%s" (%s)' % (level, e), file=sys.stderr)
except Exception, e:
print >>sys.stderr, 'could not set handler log level "%s" (%s)' % (level, e)
level = None
if level is None:
level = self.default_level
@@ -1313,7 +1310,7 @@ class LogManager(object):
List of loggers with the handler is bound to.
'''
if isinstance(handler, six.string_types):
if isinstance(handler, basestring):
handler = self.get_handler(handler)
elif isinstance(handler, logging.Handler):
if not handler in self.handlers.values():
@@ -1347,7 +1344,7 @@ class LogManager(object):
use configure_state to track the state of the log manager.
'''
if isinstance(handler, six.string_types):
if isinstance(handler, basestring):
handler = self.get_handler(handler)
elif not isinstance(handler, logging.Handler):
raise TypeError('handler must be basestring or Handler object, got %s' % type(handler))
@@ -1442,7 +1439,7 @@ class LogManager(object):
:return:
The minimum of all the handler's levels. If no
handlers are defined sys.maxsize will be returned.
handlers are defined sys.maxint will be returned.
'''
handlers = self.get_logger_handlers(logger)
@@ -1525,7 +1522,7 @@ class LogManager(object):
'''
is_object = False
if isinstance(who, six.string_types):
if isinstance(who, basestring):
obj_name = who
else:
is_object = True

Binary file not shown.

View File

@@ -18,9 +18,8 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import print_function
import sys
import httplib
import getpass
import socket
from ipapython.ipa_log_manager import *
@@ -30,24 +29,16 @@ import nss.io as io
import nss.nss as nss
import nss.ssl as ssl
import nss.error as error
# Python 3 rename. The package is available in "six.moves.http_client", but
# pylint cannot handle classes from that alias
try:
import httplib
except ImportError:
import http.client as httplib
from ipaplatform.paths import paths
# NSS database currently open
current_dbdir = None
def auth_certificate_callback(sock, check_sig, is_server, certdb):
cert_is_valid = False
cert = sock.get_peer_certificate()
root_logger.debug("auth_certificate_callback: check_sig=%s is_server=%s\n%s",
check_sig, is_server, str(cert))
pin_args = sock.get_pkcs11_pin_arg()
if pin_args is None:
pin_args = ()
@@ -65,10 +56,8 @@ def auth_certificate_callback(sock, check_sig, is_server, certdb):
# will be set to the error code matching the reason why the validation failed
# and the strerror attribute will contain a string describing the reason.
approved_usage = cert.verify_now(certdb, check_sig, intended_usage, *pin_args)
except Exception as e:
root_logger.error(
'cert validation failed for "%s" (%s)', cert.subject,
e.strerror) # pylint: disable=no-member
except Exception, e:
root_logger.error('cert validation failed for "%s" (%s)', cert.subject, e.strerror)
cert_is_valid = False
return cert_is_valid
@@ -96,10 +85,9 @@ def auth_certificate_callback(sock, check_sig, is_server, certdb):
try:
# If the cert fails validation it will raise an exception
cert_is_valid = cert.verify_hostname(hostname)
except Exception as e:
except Exception, e:
root_logger.error('failed verifying socket hostname "%s" matches cert subject "%s" (%s)',
hostname, cert.subject,
e.strerror) # pylint: disable=no-member
hostname, cert.subject, e.strerror)
cert_is_valid = False
return cert_is_valid
@@ -168,7 +156,7 @@ class NSSAddressFamilyFallback(object):
self._create_socket()
self.sock.connect(net_addr)
return
except Exception as e:
except Exception, e:
root_logger.debug("Could not connect socket to %s, error: %s",
net_addr, str(e))
root_logger.debug("Try to continue with next family...")
@@ -183,8 +171,7 @@ class NSSConnection(httplib.HTTPConnection, NSSAddressFamilyFallback):
default_port = httplib.HTTPSConnection.default_port
def __init__(self, host, port=None, strict=None,
dbdir=None, family=socket.AF_UNSPEC, no_init=False,
tls_version_min='tls1.1', tls_version_max='tls1.2'):
dbdir=None, family=socket.AF_UNSPEC, no_init=False):
"""
:param host: the server to connect to
:param port: the port to use (default is set in HTTPConnection)
@@ -193,37 +180,25 @@ class NSSConnection(httplib.HTTPConnection, NSSAddressFamilyFallback):
:param no_init: do not initialize the NSS database. This requires
that the database has already been initialized or
the request will fail.
:param tls_min_version: mininum version of SSL/TLS supported
:param tls_max_version: maximum version of SSL/TLS supported.
"""
httplib.HTTPConnection.__init__(self, host, port, strict)
NSSAddressFamilyFallback.__init__(self, family)
if not dbdir:
raise RuntimeError("dbdir is required")
root_logger.debug('%s init %s', self.__class__.__name__, host)
# If initialization is requested, initialize the new database.
if not no_init:
if nss.nss_is_initialized():
ssl.clear_session_cache()
try:
nss.nss_shutdown()
except NSPRError as e:
if e.errno != error.SEC_ERROR_NOT_INITIALIZED:
raise e
if not dbdir:
raise RuntimeError("dbdir is required")
nss.nss_init(dbdir)
global current_dbdir
current_dbdir = dbdir
if not no_init and nss.nss_is_initialized():
# close any open NSS database and use the new one
ssl.clear_session_cache()
try:
nss.nss_shutdown()
except NSPRError, e:
if e.errno != error.SEC_ERROR_NOT_INITIALIZED:
raise e
nss.nss_init(dbdir)
ssl.set_domestic_policy()
nss.set_password_callback(self.password_callback)
self.tls_version_min = str(tls_version_min)
self.tls_version_max = str(tls_version_max)
def _create_socket(self):
# TODO: remove the try block once python-nss is guaranteed to contain
@@ -243,11 +218,6 @@ class NSSConnection(httplib.HTTPConnection, NSSAddressFamilyFallback):
self.sock = ssl.SSLSocket(family=self.family)
self.sock.set_ssl_option(ssl.SSL_SECURITY, True)
self.sock.set_ssl_option(ssl.SSL_HANDSHAKE_AS_CLIENT, True)
try:
self.sock.set_ssl_version_range(self.tls_version_min, self.tls_version_max)
except NSPRError as e:
root_logger.error('Failed to set TLS range to %s, %s' % (self.tls_version_min, self.tls_version_max))
raise
self.sock.set_ssl_option(ssl_require_safe_negotiation, False)
self.sock.set_ssl_option(ssl_enable_renegotiation, ssl_renegotiate_requires_xtn)
# Provide a callback which notifies us when the SSL handshake is complete
@@ -266,11 +236,8 @@ class NSSConnection(httplib.HTTPConnection, NSSAddressFamilyFallback):
"""
Verify callback. If we get here then the certificate is ok.
"""
channel = sock.get_ssl_channel_info()
suite = ssl.get_cipher_suite_info(channel.cipher_suite)
root_logger.debug("handshake complete, peer = %s", sock.get_peer_name())
root_logger.debug('Protocol: %s' % channel.protocol_version_str.upper())
root_logger.debug('Cipher: %s' % suite.cipher_suite_name)
pass
def connect(self):
self.connect_socket(self.host, self.port)
@@ -291,7 +258,80 @@ class NSSConnection(httplib.HTTPConnection, NSSAddressFamilyFallback):
"""
try:
# FIXME: httplib uses old-style classes so super doesn't work
httplib.HTTPConnection.endheaders(self, message)
except NSPRError as e:
# Python 2.7 changed the API for endheaders. This is an attempt
# to work across versions
(major, minor, micro, releaselevel, serial) = sys.version_info
if major == 2 and minor < 7:
httplib.HTTPConnection.endheaders(self)
else:
httplib.HTTPConnection.endheaders(self, message)
except NSPRError, e:
self.close()
raise e
class NSSHTTPS(httplib.HTTP):
# We would like to use HTTP 1.1 not the older HTTP 1.0 but xmlrpclib
# and httplib do not play well together. httplib when the protocol
# is 1.1 will add a host header in the request. But xmlrpclib
# always adds a host header irregardless of the HTTP protocol
# version. That means the request ends up with 2 host headers,
# but Apache freaks out if it sees 2 host headers, a known Apache
# issue. httplib has a mechanism to skip adding the host header
# (i.e. skip_host in HTTPConnection.putrequest()) but xmlrpclib
# doesn't use it. Oh well, back to 1.0 :-(
#
#_http_vsn = 11
#_http_vsn_str = 'HTTP/1.1'
_connection_class = NSSConnection
def __init__(self, host='', port=None, strict=None, dbdir=None, no_init=False):
# provide a default host, pass the X509 cert info
# urf. compensate for bad input.
if port == 0:
port = None
self._setup(self._connection_class(host, port, strict, dbdir=dbdir, no_init=no_init))
def getreply(self):
"""
Override so we can close duplicated file connection on non-200
responses. This was causing nss_shutdown() to fail with a busy
error.
"""
(status, reason, msg) = httplib.HTTP.getreply(self)
if status != 200:
self.file.close()
return (status, reason, msg)
#------------------------------------------------------------------------------
if __name__ == "__main__":
standard_logging_setup('nsslib.log', debug=True, filemode='a')
root_logger.info("Start")
if False:
conn = NSSConnection("www.verisign.com", 443, dbdir=paths.NSS_DB_DIR)
conn.set_debuglevel(1)
conn.connect()
conn.request("GET", "/")
response = conn.getresponse()
print response.status
#print response.msg
print response.getheaders()
data = response.read()
#print data
conn.close()
if True:
h = NSSHTTPS("www.verisign.com", 443, dbdir=paths.NSS_DB_DIR)
h.connect()
h.putrequest('GET', '/')
h.endheaders()
http_status, http_reason, headers = h.getreply()
print "status = %s %s" % (http_status, http_reason)
print "headers:\n%s" % headers
f = h.getfile()
data = f.read() # Get the raw HTML
f.close()
#print data

Binary file not shown.

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,20 @@
PYTHONLIBDIR ?= $(shell python2 -c "from distutils.sysconfig import *; print get_python_lib()")
PACKAGEDIR ?= $(DESTDIR)/$(PYTHONLIBDIR)/ipa
CONFIGDIR ?= $(DESTDIR)/etc/ipa
all:
python2 setup.py build
install:
if [ "$(DESTDIR)" = "" ]; then \
python2 setup.py install; \
else \
python2 setup.py install --root $(DESTDIR); \
fi
clean:
rm -rf build
distclean: clean
maintainer-clean: distclean

View File

@@ -0,0 +1,57 @@
/*
* Authors:
* John Dennis <jdennis@redhat.com>
*
* Copyright (C) 2009 Red Hat
* see file 'COPYING' for use and warranty information
*
* This program is free software you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <Python.h>
PyDoc_STRVAR(setdefaultencoding_doc,
"setdefaultencoding(encoding='utf-8')\n\
\n\
Set the current default string encoding used by the Unicode implementation.\n\
Defaults to utf-8."
);
static PyObject *
setdefaultencoding(PyObject *self, PyObject *args, PyObject *kwds)
{
static char *kwlist[] = {"utf-8", NULL};
char *encoding;
if (!PyArg_ParseTupleAndKeywords(args, kwds, "s:setdefaultencoding", kwlist, &encoding))
return NULL;
if (PyUnicode_SetDefaultEncoding(encoding))
return NULL;
Py_RETURN_NONE;
}
static PyMethodDef methods[] = {
{"setdefaultencoding", (PyCFunction)setdefaultencoding, METH_VARARGS|METH_KEYWORDS, setdefaultencoding_doc},
{NULL, NULL} /* sentinel */
};
PyMODINIT_FUNC
initdefault_encoding_utf8(void)
{
PyUnicode_SetDefaultEncoding("utf-8");
Py_InitModule3("default_encoding_utf8", methods, "Forces the default encoding to utf-8");
}

View File

@@ -0,0 +1,45 @@
# Authors:
# John Dennis <jdennis@redhat.com>
#
# Copyright (C) 2009 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from distutils.core import setup, Extension
from distutils.sysconfig import get_python_inc
import sys
import os
python_header = os.path.join(get_python_inc(plat_specific=0), 'Python.h')
if not os.path.exists(python_header):
sys.exit("Cannot find Python development packages that provide Python.h")
default_encoding_utf8 = Extension('default_encoding_utf8', ['default_encoding_utf8.c'])
setup(name = 'python-default-encoding',
version = '0.1',
description = 'Forces the default encoding in Python to be utf-8',
long_description = 'Forces the default encoding in Python to be utf-8',
author = 'John Dennis',
author_email = 'jdennis@redhat.com',
maintainer = 'John Dennis',
maintainer_email = 'jdennis@redhat.com',
license = 'GPLv3+',
platforms = 'posix',
url = '',
download_url = '',
ext_modules = [default_encoding_utf8],
)

View File

@@ -1,103 +0,0 @@
# Copyright (C) 2015 IPA Project Contributors, see COPYING for license
from __future__ import print_function
from custodia.message.kem import KEMClient, KEY_USAGE_SIG, KEY_USAGE_ENC
from jwcrypto.common import json_decode
from jwcrypto.jwk import JWK
from ipapython.secrets.kem import IPAKEMKeys
from ipapython.secrets.store import iSecStore
from ipaplatform.paths import paths
from base64 import b64encode
import ldapurl
import gssapi
import os
import requests
class CustodiaClient(object):
def _client_keys(self):
return self.ikk.server_keys
def _server_keys(self, server, realm):
principal = 'host/%s@%s' % (server, realm)
sk = JWK(**json_decode(self.ikk.find_key(principal, KEY_USAGE_SIG)))
ek = JWK(**json_decode(self.ikk.find_key(principal, KEY_USAGE_ENC)))
return (sk, ek)
def _ldap_uri(self, realm):
dashrealm = '-'.join(realm.split('.'))
socketpath = paths.SLAPD_INSTANCE_SOCKET_TEMPLATE % (dashrealm,)
return 'ldapi://' + ldapurl.ldapUrlEscape(socketpath)
def _keystore(self, realm, ldap_uri, auth_type):
config = dict()
if ldap_uri is None:
config['ldap_uri'] = self._ldap_uri(realm)
else:
config['ldap_uri'] = ldap_uri
if auth_type is not None:
config['auth_type'] = auth_type
return iSecStore(config)
def __init__(self, client, server, realm, ldap_uri=None, auth_type=None):
self.client = client
self.creds = None
self.service_name = gssapi.Name('HTTP@%s' % (server,),
gssapi.NameType.hostbased_service)
self.server = server
keyfile = os.path.join(paths.IPA_CUSTODIA_CONF_DIR, 'server.keys')
self.ikk = IPAKEMKeys({'server_keys': keyfile})
self.kemcli = KEMClient(self._server_keys(server, realm),
self._client_keys())
self.keystore = self._keystore(realm, ldap_uri, auth_type)
# FIXME: Remove warnings about missig subjAltName
requests.packages.urllib3.disable_warnings()
def init_creds(self):
name = gssapi.Name('host@%s' % (self.client,),
gssapi.NameType.hostbased_service)
store = {'client_keytab': paths.KRB5_KEYTAB,
'ccache': 'MEMORY:Custodia_%s' % b64encode(os.urandom(8))}
return gssapi.Credentials(name=name, store=store, usage='initiate')
def _auth_header(self):
if not self.creds or self.creds.lifetime < 300:
self.creds = self.init_creds()
ctx = gssapi.SecurityContext(name=self.service_name, creds=self.creds)
authtok = ctx.step()
return {'Authorization': 'Negotiate %s' % b64encode(authtok)}
def fetch_key(self, keyname, store=True):
# Prepare URL
url = 'https://%s/ipa/keys/%s' % (self.server, keyname)
# Prepare signed/encrypted request
encalg = ('RSA1_5', 'A256CBC-HS512')
request = self.kemcli.make_request(keyname, encalg=encalg)
# Prepare Authentication header
headers = self._auth_header()
# Perform request
r = requests.get(url, headers=headers,
params={'type': 'kem', 'value': request})
r.raise_for_status()
reply = r.json()
if 'type' not in reply or reply['type'] != 'kem':
raise RuntimeError('Invlid JSON response type')
value = self.kemcli.parse_reply(keyname, reply['value'])
if store:
self.keystore.set('keys/%s' % keyname, value)
else:
return value

View File

@@ -1,45 +0,0 @@
# Copyright (C) 2015 IPA Project Contributors, see COPYING for license
from __future__ import print_function
import ldap
import ldap.sasl
import ldap.filter
class iSecLdap(object):
def __init__(self, uri, auth_type=None):
self.uri = uri
if auth_type is not None:
self.auth_type = auth_type
else:
if uri.startswith('ldapi'):
self.auth_type = 'EXTERNAL'
else:
self.auth_type = 'GSSAPI'
self._basedn = None
@property
def basedn(self):
if self._basedn is None:
conn = self.connect()
r = conn.search_s('', ldap.SCOPE_BASE)
self._basedn = r[0][1]['defaultnamingcontext'][0]
return self._basedn
def connect(self):
conn = ldap.initialize(self.uri)
if self.auth_type == 'EXTERNAL':
auth_tokens = ldap.sasl.external(None)
elif self.auth_type == 'GSSAPI':
auth_tokens = ldap.sasl.sasl({}, 'GSSAPI')
else:
raise ValueError(
'Invalid authentication type: %s' % self.auth_type)
conn.sasl_interactive_bind_s('', auth_tokens)
return conn
def build_filter(self, formatstr, args):
escaped_args = dict()
for key, value in args.iteritems():
escaped_args[key] = ldap.filter.escape_filter_chars(value)
return formatstr.format(**escaped_args)

View File

@@ -1,204 +0,0 @@
# Copyright (C) 2015 IPA Project Contributors, see COPYING for license
from __future__ import print_function
from ipaplatform.paths import paths
import ConfigParser
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa, ec
from custodia.message.kem import KEMKeysStore
from custodia.message.kem import KEY_USAGE_SIG, KEY_USAGE_ENC, KEY_USAGE_MAP
from jwcrypto.common import json_decode, json_encode
from jwcrypto.common import base64url_encode
from jwcrypto.jwk import JWK
from ipapython.secrets.common import iSecLdap
from binascii import unhexlify
import ldap
IPA_REL_BASE_DN = 'cn=custodia,cn=ipa,cn=etc'
IPA_KEYS_QUERY = '(&(ipaKeyUsage={usage:s})(memberPrincipal={princ:s}))'
RFC5280_USAGE_MAP = {KEY_USAGE_SIG: 'digitalSignature',
KEY_USAGE_ENC: 'dataEncipherment'}
class KEMLdap(iSecLdap):
@property
def keysbase(self):
return '%s,%s' % (IPA_REL_BASE_DN, self.basedn)
def _encode_int(self, i):
I = hex(i).rstrip("L").lstrip("0x")
return base64url_encode(unhexlify((len(I) % 2) * '0' + I))
def _parse_public_key(self, ipa_public_key):
public_key = serialization.load_der_public_key(ipa_public_key,
default_backend())
num = public_key.public_numbers()
if isinstance(num, rsa.RSAPublicNumbers):
return {'kty': 'RSA',
'e': self._encode_int(num.e),
'n': self._encode_int(num.n)}
elif isinstance(num, ec.EllipticCurvePublicNumbers):
if num.curve.name == 'secp256r1':
curve = 'P-256'
elif num.curve.name == 'secp384r1':
curve = 'P-384'
elif num.curve.name == 'secp521r1':
curve = 'P-521'
else:
raise TypeError('Unsupported Elliptic Curve')
return {'kty': 'EC',
'crv': curve,
'x': self._encode_int(num.x),
'y': self._encode_int(num.y)}
else:
raise TypeError('Unknown Public Key type')
def get_key(self, usage, principal):
conn = self.connect()
scope = ldap.SCOPE_SUBTREE
ldap_filter = self.build_filter(IPA_KEYS_QUERY,
{'usage': RFC5280_USAGE_MAP[usage],
'princ': principal})
r = conn.search_s(self.keysbase, scope, ldap_filter)
if len(r) != 1:
raise ValueError("Incorrect number of results (%d) searching for"
"public key for %s" % (len(r), principal))
ipa_public_key = r[0][1]['ipaPublicKey'][0]
jwk = self._parse_public_key(ipa_public_key)
jwk['use'] = KEY_USAGE_MAP[usage]
return json_encode(jwk)
def _format_public_key(self, key):
if isinstance(key, str):
jwkey = json_decode(key)
if 'kty' not in jwkey:
raise ValueError('Invalid key, missing "kty" attribute')
if jwkey['kty'] == 'RSA':
pubnum = rsa.RSAPublicNumbers(jwkey['e'], jwkey['n'])
pubkey = pubnum.public_key(default_backend())
elif jwkey['kty'] == 'EC':
if jwkey['crv'] == 'P-256':
curve = ec.SECP256R1
elif jwkey['crv'] == 'P-384':
curve = ec.SECP384R1
elif jwkey['crv'] == 'P-521':
curve = ec.SECP521R1
else:
raise TypeError('Unsupported Elliptic Curve')
pubnum = ec.EllipticCurvePublicNumbers(
jwkey['x'], jwkey['y'], curve)
pubkey = pubnum.public_key(default_backend())
else:
raise ValueError('Unknown key type: %s' % jwkey['kty'])
elif isinstance(key, rsa.RSAPublicKey):
pubkey = key
elif isinstance(key, ec.EllipticCurvePublicKey):
pubkey = key
else:
raise TypeError('Unknown key type: %s' % type(key))
return pubkey.public_bytes(
encoding=serialization.Encoding.DER,
format=serialization.PublicFormat.SubjectPublicKeyInfo)
def set_key(self, usage, host, principal, key):
public_key = self._format_public_key(key)
conn = self.connect()
name = '%s/%s' % (KEY_USAGE_MAP[usage], host)
dn = 'cn=%s,%s' % (name, self.keysbase)
try:
mods = [('objectClass', ['nsContainer',
'ipaKeyPolicy',
'ipaPublicKeyObject',
'groupOfPrincipals']),
('cn', name),
('ipaKeyUsage', RFC5280_USAGE_MAP[usage]),
('memberPrincipal', principal),
('ipaPublicKey', public_key)]
conn.add_s(dn, mods)
except Exception: # pylint: disable=broad-except
# This may fail if the entry already exists
mods = [(ldap.MOD_REPLACE, 'ipaPublicKey', public_key)]
conn.modify_s(dn, mods)
def newServerKeys(path, keyid):
skey = JWK(generate='RSA', use='sig', kid=keyid)
ekey = JWK(generate='RSA', use='enc', kid=keyid)
with open(path, 'w+') as f:
f.write('[%s,%s]' % (skey.export(), ekey.export()))
return [skey.get_op_key('verify'), ekey.get_op_key('encrypt')]
class IPAKEMKeys(KEMKeysStore):
"""A KEM Keys Store.
This is a store that holds public keys of registered
clients allowed to use KEM messages. It takes the form
of an authorizer merely for the purpose of attaching
itself to a 'request' so that later on the KEM Parser
can fetch the appropariate key to verify/decrypt an
incoming request and make the payload available.
The KEM Parser will actually perform additional
authorization checks in this case.
SimplePathAuthz is extended here as we want to attach the
store only to requests on paths we are configured to
manage.
"""
def __init__(self, config=None, ipaconf=paths.IPA_DEFAULT_CONF):
super(IPAKEMKeys, self).__init__(config)
conf = ConfigParser.ConfigParser()
conf.read(ipaconf)
self.host = conf.get('global', 'host')
self.realm = conf.get('global', 'realm')
self.ldap_uri = config.get('ldap_uri', None)
if self.ldap_uri is None:
self.ldap_uri = conf.get('global', 'ldap_uri', None)
self._server_keys = None
def find_key(self, kid, usage):
if kid is None:
raise TypeError('Key ID is None, should be a SPN')
conn = KEMLdap(self.ldap_uri)
return conn.get_key(usage, kid)
def generate_server_keys(self):
principal = 'host/%s@%s' % (self.host, self.realm)
# Neutralize the key with read if any
self._server_keys = None
# Generate private key and store it
pubkeys = newServerKeys(self.config['server_keys'], principal)
# Store public key in LDAP
ldapconn = KEMLdap(self.ldap_uri)
ldapconn.set_key(KEY_USAGE_SIG, self.host, principal, pubkeys[0])
ldapconn.set_key(KEY_USAGE_ENC, self.host, principal, pubkeys[1])
@property
def server_keys(self):
if self._server_keys is None:
with open(self.config['server_keys']) as f:
jsonkeys = f.read()
dictkeys = json_decode(jsonkeys)
self._server_keys = (JWK(**dictkeys[KEY_USAGE_SIG]),
JWK(**dictkeys[KEY_USAGE_ENC]))
return self._server_keys
# Manual testing
if __name__ == '__main__':
IKK = IPAKEMKeys({'paths': '/',
'server_keys': '/etc/ipa/custodia/server.keys'})
IKK.generate_server_keys()
print(('SIG', IKK.server_keys[0].export_public()))
print(('ENC', IKK.server_keys[1].export_public()))
print(IKK.find_key('host/%s@%s' % (IKK.host, IKK.realm),
usage=KEY_USAGE_SIG))
print(IKK.find_key('host/%s@%s' % (IKK.host, IKK.realm),
usage=KEY_USAGE_ENC))

View File

@@ -1,198 +0,0 @@
# Copyright (C) 2015 IPA Project Contributors, see COPYING for license
from __future__ import print_function
from base64 import b64encode, b64decode
from custodia.store.interface import CSStore
from jwcrypto.common import json_decode, json_encode
from ipaplatform.paths import paths
from ipapython import ipautil
from ipapython.secrets.common import iSecLdap
import ldap
import os
import shutil
import sys
import tempfile
class UnknownKeyName(Exception):
pass
class DBMAPHandler(object):
def __init__(self, config, dbmap, nickname):
raise NotImplementedError
def export_key(self):
raise NotImplementedError
def import_key(self, value):
raise NotImplementedError
def log_error(error):
print(error, file=sys.stderr)
def PKI_TOMCAT_password_callback():
password = None
with open(paths.PKI_TOMCAT_PASSWORD_CONF) as f:
for line in f.readlines():
key, value = line.strip().split('=')
if key == 'internal':
password = value
break
return password
def HTTPD_password_callback():
with open(paths.ALIAS_PWDFILE_TXT) as f:
password = f.read()
return password
class NSSCertDB(DBMAPHandler):
def __init__(self, config, dbmap, nickname):
if 'type' not in dbmap or dbmap['type'] != 'NSSDB':
raise ValueError('Invalid type "%s",'
' expected "NSSDB"' % (dbmap['type'],))
if 'path' not in dbmap:
raise ValueError('Configuration does not provide NSSDB path')
if 'pwcallback' not in dbmap:
raise ValueError('Configuration does not provide Password Calback')
self.nssdb_path = dbmap['path']
self.nickname = nickname
self.nssdb_password = dbmap['pwcallback']()
def export_key(self):
tdir = tempfile.mkdtemp(dir=paths.TMP)
try:
nsspwfile = os.path.join(tdir, 'nsspwfile')
with open(nsspwfile, 'w+') as f:
f.write(self.nssdb_password)
pk12pwfile = os.path.join(tdir, 'pk12pwfile')
password = b64encode(os.urandom(16))
with open(pk12pwfile, 'w+') as f:
f.write(password)
pk12file = os.path.join(tdir, 'pk12file')
ipautil.run([paths.PK12UTIL,
"-d", self.nssdb_path,
"-o", pk12file,
"-n", self.nickname,
"-k", nsspwfile,
"-w", pk12pwfile])
with open(pk12file, 'r') as f:
data = f.read()
finally:
shutil.rmtree(tdir)
return json_encode({'export password': password,
'pkcs12 data': b64encode(data)})
def import_key(self, value):
v = json_decode(value)
tdir = tempfile.mkdtemp(dir=paths.TMP)
try:
nsspwfile = os.path.join(tdir, 'nsspwfile')
with open(nsspwfile, 'w+') as f:
f.write(self.nssdb_password)
pk12pwfile = os.path.join(tdir, 'pk12pwfile')
with open(pk12pwfile, 'w+') as f:
f.write(v['export password'])
pk12file = os.path.join(tdir, 'pk12file')
with open(pk12file, 'w+') as f:
f.write(b64decode(v['pkcs12 data']))
ipautil.run([paths.PK12UTIL,
"-d", self.nssdb_path,
"-i", pk12file,
"-n", self.nickname,
"-k", nsspwfile,
"-w", pk12pwfile])
finally:
shutil.rmtree(tdir)
# Exfiltrate the DM password Hash so it can be set in replica's and this
# way let a replica be install without knowing the DM password and yet
# still keep the DM password synchronized across replicas
class DMLDAP(DBMAPHandler):
def __init__(self, config, dbmap, nickname):
if 'type' not in dbmap or dbmap['type'] != 'DMLDAP':
raise ValueError('Invalid type "%s",'
' expected "DMLDAP"' % (dbmap['type'],))
if nickname != 'DMHash':
raise UnknownKeyName("Unknown Key Named '%s'" % nickname)
self.ldap = iSecLdap(config['ldap_uri'],
config.get('auth_type', None))
def export_key(self):
conn = self.ldap.connect()
r = conn.search_s('cn=config', ldap.SCOPE_BASE,
attrlist=['nsslapd-rootpw'])
if len(r) != 1:
raise RuntimeError('DM Hash not found!')
return json_encode({'dmhash': r[0][1]['nsslapd-rootpw'][0]})
def import_key(self, value):
v = json_decode(value)
conn = self.ldap.connect()
mods = [(ldap.MOD_REPLACE, 'nsslapd-rootpw', str(v['dmhash']))]
conn.modify_s('cn=config', mods)
NAME_DB_MAP = {
'ca': {
'type': 'NSSDB',
'path': paths.PKI_TOMCAT_ALIAS_DIR,
'handler': NSSCertDB,
'pwcallback': PKI_TOMCAT_password_callback,
},
'ra': {
'type': 'NSSDB',
'path': paths.HTTPD_ALIAS_DIR,
'handler': NSSCertDB,
'pwcallback': HTTPD_password_callback,
},
'dm': {
'type': 'DMLDAP',
'handler': DMLDAP,
}
}
class iSecStore(CSStore):
def __init__(self, config=None):
self.config = config
def _get_handler(self, key):
path = key.split('/', 3)
if len(path) != 3 or path[0] != 'keys':
raise ValueError('Invalid name')
if path[1] not in NAME_DB_MAP:
raise UnknownKeyName("Unknown DB named '%s'" % path[1])
dbmap = NAME_DB_MAP[path[1]]
return dbmap['handler'](self.config, dbmap, path[2])
def get(self, key):
try:
key_handler = self._get_handler(key)
value = key_handler.export_key()
except Exception as e: # pylint: disable=broad-except
log_error('Error retrievieng key "%s": %s' % (key, str(e)))
value = None
return value
def set(self, key, value, replace=False):
try:
key_handler = self._get_handler(key)
key_handler.import_key(value)
except Exception as e: # pylint: disable=broad-except
log_error('Error storing key "%s": %s' % (key, str(e)))
def list(self, keyfilter=None):
raise NotImplementedError
def cut(self, key):
raise NotImplementedError

View File

@@ -52,7 +52,7 @@ def setup_package():
try:
setup(
name = "ipapython",
version = "4.3.1",
version = "4.0.5",
license = "GPL",
author = "Karl MacMillan, et.al.",
author_email = "kmacmill@redhat.com",
@@ -62,13 +62,10 @@ def setup_package():
description = DOCLINES[0],
long_description = "\n".join(DOCLINES[2:]),
download_url = "http://www.freeipa.org/page/Downloads",
classifiers=[line for line in CLASSIFIERS.split('\n') if line],
classifiers=filter(None, CLASSIFIERS.split('\n')),
platforms = ["Linux", "Solaris", "Unix"],
package_dir = {'ipapython': ''},
packages = ["ipapython",
"ipapython.dnssec",
"ipapython.secrets",
"ipapython.install"],
packages = [ "ipapython" ],
)
finally:
del sys.path[0]

7
ipapython/setup.py.in Executable file → Normal file
View File

@@ -62,13 +62,10 @@ def setup_package():
description = DOCLINES[0],
long_description = "\n".join(DOCLINES[2:]),
download_url = "http://www.freeipa.org/page/Downloads",
classifiers=[line for line in CLASSIFIERS.split('\n') if line],
classifiers=filter(None, CLASSIFIERS.split('\n')),
platforms = ["Linux", "Solaris", "Unix"],
package_dir = {'ipapython': ''},
packages = ["ipapython",
"ipapython.dnssec",
"ipapython.secrets",
"ipapython.install"],
packages = [ "ipapython" ],
)
finally:
del sys.path[0]

View File

@@ -25,15 +25,9 @@ SSH utilities.
import base64
import re
import struct
import binascii
from hashlib import md5, sha1
from hashlib import sha256 #pylint: disable=E0611
import six
if six.PY3:
unicode = str
__all__ = ['SSHPublicKey']
OPENSSH_BASE_REGEX = re.compile(r'^[\t ]*(?P<keytype>[^\x00\n\r]+?) [\t ]*(?P<key>[^\x00\n\r]+?)(?:[\t ]+(?P<comment>[^\x00\n\r]*?)[\t ]*)?$')
@@ -54,16 +48,15 @@ class SSHPublicKey(object):
self._options = key._options
return
if not isinstance(key, (bytes, unicode)):
raise TypeError("argument must be bytes or unicode, got %s" % type(key).__name__)
if not isinstance(key, (str, unicode)):
raise TypeError("argument must be str or unicode, got %s" % type(key).__name__)
# All valid public key blobs start with 3 null bytes (see RFC 4253
# section 6.6, RFC 4251 section 5 and RFC 4250 section 4.6)
if isinstance(key, bytes) and key[:3] != b'\0\0\0':
if isinstance(key, str) and key[:3] != '\0\0\0':
key = key.decode(encoding)
valid = self._parse_raw(key) or self._parse_base64(key) or self._parse_openssh(key)
if not valid:
raise ValueError("not a valid SSH public key")
@@ -73,7 +66,7 @@ class SSHPublicKey(object):
self._options = options
def _parse_raw(self, key):
if not isinstance(key, bytes):
if not isinstance(key, str):
return False
try:
@@ -102,7 +95,7 @@ class SSHPublicKey(object):
try:
key = base64.b64decode(key)
except (TypeError, ValueError):
except TypeError:
return False
return self._parse_raw(key)
@@ -170,8 +163,7 @@ class SSHPublicKey(object):
return bool(self._options)
def openssh(self):
key = base64.b64encode(self._key).decode('ascii')
out = u'%s %s' % (self._keytype, key)
out = u'%s %s' % (self._keytype, base64.b64encode(self._key))
if self._options:
options = []
@@ -203,8 +195,6 @@ class SSHPublicKey(object):
keytype = 2
elif self._keytype.startswith('ecdsa-sha2-') and '@' not in self._keytype:
keytype = 3
elif self._keytype == 'ssh-ed25519':
keytype = 4
else:
return
fp = fpfunc(self._key).hexdigest().upper()

Binary file not shown.

View File

@@ -27,18 +27,14 @@ import os
import os.path
import shutil
from ipapython.ipa_log_manager import *
import ConfigParser
import random
import six
from six.moves.configparser import SafeConfigParser
import string
from ipapython import ipautil
from ipaplatform.tasks import tasks
from ipaplatform.paths import paths
if six.PY3:
unicode = str
SYSRESTORE_PATH = paths.TMP
SYSRESTORE_INDEXFILE = "sysrestore.index"
SYSRESTORE_STATEFILE = "sysrestore.state"
@@ -70,8 +66,7 @@ class FileStore:
self.files = {}
p = SafeConfigParser()
p.optionxform = str
p = ConfigParser.SafeConfigParser()
p.read(self._index)
for section in p.sections():
@@ -92,15 +87,15 @@ class FileStore:
os.remove(self._index)
return
p = SafeConfigParser()
p.optionxform = str
p = ConfigParser.SafeConfigParser()
p.add_section('files')
for (key, value) in self.files.items():
p.set('files', key, str(value))
with open(self._index, "w") as f:
p.write(f)
f = file(self._index, "w")
p.write(f)
f.close()
def backup_file(self, path):
"""Create a copy of the file at @path - so long as a copy
@@ -133,8 +128,7 @@ class FileStore:
stat = os.stat(path)
template = '{stat.st_mode},{stat.st_uid},{stat.st_gid},{path}'
self.files[filename] = template.format(stat=stat, path=path)
self.files[filename] = string.join([str(stat.st_mode),str(stat.st_uid),str(stat.st_gid),path], ',')
self.save()
def has_file(self, path):
@@ -144,7 +138,7 @@ class FileStore:
"""
result = False
for (key, value) in self.files.items():
(mode,uid,gid,filepath) = value.split(',', 3)
(mode,uid,gid,filepath) = string.split(value, ',', 3)
if (filepath == path):
result = True
break
@@ -177,7 +171,7 @@ class FileStore:
filename = None
for (key, value) in self.files.items():
(mode,uid,gid,filepath) = value.split(',', 3)
(mode,uid,gid,filepath) = string.split(value, ',', 3)
if (filepath == path):
filename = key
break
@@ -193,9 +187,7 @@ class FileStore:
if new_path is not None:
path = new_path
shutil.copy(backup_path, path) # SELinux needs copy
os.remove(backup_path)
shutil.move(backup_path, path)
os.chown(path, int(uid), int(gid))
os.chmod(path, int(mode))
@@ -219,22 +211,20 @@ class FileStore:
for (filename, value) in self.files.items():
(mode,uid,gid,path) = value.split(',', 3)
(mode,uid,gid,path) = string.split(value, ',', 3)
backup_path = os.path.join(self._path, filename)
if not os.path.exists(backup_path):
root_logger.debug(" -> Not restoring - '%s' doesn't exist", backup_path)
continue
shutil.copy(backup_path, path) # SELinux needs copy
os.remove(backup_path)
shutil.move(backup_path, path)
os.chown(path, int(uid), int(gid))
os.chmod(path, int(mode))
tasks.restore_context(path)
# force file to be deleted
#force file to be deleted
self.files = {}
self.save()
@@ -268,7 +258,7 @@ class FileStore:
filename = None
for (key, value) in self.files.items():
(mode,uid,gid,filepath) = value.split(',', 3)
(mode,uid,gid,filepath) = string.split(value, ',', 3)
if (filepath == path):
filename = key
break
@@ -283,7 +273,7 @@ class FileStore:
try:
os.unlink(backup_path)
except Exception as e:
except Exception, e:
root_logger.error('Error removing %s: %s' % (backup_path, str(e)))
del self.files[filename]
@@ -293,11 +283,8 @@ class FileStore:
class StateFile:
"""A metadata file for recording system state which can
be backed up and later restored.
StateFile gets reloaded every time to prevent loss of information
recorded by child processes. But we do not solve concurrency
because there is no need for it right now.
The format is something like:
be backed up and later restored. The format is something
like:
[httpd]
running=True
@@ -329,8 +316,7 @@ class StateFile:
self.modules = {}
p = SafeConfigParser()
p.optionxform = str
p = ConfigParser.SafeConfigParser()
p.read(self._path)
for module in p.sections():
@@ -358,16 +344,16 @@ class StateFile:
os.remove(self._path)
return
p = SafeConfigParser()
p.optionxform = str
p = ConfigParser.SafeConfigParser()
for module in self.modules.keys():
p.add_section(module)
for (key, value) in self.modules[module].items():
p.set(module, key, str(value))
with open(self._path, "w") as f:
p.write(f)
f = file(self._path, "w")
p.write(f)
f.close()
def backup_state(self, module, key, value):
"""Backup an item of system state from @module, identified
@@ -377,12 +363,10 @@ class StateFile:
if not isinstance(value, (str, bool, unicode)):
raise ValueError("Only strings, booleans or unicode strings are supported")
self._load()
if module not in self.modules:
if not self.modules.has_key(module):
self.modules[module] = {}
if key not in self.modules:
if not self.modules.has_key(key):
self.modules[module][key] = value
self.save()
@@ -394,9 +378,7 @@ class StateFile:
If the item doesn't exist, #None will be returned, otherwise
the original string or boolean value is returned.
"""
self._load()
if module not in self.modules:
if not self.modules.has_key(module):
return None
return self.modules[module].get(key, None)
@@ -407,8 +389,6 @@ class StateFile:
If the item doesn't exist, no change is done.
"""
self._load()
try:
del self.modules[module][key]
except KeyError:
@@ -438,7 +418,7 @@ class StateFile:
Can be used to determine if a service is configured.
"""
if module in self.modules:
if self.modules.has_key(module):
return True
else:
return False

BIN
ipapython/sysrestore.pyc Normal file

Binary file not shown.

View File

@@ -18,10 +18,10 @@
#
# The full version including strings
VERSION="4.3.1"
VERSION="4.0.5"
# A fuller version including the vendor tag (e.g. 3.3.3-34.fc20)
VENDOR_VERSION="4.3.1"
VENDOR_VERSION="4.0.5"
# Just the numeric portion of the version so one can do direct numeric
@@ -41,8 +41,8 @@ VENDOR_VERSION="4.3.1"
# IPA 3.2.1: NUM_VERSION=30201
# IPA 3.2.99: NUM_VERSION=30299 (development version)
# IPA 3.3.0: NUM_VERSION=30300
NUM_VERSION=40301
NUM_VERSION=40005
# The version of the API.
API_VERSION=u'2.164'
API_VERSION=u'2.101'

Binary file not shown.