Import Upstream version 4.12.4
This commit is contained in:
0
ipaserver/custodia/httpd/__init__.py
Normal file
0
ipaserver/custodia/httpd/__init__.py
Normal file
52
ipaserver/custodia/httpd/authenticators.py
Normal file
52
ipaserver/custodia/httpd/authenticators.py
Normal file
@@ -0,0 +1,52 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
from ipaserver.custodia import log
|
||||
from ipaserver.custodia.plugin import HTTPAuthenticator, PluginOption
|
||||
|
||||
|
||||
class SimpleCredsAuth(HTTPAuthenticator):
|
||||
uid = PluginOption('pwd_uid', -1, "User id or name, -1 ignores user")
|
||||
gid = PluginOption('grp_gid', -1, "Group id or name, -1 ignores group")
|
||||
|
||||
def handle(self, request):
|
||||
creds = request.get('creds')
|
||||
if creds is None:
|
||||
self.logger.debug('SCA: Missing "creds" from request')
|
||||
return False
|
||||
uid = int(creds['uid'])
|
||||
gid = int(creds['gid'])
|
||||
uid_match = self.uid != -1 and self.uid == uid
|
||||
gid_match = self.gid != -1 and self.gid == gid
|
||||
if uid_match or gid_match:
|
||||
self.audit_svc_access(log.AUDIT_SVC_AUTH_PASS,
|
||||
request['client_id'],
|
||||
"%d, %d" % (uid, gid))
|
||||
return True
|
||||
else:
|
||||
self.audit_svc_access(log.AUDIT_SVC_AUTH_FAIL,
|
||||
request['client_id'],
|
||||
"%d, %d" % (uid, gid))
|
||||
return False
|
||||
|
||||
|
||||
class SimpleHeaderAuth(HTTPAuthenticator):
|
||||
header = PluginOption(str, 'REMOTE_USER', "header name")
|
||||
value = PluginOption('str_set', None,
|
||||
"Comma-separated list of required values")
|
||||
|
||||
def handle(self, request):
|
||||
if self.header not in request['headers']:
|
||||
self.logger.debug('SHA: No "headers" in request')
|
||||
return None
|
||||
value = request['headers'][self.header]
|
||||
if self.value is not None:
|
||||
if value not in self.value:
|
||||
self.audit_svc_access(log.AUDIT_SVC_AUTH_FAIL,
|
||||
request['client_id'], value)
|
||||
return False
|
||||
|
||||
self.audit_svc_access(log.AUDIT_SVC_AUTH_PASS,
|
||||
request['client_id'], value)
|
||||
request['remote_user'] = value
|
||||
return True
|
||||
47
ipaserver/custodia/httpd/authorizers.py
Normal file
47
ipaserver/custodia/httpd/authorizers.py
Normal file
@@ -0,0 +1,47 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
|
||||
from ipaserver.custodia import log
|
||||
from ipaserver.custodia.plugin import HTTPAuthorizer
|
||||
|
||||
|
||||
class SimplePathAuthz(HTTPAuthorizer):
|
||||
# keep SimplePathAuthz an old-style plugin for now.
|
||||
# KEMKeysStore and IPAKEMKeys haven't been ported.
|
||||
|
||||
def __init__(self, config):
|
||||
super(SimplePathAuthz, self).__init__(config)
|
||||
self.paths = []
|
||||
if 'paths' in self.config:
|
||||
self.paths = self.config['paths'].split()
|
||||
|
||||
def handle(self, request):
|
||||
reqpath = path = request.get('path', '')
|
||||
|
||||
# if an authorized path does not end in /
|
||||
# check if it matches fullpath for strict match
|
||||
for authz in self.paths:
|
||||
if authz.endswith('/'):
|
||||
continue
|
||||
if authz.endswith('.'):
|
||||
# special case to match a path ending in /
|
||||
authz = authz[:-1]
|
||||
if authz == path:
|
||||
self.audit_svc_access(log.AUDIT_SVC_AUTHZ_PASS,
|
||||
request['client_id'], path)
|
||||
return True
|
||||
|
||||
while path != '':
|
||||
if path in self.paths:
|
||||
self.audit_svc_access(log.AUDIT_SVC_AUTHZ_PASS,
|
||||
request['client_id'], path)
|
||||
return True
|
||||
if path == '/':
|
||||
path = ''
|
||||
else:
|
||||
path, _head = os.path.split(path)
|
||||
|
||||
self.logger.debug('No path in %s matched %s', self.paths, reqpath)
|
||||
return None
|
||||
507
ipaserver/custodia/httpd/server.py
Normal file
507
ipaserver/custodia/httpd/server.py
Normal file
@@ -0,0 +1,507 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import atexit
|
||||
import errno
|
||||
import os
|
||||
import shutil
|
||||
import socket
|
||||
import struct
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from http.server import BaseHTTPRequestHandler
|
||||
from socketserver import ForkingTCPServer, BaseServer
|
||||
from urllib.parse import parse_qs, unquote, urlparse
|
||||
|
||||
import six
|
||||
|
||||
from ipaserver.custodia import log
|
||||
from ipaserver.custodia.plugin import HTTPError
|
||||
|
||||
try:
|
||||
from systemd import daemon as sd
|
||||
except ImportError:
|
||||
sd = None
|
||||
if 'NOTIFY_SOCKET' in os.environ:
|
||||
warnings.warn(
|
||||
"NOTIFY_SOCKET env var is set but python-systemd bindings are "
|
||||
"not available!",
|
||||
category=RuntimeWarning
|
||||
)
|
||||
if 'LISTEN_FDS' in os.environ:
|
||||
warnings.warn(
|
||||
"LISTEN_FDS env var is set, but python-systemd bindings are"
|
||||
"not available!",
|
||||
category=RuntimeWarning
|
||||
)
|
||||
|
||||
|
||||
logger = log.getLogger(__name__)
|
||||
|
||||
SO_PEERCRED = getattr(socket, 'SO_PEERCRED', 17)
|
||||
SO_PEERSEC = getattr(socket, 'SO_PEERSEC', 31)
|
||||
SELINUX_CONTEXT_LEN = 256
|
||||
MAX_REQUEST_SIZE = 10 * 1024 * 1024 # For now limit body to 10MiB
|
||||
|
||||
|
||||
class ForkingHTTPServer(ForkingTCPServer):
|
||||
"""
|
||||
A forking HTTP Server.
|
||||
Each request runs into a forked server so that the whole environment
|
||||
is clean and isolated, and parallel requests cannot unintentionally
|
||||
influence one another.
|
||||
|
||||
When a request is received it is parsed by the handler_class provided
|
||||
at server initialization.
|
||||
"""
|
||||
server_string = "Custodia/0.1"
|
||||
allow_reuse_address = True
|
||||
socket_file = None
|
||||
|
||||
def __init__(self, server_address, handler_class, config,
|
||||
bind_and_activate=True):
|
||||
# pylint: disable=non-parent-init-called
|
||||
# Init just BaseServer, TCPServer creates a socket.
|
||||
BaseServer.__init__(self, server_address, handler_class)
|
||||
|
||||
if isinstance(server_address, socket.socket):
|
||||
# It's a bound and activates socket from systemd.
|
||||
self.socket = server_address
|
||||
bind_and_activate = False
|
||||
else:
|
||||
self.socket = socket.socket(self.address_family,
|
||||
self.socket_type)
|
||||
|
||||
# copied from TCPServer
|
||||
if bind_and_activate:
|
||||
try:
|
||||
self.server_bind()
|
||||
self.server_activate()
|
||||
except BaseException:
|
||||
self.server_close()
|
||||
raise
|
||||
|
||||
if self.socket.family == socket.AF_UNIX:
|
||||
self.socket_file = self.socket.getsockname()
|
||||
|
||||
if 'consumers' not in config:
|
||||
raise ValueError('Configuration does not provide any consumer')
|
||||
self.config = config
|
||||
if 'server_string' in self.config:
|
||||
self.server_string = self.config['server_string']
|
||||
self.auditlog = log.auditlog
|
||||
|
||||
|
||||
class ForkingUnixHTTPServer(ForkingHTTPServer):
|
||||
address_family = socket.AF_UNIX
|
||||
|
||||
def server_bind(self):
|
||||
self.unlink()
|
||||
# Remove on exit
|
||||
atexit.register(self.unlink)
|
||||
basedir = os.path.dirname(self.server_address)
|
||||
if not os.path.isdir(basedir):
|
||||
os.makedirs(basedir, mode=0o755)
|
||||
ForkingHTTPServer.server_bind(self)
|
||||
os.chmod(self.server_address, 0o666)
|
||||
|
||||
def unlink(self):
|
||||
try:
|
||||
os.unlink(self.server_address)
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
|
||||
class HTTPRequestHandler(BaseHTTPRequestHandler):
|
||||
|
||||
"""
|
||||
This request handler is a slight modification of BaseHTTPRequestHandler
|
||||
where the per-request handler is replaced.
|
||||
|
||||
When a request comes in it is parsed and the 'request' dictionary is
|
||||
populated accordingly. Additionally a 'creds' structure is added to the
|
||||
request.
|
||||
|
||||
The 'creds' structure contains the data retrieved via a call to
|
||||
getsockopt with the SO_PEERCRED option. This retrieves via kernel assist
|
||||
the uid,gid and pid of the process on the other side of the unix socket
|
||||
on which the request has been made. This can be used for authentication
|
||||
and/or authorization purposes.
|
||||
The 'creds' structure is further augmented with a 'context' option
|
||||
containing the Selinux Context string for the calling process, if
|
||||
available.
|
||||
|
||||
after the request is parsed the server's pipeline() function is invoked
|
||||
in order to handle it. The pipeline() should return a response object,
|
||||
where te return 'code', the 'output' and 'headers' may be found.
|
||||
|
||||
If no 'code' is present the request is assumed to be successful and a
|
||||
'200 OK' status code will be sent back to the client.
|
||||
|
||||
The 'output' parameter can be a string or a file like object.
|
||||
|
||||
The 'headers' objct must be a dictionary where keys are headers names.
|
||||
|
||||
By default we assume HTTP1.0
|
||||
"""
|
||||
|
||||
protocol_version = "HTTP/1.0"
|
||||
|
||||
def __init__(self, request, client_address, server):
|
||||
self.requestline = ''
|
||||
self.request_version = ''
|
||||
self.command = ''
|
||||
self.raw_requestline = None
|
||||
self.close_connection = 0
|
||||
self.path = None # quoted, raw path
|
||||
self.path_chain = None # tuple of unquoted segments
|
||||
self.query = None
|
||||
self.url = None
|
||||
self.body = None
|
||||
self.loginuid = None
|
||||
self._creds = False
|
||||
BaseHTTPRequestHandler.__init__(self, request, client_address, server)
|
||||
|
||||
def version_string(self):
|
||||
return self.server.server_string
|
||||
|
||||
def _get_loginuid(self, pid):
|
||||
loginuid = None
|
||||
# NOTE: Using proc to find the login uid is not reliable
|
||||
# this is why login uid is fetched separately and not stored
|
||||
# into 'creds', to avoid giving the false impression it can be
|
||||
# used to perform access control decisions
|
||||
try:
|
||||
with open("/proc/%i/loginuid" % pid, "r") as f:
|
||||
loginuid = int(f.read())
|
||||
except IOError as e:
|
||||
if e.errno != errno.ENOENT:
|
||||
raise
|
||||
if loginuid == -1:
|
||||
loginuid = None
|
||||
return loginuid
|
||||
|
||||
@property
|
||||
def peer_creds(self):
|
||||
if self._creds is not False:
|
||||
return self._creds
|
||||
# works only for unix sockets
|
||||
if self.request.family != socket.AF_UNIX:
|
||||
self._creds = None
|
||||
return self._creds
|
||||
# pid_t: signed int32, uid_t/gid_t: unsigned int32
|
||||
fmt = 'iII'
|
||||
creds = self.request.getsockopt(socket.SOL_SOCKET, SO_PEERCRED,
|
||||
struct.calcsize(fmt))
|
||||
pid, uid, gid = struct.unpack(fmt, creds)
|
||||
try:
|
||||
creds = self.request.getsockopt(socket.SOL_SOCKET, SO_PEERSEC,
|
||||
SELINUX_CONTEXT_LEN)
|
||||
context = creds.rstrip(b'\x00').decode('utf-8')
|
||||
except Exception: # pylint: disable=broad-except
|
||||
logger.debug("Couldn't retrieve SELinux Context", exc_info=True)
|
||||
context = None
|
||||
|
||||
self._creds = {'pid': pid, 'uid': uid, 'gid': gid, 'context': context}
|
||||
return self._creds
|
||||
|
||||
@property
|
||||
def peer_info(self):
|
||||
if self.peer_creds is not None:
|
||||
return self._creds['pid']
|
||||
elif self.request.family in {socket.AF_INET, socket.AF_INET6}:
|
||||
return self.request.getpeername()
|
||||
return None
|
||||
|
||||
@property
|
||||
def peer_cert(self):
|
||||
if not hasattr(self.request, 'getpeercert'):
|
||||
return None
|
||||
return self.request.getpeercert()
|
||||
|
||||
def parse_request(self):
|
||||
if not BaseHTTPRequestHandler.parse_request(self):
|
||||
return False
|
||||
|
||||
# grab the loginuid from `/proc` as soon as possible
|
||||
creds = self.peer_creds
|
||||
if creds is not None:
|
||||
self.loginuid = self._get_loginuid(creds['pid'])
|
||||
|
||||
# after basic parsing also use urlparse to retrieve individual
|
||||
# elements of a request.
|
||||
url = urlparse(self.path)
|
||||
|
||||
# Yes, override path with the path part only
|
||||
self.path = url.path
|
||||
self.path_chain = self._parse_path(url)
|
||||
|
||||
# Create dict out of query
|
||||
self.query = parse_qs(url.query)
|
||||
|
||||
# keep the rest into the 'url' element in case someone needs it
|
||||
self.url = url
|
||||
|
||||
return True
|
||||
|
||||
def _parse_path(self, url):
|
||||
path_chain = []
|
||||
for segment in url.path.split('/'):
|
||||
# unquote URL path encoding
|
||||
segment = unquote(segment)
|
||||
path_chain.append(segment)
|
||||
return tuple(path_chain)
|
||||
|
||||
def parse_body(self):
|
||||
length = int(self.headers.get('content-length', 0))
|
||||
if length > MAX_REQUEST_SIZE:
|
||||
raise HTTPError(413)
|
||||
if length == 0:
|
||||
self.body = None
|
||||
else:
|
||||
self.body = self.rfile.read(length)
|
||||
|
||||
def handle_one_request(self):
|
||||
if self.request.family == socket.AF_UNIX:
|
||||
# Set a fake client address to make log functions happy
|
||||
self.client_address = ['127.0.0.1', 0]
|
||||
try:
|
||||
if not self.server.config:
|
||||
self.close_connection = 1
|
||||
return
|
||||
self.raw_requestline = self.rfile.readline(65537)
|
||||
if not self.raw_requestline:
|
||||
self.close_connection = 1
|
||||
return
|
||||
if len(self.raw_requestline) > 65536:
|
||||
self.requestline = ''
|
||||
self.request_version = ''
|
||||
self.command = ''
|
||||
self.send_error(414)
|
||||
self.wfile.flush()
|
||||
return
|
||||
if not self.parse_request():
|
||||
self.close_connection = 1
|
||||
return
|
||||
try:
|
||||
self.parse_body()
|
||||
except HTTPError as e:
|
||||
self.send_error(e.code, e.mesg)
|
||||
self.wfile.flush()
|
||||
return
|
||||
request = {'creds': self.peer_creds,
|
||||
'client_cert': self.peer_cert,
|
||||
'client_id': self.peer_info,
|
||||
'command': self.command,
|
||||
'path': self.path,
|
||||
'path_chain': self.path_chain,
|
||||
'query': self.query,
|
||||
'url': self.url,
|
||||
'version': self.request_version,
|
||||
'headers': self.headers,
|
||||
'body': self.body}
|
||||
logger.debug(
|
||||
"REQUEST: %s %s, query: %r, cred: %r, client_id: %s, "
|
||||
"headers: %r, body: %r",
|
||||
request['command'], request['path_chain'], request['query'],
|
||||
request['creds'], request['client_id'],
|
||||
dict(request['headers']), request['body']
|
||||
)
|
||||
try:
|
||||
response = self.pipeline(self.server.config, request)
|
||||
if response is None:
|
||||
raise HTTPError(500)
|
||||
except HTTPError as e:
|
||||
self.send_error(e.code, e.mesg)
|
||||
self.wfile.flush()
|
||||
return
|
||||
except socket.timeout as e:
|
||||
self.log_error("Request timed out: %r", e)
|
||||
self.close_connection = 1
|
||||
return
|
||||
except Exception as e: # pylint: disable=broad-except
|
||||
self.log_error("Handler failed: %r", e, exc_info=True)
|
||||
self.send_error(500)
|
||||
self.wfile.flush()
|
||||
return
|
||||
|
||||
self.send_response(response.get('code', 200))
|
||||
for header, value in six.iteritems(response.get('headers', {})):
|
||||
self.send_header(header, value)
|
||||
self.end_headers()
|
||||
|
||||
output = response.get('output', None)
|
||||
if hasattr(output, 'read'):
|
||||
shutil.copyfileobj(output, self.wfile)
|
||||
output.close()
|
||||
elif output is not None:
|
||||
self.wfile.write(output)
|
||||
else:
|
||||
self.close_connection = 1
|
||||
self.wfile.flush()
|
||||
return
|
||||
except socket.timeout as e:
|
||||
self.log_error("Request timed out: %r", e)
|
||||
self.close_connection = 1
|
||||
return
|
||||
|
||||
# pylint: disable=arguments-differ
|
||||
def log_error(self, fmtstr, *args, **kwargs):
|
||||
logger.error(fmtstr, *args, **kwargs)
|
||||
|
||||
def pipeline(self, config, request):
|
||||
"""
|
||||
The pipeline() function handles authentication and invocation of
|
||||
the correct consumer based on the server configuration, that is
|
||||
provided at initialization time.
|
||||
|
||||
When authentication is performed all the authenticators are
|
||||
executed. If any returns False, authentication fails and a 403
|
||||
error is raised. If none of them positively succeeds and they all
|
||||
return None then also authentication fails and a 403 error is
|
||||
raised. Authentication plugins can add attributes to the request
|
||||
object for use of authorization or other plugins.
|
||||
|
||||
When authorization is performed and positive result will cause the
|
||||
operation to be accepted and any negative result will cause it to
|
||||
fail. If no authorization plugin returns a positive result a 403
|
||||
error is returned.
|
||||
|
||||
Once authentication and authorization are successful the pipeline
|
||||
will parse the path component and find the consumer plugin that
|
||||
handles the provided path walking up the path component by
|
||||
component until a consumer is found.
|
||||
|
||||
Paths are walked up from the leaf to the root, so if two consumers
|
||||
hang on the same tree, the one closer to the leaf will be used. If
|
||||
there is a trailing path when the conumer is selected then it will
|
||||
be stored in the request dicstionary named 'trail'. The 'trail' is
|
||||
an ordered list of the path components below the consumer entry
|
||||
point.
|
||||
"""
|
||||
path_chain = request['path_chain']
|
||||
if not path_chain or path_chain[0] != '':
|
||||
# no path or not an absolute path
|
||||
raise HTTPError(400)
|
||||
|
||||
# auth framework here
|
||||
authers = config.get('authenticators')
|
||||
if authers is None:
|
||||
raise HTTPError(403)
|
||||
valid_once = False
|
||||
for auth in authers:
|
||||
valid = authers[auth].handle(request)
|
||||
if valid is False:
|
||||
raise HTTPError(403)
|
||||
elif valid is True:
|
||||
valid_once = True
|
||||
if valid_once is not True:
|
||||
self.server.auditlog.svc_access(self.__class__.__name__,
|
||||
log.AUDIT_SVC_AUTH_FAIL,
|
||||
request['client_id'], 'No auth')
|
||||
raise HTTPError(403)
|
||||
|
||||
# auhz framework here
|
||||
authzers = config.get('authorizers')
|
||||
if authzers is None:
|
||||
raise HTTPError(403)
|
||||
authz_ok = None
|
||||
for authz in authzers:
|
||||
valid = authzers[authz].handle(request)
|
||||
if valid is True:
|
||||
authz_ok = True
|
||||
elif valid is False:
|
||||
authz_ok = False
|
||||
break
|
||||
if authz_ok is not True:
|
||||
self.server.auditlog.svc_access(self.__class__.__name__,
|
||||
log.AUDIT_SVC_AUTHZ_FAIL,
|
||||
request['client_id'],
|
||||
path_chain)
|
||||
raise HTTPError(403)
|
||||
|
||||
# Select consumer
|
||||
trail = []
|
||||
while path_chain:
|
||||
if path_chain in config['consumers']:
|
||||
con = config['consumers'][path_chain]
|
||||
if len(trail) != 0:
|
||||
request['trail'] = trail
|
||||
return con.handle(request)
|
||||
trail.insert(0, path_chain[-1])
|
||||
path_chain = path_chain[:-1]
|
||||
|
||||
raise HTTPError(404)
|
||||
|
||||
|
||||
class HTTPServer:
|
||||
handler = HTTPRequestHandler
|
||||
|
||||
def __init__(self, srvurl, config):
|
||||
url = urlparse(srvurl)
|
||||
serverclass, address = self._get_serverclass(url)
|
||||
if sd is not None:
|
||||
address = self._get_systemd_socket(address)
|
||||
self.httpd = serverclass(address, self.handler, config)
|
||||
|
||||
def _get_serverclass(self, url):
|
||||
if url.scheme == 'http+unix':
|
||||
# Unix socket
|
||||
address = unquote(url.netloc)
|
||||
if not address:
|
||||
raise ValueError('Empty address {}'.format(url))
|
||||
logger.info('Serving on Unix socket %s', address)
|
||||
serverclass = ForkingUnixHTTPServer
|
||||
elif url.scheme == 'http':
|
||||
host, port = url.netloc.split(":")
|
||||
address = (host, int(port))
|
||||
logger.info('Serving on %s (HTTP)', url.netloc)
|
||||
serverclass = ForkingHTTPServer
|
||||
else:
|
||||
raise ValueError('Unknown URL Scheme: %s' % url.scheme)
|
||||
return serverclass, address
|
||||
|
||||
def _get_systemd_socket(self, address):
|
||||
fds = sd.listen_fds()
|
||||
if not fds:
|
||||
return address
|
||||
elif len(fds) > 1:
|
||||
raise ValueError('Too many listening sockets', fds)
|
||||
|
||||
if isinstance(address, tuple):
|
||||
port = address[1]
|
||||
# systemd uses IPv6
|
||||
if not sd.is_socket_inet(fds[0], family=socket.AF_INET6,
|
||||
type=socket.SOCK_STREAM,
|
||||
listening=True, port=port):
|
||||
raise ValueError(
|
||||
"FD {} is not TCP IPv6 socket on port {}".format(
|
||||
fds[0], port
|
||||
)
|
||||
)
|
||||
logger.info('Using systemd socket activation on port %i', port)
|
||||
sock = socket.fromfd(fds[0], socket.AF_INET6, socket.SOCK_STREAM)
|
||||
else:
|
||||
if not sd.is_socket_unix(fds[0], socket.SOCK_STREAM,
|
||||
listening=True, path=address):
|
||||
raise ValueError(
|
||||
"FD {} is not Unix stream socket on path {}".format(
|
||||
fds[0], address
|
||||
)
|
||||
)
|
||||
logger.info('Using systemd socket activation on path %s', address)
|
||||
sock = socket.fromfd(fds[0], socket.AF_UNIX, socket.SOCK_STREAM)
|
||||
|
||||
if sys.version_info[0] < 3:
|
||||
# Python 2.7's socket.fromfd() returns _socket.socket
|
||||
sock = socket.socket(_sock=sock)
|
||||
return sock
|
||||
|
||||
def get_socket(self):
|
||||
return (self.httpd.socket, self.httpd.socket_file)
|
||||
|
||||
def serve(self):
|
||||
if sd is not None and sd.booted():
|
||||
sd.notify("READY=1")
|
||||
return self.httpd.serve_forever()
|
||||
190
ipaserver/custodia/log.py
Normal file
190
ipaserver/custodia/log.py
Normal file
@@ -0,0 +1,190 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
|
||||
import six
|
||||
|
||||
|
||||
LOGGING_FORMAT = "%(asctime)s - %(origin)-32s - %(message)s"
|
||||
LOGGING_DATEFORMAT = "%Y-%m-%d %H:%M:%S"
|
||||
|
||||
|
||||
class OriginContextFilter(logging.Filter):
|
||||
"""Context filter to include 'origin' attribute in record
|
||||
"""
|
||||
def filter(self, record):
|
||||
if not hasattr(record, 'origin'):
|
||||
record.origin = record.name.split('.')[-1]
|
||||
|
||||
return True
|
||||
|
||||
|
||||
class CustodiaFormatter(logging.Formatter):
|
||||
def format(self, record):
|
||||
# customize record.exc_text, Formatter.format() does not modify
|
||||
# exc_text when it has been set before.
|
||||
short_exc = False
|
||||
if record.exc_info and not record.exc_text:
|
||||
if getattr(record, "exc_fullstack", True):
|
||||
record.exc_text = self.formatException(record.exc_info)
|
||||
else:
|
||||
short_exc = True
|
||||
record.exc_text = u"{0.__name__}: {1}".format(
|
||||
record.exc_info[0], record.exc_info[1]
|
||||
)
|
||||
|
||||
result = super(CustodiaFormatter, self).format(record)
|
||||
if short_exc:
|
||||
# format() adds \n between message and exc_text
|
||||
text, exc = result.rsplit(u'\n', 1)
|
||||
return u"{0} ({1})".format(text, exc)
|
||||
else:
|
||||
return result
|
||||
|
||||
|
||||
class CustodiaLoggingAdapter(logging.LoggerAdapter):
|
||||
def __init__(self, plugin, debug):
|
||||
logger = logging.getLogger(
|
||||
'{0.__class__.__module__}.{0.__class__.__name__}'.format(plugin)
|
||||
)
|
||||
logger.setLevel(logging.DEBUG if debug else logging.INFO)
|
||||
extra = {'origin': plugin.origin}
|
||||
super(CustodiaLoggingAdapter, self).__init__(logger, extra=extra)
|
||||
|
||||
def exception(self, msg, *args, **kwargs):
|
||||
"""Like standard exception() logger but only print stack in debug mode
|
||||
"""
|
||||
extra = kwargs.setdefault('extra', {})
|
||||
extra['exc_fullstack'] = self.isEnabledFor(logging.DEBUG)
|
||||
kwargs['exc_info'] = True
|
||||
self.log(logging.ERROR, msg, *args, **kwargs)
|
||||
|
||||
|
||||
def getLogger(name):
|
||||
"""Create logger with custom exception() method
|
||||
"""
|
||||
def exception(self, msg, *args, **kwargs):
|
||||
extra = kwargs.setdefault('extra', {})
|
||||
extra['exc_fullstack'] = self.isEnabledFor(logging.DEBUG)
|
||||
kwargs['exc_info'] = True
|
||||
self.log(logging.ERROR, msg, *args, **kwargs)
|
||||
|
||||
logger = logging.getLogger(name)
|
||||
logger.exception = six.create_bound_method(exception, logger)
|
||||
return logger
|
||||
|
||||
|
||||
def setup_logging(debug=False, auditfile=None, handler=None):
|
||||
root_logger = logging.getLogger()
|
||||
# default is stream handler to stderr
|
||||
if handler is None:
|
||||
handler = logging.StreamHandler(sys.stderr)
|
||||
|
||||
# remove handler instance from root handler to prevent multiple
|
||||
# output handlers.
|
||||
handler_cls = type(handler)
|
||||
root_logger.handlers[:] = list(
|
||||
h for h in root_logger.handlers if not isinstance(h, handler_cls)
|
||||
)
|
||||
|
||||
# configure handler
|
||||
handler.setFormatter(CustodiaFormatter(
|
||||
fmt=LOGGING_FORMAT, datefmt=LOGGING_DATEFORMAT
|
||||
))
|
||||
handler.addFilter(OriginContextFilter())
|
||||
root_logger.addHandler(handler)
|
||||
|
||||
# set logging level
|
||||
custodia_logger = getLogger('custodia')
|
||||
if debug:
|
||||
custodia_logger.setLevel(logging.DEBUG)
|
||||
custodia_logger.debug('Custodia debug logger enabled')
|
||||
# If the global debug is enabled, turn debug on in all 'custodia.*'
|
||||
# loggers
|
||||
logdict = logging.Logger.manager.loggerDict
|
||||
for name, obj in logdict.items():
|
||||
if not isinstance(obj, logging.Logger):
|
||||
continue
|
||||
if name.startswith('custodia.'):
|
||||
obj.setLevel(logging.DEBUG)
|
||||
else:
|
||||
custodia_logger.setLevel(logging.INFO)
|
||||
|
||||
# setup file handler for audit log
|
||||
audit_logger = logging.getLogger('custodia.audit')
|
||||
if auditfile is not None and len(audit_logger.handlers) == 0:
|
||||
audit_fmt = logging.Formatter(LOGGING_FORMAT, LOGGING_DATEFORMAT)
|
||||
audit_hdrl = logging.FileHandler(auditfile)
|
||||
audit_hdrl.setFormatter(audit_fmt)
|
||||
audit_logger.addHandler(audit_hdrl)
|
||||
|
||||
custodia_logger.debug('Custodia audit log: %s', auditfile)
|
||||
|
||||
|
||||
AUDIT_NONE = 0
|
||||
AUDIT_GET_ALLOWED = 1
|
||||
AUDIT_GET_DENIED = 2
|
||||
AUDIT_SET_ALLOWED = 3
|
||||
AUDIT_SET_DENIED = 4
|
||||
AUDIT_DEL_ALLOWED = 5
|
||||
AUDIT_DEL_DENIED = 6
|
||||
AUDIT_LAST = 7
|
||||
AUDIT_SVC_NONE = 8
|
||||
AUDIT_SVC_AUTH_PASS = 9
|
||||
AUDIT_SVC_AUTH_FAIL = 10
|
||||
AUDIT_SVC_AUTHZ_PASS = 11
|
||||
AUDIT_SVC_AUTHZ_FAIL = 12
|
||||
AUDIT_SVC_LAST = 13
|
||||
AUDIT_MESSAGES = [
|
||||
"AUDIT FAILURE",
|
||||
"ALLOWED: '%(client)s' requested key '%(key)s'", # AUDIT_GET_ALLOWED
|
||||
"DENIED: '%(client)s' requested key '%(key)s'", # AUDIT_GET_DENIED
|
||||
"ALLOWED: '%(client)s' stored key '%(key)s'", # AUDIT_SET_ALLOWED
|
||||
"DENIED: '%(client)s' stored key '%(key)s'", # AUDIT_SET_DENIED
|
||||
"ALLOWED: '%(client)s' deleted key '%(key)s'", # AUDIT_DEL_ALLOWED
|
||||
"DENIED: '%(client)s' deleted key '%(key)s'", # AUDIT_DEL_DENIED
|
||||
"AUDIT FAILURE 7",
|
||||
"AUDIT FAILURE 8",
|
||||
"PASS: '%(cli)s' authenticated as '%(name)s'", # SVC_AUTH_PASS
|
||||
"FAIL: '%(cli)s' authenticated as '%(name)s'", # SVC_AUTH_FAIL
|
||||
"PASS: '%(cli)s' authorized for '%(name)s'", # SVC_AUTHZ_PASS
|
||||
"FAIL: '%(cli)s' authorized for '%(name)s'", # SVC_AUTHZ_FAIL
|
||||
"AUDIT FAILURE 13",
|
||||
]
|
||||
|
||||
|
||||
class AuditLog:
|
||||
def __init__(self, logger):
|
||||
self.logger = logger
|
||||
|
||||
def key_access(self, origin, action, client, keyname):
|
||||
if action <= AUDIT_NONE or action >= AUDIT_LAST:
|
||||
action = AUDIT_NONE
|
||||
msg = AUDIT_MESSAGES[action]
|
||||
args = {'client': client, 'key': keyname}
|
||||
self.logger.info(msg, args, extra={'origin': origin})
|
||||
|
||||
def svc_access(self, origin, action, client, name):
|
||||
if action <= AUDIT_SVC_NONE or action >= AUDIT_SVC_LAST:
|
||||
action = AUDIT_NONE
|
||||
msg = AUDIT_MESSAGES[action]
|
||||
args = {'cli': client, 'name': name}
|
||||
self.logger.info(msg, args, extra={'origin': origin})
|
||||
|
||||
|
||||
auditlog = AuditLog(logging.getLogger('custodia.audit'))
|
||||
|
||||
|
||||
class ProvisionalWarning(FutureWarning):
|
||||
pass
|
||||
|
||||
|
||||
def warn_provisional(modulename, stacklevel=3):
|
||||
msg = ("Module '{}' is a provisional API. It may changed or get "
|
||||
"removed in future releases.")
|
||||
return warnings.warn(msg.format(modulename), ProvisionalWarning,
|
||||
stacklevel=stacklevel)
|
||||
0
ipaserver/custodia/message/__init__.py
Normal file
0
ipaserver/custodia/message/__init__.py
Normal file
68
ipaserver/custodia/message/common.py
Normal file
68
ipaserver/custodia/message/common.py
Normal file
@@ -0,0 +1,68 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
from ipaserver.custodia.log import getLogger
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
class InvalidMessage(Exception):
|
||||
"""Invalid Message.
|
||||
|
||||
This exception is raised when a message cannot be parsed
|
||||
or validated.
|
||||
"""
|
||||
def __init__(self, message=None):
|
||||
logger.debug(message)
|
||||
super(InvalidMessage, self).__init__(message)
|
||||
|
||||
|
||||
class UnknownMessageType(Exception):
|
||||
"""Unknown Message Type.
|
||||
|
||||
This exception is raised when a message is of an unknown
|
||||
type.
|
||||
"""
|
||||
def __init__(self, message=None):
|
||||
logger.debug(message)
|
||||
super(UnknownMessageType, self).__init__(message)
|
||||
|
||||
|
||||
class UnallowedMessage(Exception):
|
||||
"""Unallowed Message.
|
||||
|
||||
This exception is raise when the message type is know but
|
||||
is not allowed.
|
||||
"""
|
||||
def __init__(self, message=None):
|
||||
logger.debug(message)
|
||||
super(UnallowedMessage, self).__init__(message)
|
||||
|
||||
|
||||
class MessageHandler:
|
||||
|
||||
def __init__(self, request):
|
||||
self.req = request
|
||||
self.name = None
|
||||
self.payload = None
|
||||
self.msg_type = None
|
||||
|
||||
def parse(self, msg, name):
|
||||
"""Parses the message.
|
||||
|
||||
:param req: the original request
|
||||
:param msg: a decoded json string with the incoming message
|
||||
|
||||
:raises InvalidMessage: if the message cannot be parsed or validated
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
def reply(self, output):
|
||||
"""Generates a reply.
|
||||
|
||||
:param req: the original request
|
||||
:param output: a Python object that can be converted to JSON
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
64
ipaserver/custodia/message/formats.py
Normal file
64
ipaserver/custodia/message/formats.py
Normal file
@@ -0,0 +1,64 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
from ipaserver.custodia.message.common import InvalidMessage
|
||||
from ipaserver.custodia.message.common import UnallowedMessage
|
||||
from ipaserver.custodia.message.common import UnknownMessageType
|
||||
from ipaserver.custodia.message.kem import KEMHandler
|
||||
from ipaserver.custodia.message.simple import SimpleKey
|
||||
|
||||
|
||||
default_types = ['simple', 'kem']
|
||||
|
||||
key_types = {'simple': SimpleKey,
|
||||
'kem': KEMHandler}
|
||||
|
||||
|
||||
class Validator:
|
||||
"""Validates incoming messages."""
|
||||
|
||||
def __init__(self, allowed=None):
|
||||
"""Creates a Validator object.
|
||||
|
||||
:param allowed: list of allowed message types (optional)
|
||||
"""
|
||||
self.allowed = allowed or default_types
|
||||
self.types = key_types.copy()
|
||||
|
||||
def add_types(self, types):
|
||||
self.types.update(types)
|
||||
|
||||
def parse(self, request, msg, name):
|
||||
if not isinstance(msg, dict):
|
||||
raise InvalidMessage('The message must be a dict')
|
||||
|
||||
if 'type' not in msg:
|
||||
raise InvalidMessage('The type is missing')
|
||||
|
||||
if isinstance(msg['type'], list):
|
||||
if len(msg['type']) != 1:
|
||||
raise InvalidMessage('Type is multivalued: %s' % msg['type'])
|
||||
msg_type = msg['type'][0]
|
||||
else:
|
||||
msg_type = msg['type']
|
||||
|
||||
if 'value' not in msg:
|
||||
raise InvalidMessage('The value is missing')
|
||||
|
||||
if isinstance(msg['value'], list):
|
||||
if len(msg['value']) != 1:
|
||||
raise InvalidMessage('Value is multivalued: %s' % msg['value'])
|
||||
msg_value = msg['value'][0]
|
||||
else:
|
||||
msg_value = msg['value']
|
||||
|
||||
if msg_type not in self.types:
|
||||
raise UnknownMessageType("Type '%s' is unknown" % msg_type)
|
||||
|
||||
if msg_type not in self.allowed:
|
||||
raise UnallowedMessage("Message type '%s' not allowed" % (
|
||||
msg_type,))
|
||||
|
||||
handler = self.types[msg_type](request)
|
||||
handler.parse(msg_value, name)
|
||||
return handler
|
||||
247
ipaserver/custodia/message/kem.py
Normal file
247
ipaserver/custodia/message/kem.py
Normal file
@@ -0,0 +1,247 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
from jwcrypto.common import json_decode
|
||||
from jwcrypto.common import json_encode
|
||||
from jwcrypto.jwe import JWE
|
||||
from jwcrypto.jwk import JWK
|
||||
from jwcrypto.jws import JWS
|
||||
from jwcrypto.jwt import JWT
|
||||
|
||||
from ipaserver.custodia.httpd.authorizers import SimplePathAuthz
|
||||
from ipaserver.custodia.log import getLogger
|
||||
from ipaserver.custodia.message.common import InvalidMessage
|
||||
from ipaserver.custodia.message.common import MessageHandler
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
KEY_USAGE_SIG = 0
|
||||
KEY_USAGE_ENC = 1
|
||||
KEY_USAGE_MAP = {KEY_USAGE_SIG: 'sig', KEY_USAGE_ENC: 'enc'}
|
||||
|
||||
|
||||
class UnknownPublicKey(Exception):
|
||||
def __init__(self, message=None):
|
||||
logger.debug(message)
|
||||
super(UnknownPublicKey, self).__init__(message)
|
||||
|
||||
|
||||
class KEMKeysStore(SimplePathAuthz):
|
||||
"""A KEM Keys Store.
|
||||
|
||||
This is a store that holds public keys of registered
|
||||
clients allowed to use KEM messages. It takes the form
|
||||
of an authorizer merely for the purpose of attaching
|
||||
itself to a 'request' so that later on the KEM Parser
|
||||
can fetch the appropriate key to verify/decrypt an
|
||||
incoming request and make the payload available.
|
||||
|
||||
The KEM Parser will actually perform additional
|
||||
authorization checks in this case.
|
||||
|
||||
SimplePathAuthz is extended here as we ant to attach the
|
||||
store only to requests on paths we are configured to
|
||||
manage.
|
||||
"""
|
||||
|
||||
def __init__(self, config):
|
||||
super(KEMKeysStore, self).__init__(config)
|
||||
self._server_keys = None
|
||||
self._alg = None
|
||||
self._enc = None
|
||||
|
||||
def _db_key(self, kid):
|
||||
return os.path.join('kemkeys', kid)
|
||||
|
||||
def handle(self, request):
|
||||
inpath = super(KEMKeysStore, self).handle(request)
|
||||
if inpath:
|
||||
request['KEMKeysStore'] = self
|
||||
return inpath
|
||||
|
||||
def find_key(self, kid, usage):
|
||||
dbkey = self._db_key('%s/%s' % (KEY_USAGE_MAP[usage], kid))
|
||||
pubkey = self.store.get(dbkey)
|
||||
if pubkey is None:
|
||||
raise UnknownPublicKey(kid)
|
||||
return pubkey
|
||||
|
||||
@property
|
||||
def server_keys(self):
|
||||
if self._server_keys is None:
|
||||
if 'server_keys' not in self.config:
|
||||
raise UnknownPublicKey("Server Keys not defined")
|
||||
skey = self.find_key(self.config['server_keys'], KEY_USAGE_SIG)
|
||||
ekey = self.find_key(self.config['server_keys'], KEY_USAGE_ENC)
|
||||
self._server_keys = [JWK(**(json_decode(skey))),
|
||||
JWK(**(json_decode(ekey)))]
|
||||
return self._server_keys
|
||||
|
||||
@property
|
||||
def alg(self):
|
||||
if self._alg is None:
|
||||
alg = self.config.get('signing_algorithm', None)
|
||||
if alg is None:
|
||||
ktype = self.server_keys[KEY_USAGE_SIG]['kty']
|
||||
if ktype == 'RSA':
|
||||
alg = 'RS256'
|
||||
elif ktype == 'EC':
|
||||
alg = 'ES256'
|
||||
else:
|
||||
raise ValueError('Key type unsupported for signing')
|
||||
self._alg = alg
|
||||
return self._alg
|
||||
|
||||
|
||||
def check_kem_claims(claims, name):
|
||||
if 'sub' not in claims:
|
||||
raise InvalidMessage('Missing subject in payload')
|
||||
if claims['sub'] != name:
|
||||
raise InvalidMessage('Key name %s does not match subject %s' % (
|
||||
name, claims['sub']))
|
||||
if 'exp' not in claims:
|
||||
raise InvalidMessage('Missing expiration time in payload')
|
||||
if claims['exp'] - (10 * 60) > int(time.time()):
|
||||
raise InvalidMessage('Message expiration too far in the future')
|
||||
if claims['exp'] < int(time.time()):
|
||||
raise InvalidMessage('Message Expired')
|
||||
|
||||
|
||||
class KEMHandler(MessageHandler):
|
||||
"""Handles 'kem' messages"""
|
||||
|
||||
def __init__(self, request):
|
||||
super(KEMHandler, self).__init__(request)
|
||||
self.kkstore = self.req.get('KEMKeysStore', None)
|
||||
if self.kkstore is None:
|
||||
raise Exception('KEM KeyStore not configured')
|
||||
self.client_keys = None
|
||||
self.name = None
|
||||
|
||||
def _get_key(self, header, usage):
|
||||
if 'kid' not in header:
|
||||
raise InvalidMessage("Missing key identifier")
|
||||
|
||||
key = self.kkstore.find_key(header.get('kid'), usage)
|
||||
if key is None:
|
||||
raise UnknownPublicKey('Key found [kid:%s]' % header.get('kid'))
|
||||
return json_decode(key)
|
||||
|
||||
def parse(self, msg, name):
|
||||
"""Parses the message.
|
||||
|
||||
We check that the message is properly formatted.
|
||||
|
||||
:param msg: a json-encoded value containing a JWS or JWE+JWS token
|
||||
|
||||
:raises InvalidMessage: if the message cannot be parsed or validated
|
||||
|
||||
:returns: A verified payload
|
||||
"""
|
||||
|
||||
try:
|
||||
jtok = JWT(jwt=msg)
|
||||
except Exception as e:
|
||||
raise InvalidMessage('Failed to parse message: %s' % str(e))
|
||||
|
||||
try:
|
||||
token = jtok.token
|
||||
if isinstance(token, JWE):
|
||||
token.decrypt(self.kkstore.server_keys[KEY_USAGE_ENC])
|
||||
# If an encrypted payload is received then there must be
|
||||
# a nested signed payload to verify the provenance.
|
||||
payload = token.payload.decode('utf-8')
|
||||
token = JWS()
|
||||
token.deserialize(payload)
|
||||
elif isinstance(token, JWS):
|
||||
pass
|
||||
else:
|
||||
raise TypeError("Invalid Token type: %s" % type(jtok))
|
||||
|
||||
# Retrieve client keys for later use
|
||||
self.client_keys = [
|
||||
JWK(**self._get_key(token.jose_header, KEY_USAGE_SIG)),
|
||||
JWK(**self._get_key(token.jose_header, KEY_USAGE_ENC))]
|
||||
|
||||
# verify token and get payload
|
||||
token.verify(self.client_keys[KEY_USAGE_SIG])
|
||||
claims = json_decode(token.payload)
|
||||
except Exception as e:
|
||||
logger.debug('Failed to validate message', exc_info=True)
|
||||
raise InvalidMessage('Failed to validate message: %s' % str(e))
|
||||
|
||||
check_kem_claims(claims, name)
|
||||
self.name = name
|
||||
self.payload = claims.get('value')
|
||||
self.msg_type = 'kem'
|
||||
|
||||
return {'type': self.msg_type,
|
||||
'value': {'kid': self.client_keys[KEY_USAGE_ENC].get('kid'),
|
||||
'claims': claims}}
|
||||
|
||||
def reply(self, output):
|
||||
if self.client_keys is None:
|
||||
raise UnknownPublicKey("Peer key not defined")
|
||||
|
||||
ktype = self.client_keys[KEY_USAGE_ENC]['kty']
|
||||
if ktype == 'RSA':
|
||||
enc = ('RSA-OAEP', 'A256CBC-HS512')
|
||||
else:
|
||||
raise ValueError("'%s' type not supported yet" % ktype)
|
||||
|
||||
value = make_enc_kem(self.name, output,
|
||||
self.kkstore.server_keys[KEY_USAGE_SIG],
|
||||
self.kkstore.alg,
|
||||
self.client_keys[1], enc)
|
||||
|
||||
return {'type': 'kem', 'value': value}
|
||||
|
||||
|
||||
class KEMClient:
|
||||
|
||||
def __init__(self, server_keys, client_keys):
|
||||
self.server_keys = server_keys
|
||||
self.client_keys = client_keys
|
||||
|
||||
def make_request(self, name, value=None, alg="RS256", encalg=None):
|
||||
if encalg is None:
|
||||
return make_sig_kem(name, value,
|
||||
self.client_keys[KEY_USAGE_SIG], alg)
|
||||
else:
|
||||
return make_enc_kem(name, value,
|
||||
self.client_keys[KEY_USAGE_SIG], alg,
|
||||
self.server_keys[KEY_USAGE_ENC], encalg)
|
||||
|
||||
def parse_reply(self, name, message):
|
||||
claims = decode_enc_kem(message,
|
||||
self.client_keys[KEY_USAGE_ENC],
|
||||
self.server_keys[KEY_USAGE_SIG])
|
||||
check_kem_claims(claims, name)
|
||||
return claims['value']
|
||||
|
||||
|
||||
def make_sig_kem(name, value, key, alg):
|
||||
header = {'kid': key.get('kid'), 'alg': alg}
|
||||
claims = {'sub': name, 'exp': int(time.time() + (5 * 60))}
|
||||
if value is not None:
|
||||
claims['value'] = value
|
||||
jwt = JWT(header, claims)
|
||||
jwt.make_signed_token(key)
|
||||
return jwt.serialize(compact=True)
|
||||
|
||||
|
||||
def make_enc_kem(name, value, sig_key, alg, enc_key, enc):
|
||||
plaintext = make_sig_kem(name, value, sig_key, alg)
|
||||
eprot = {'kid': enc_key.get('kid'), 'alg': enc[0], 'enc': enc[1]}
|
||||
jwe = JWE(plaintext, json_encode(eprot))
|
||||
jwe.add_recipient(enc_key)
|
||||
return jwe.serialize(compact=True)
|
||||
|
||||
|
||||
def decode_enc_kem(message, enc_key, sig_key):
|
||||
jwe = JWT(jwt=message, key=enc_key)
|
||||
jws = JWT(jwt=jwe.claims, key=sig_key)
|
||||
return json_decode(jws.claims)
|
||||
42
ipaserver/custodia/message/simple.py
Normal file
42
ipaserver/custodia/message/simple.py
Normal file
@@ -0,0 +1,42 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
from six import string_types
|
||||
|
||||
from ipaserver.custodia.message.common import InvalidMessage
|
||||
from ipaserver.custodia.message.common import MessageHandler
|
||||
|
||||
|
||||
class SimpleKey(MessageHandler):
|
||||
"""Handles 'simple' messages"""
|
||||
|
||||
def parse(self, msg, name):
|
||||
"""Parses a simple message
|
||||
|
||||
:param msg: the json-decoded value
|
||||
:param name: the requested name
|
||||
|
||||
:raises UnknownMessageType: if the type is not 'simple'
|
||||
:raises InvalidMessage: if the message cannot be parsed or validated
|
||||
"""
|
||||
|
||||
# On requests we imply 'simple' if there is no input message
|
||||
if msg is None:
|
||||
return
|
||||
|
||||
if not isinstance(msg, string_types):
|
||||
raise InvalidMessage("The 'value' attribute is not a string")
|
||||
|
||||
self.name = name
|
||||
self.payload = msg
|
||||
self.msg_type = 'simple'
|
||||
|
||||
def reply(self, output):
|
||||
if output is None:
|
||||
return None
|
||||
|
||||
if self.name.endswith('/'):
|
||||
# directory listings are pass-through with simple messages
|
||||
return output
|
||||
|
||||
return {'type': self.msg_type, 'value': output}
|
||||
479
ipaserver/custodia/plugin.py
Normal file
479
ipaserver/custodia/plugin.py
Normal file
@@ -0,0 +1,479 @@
|
||||
# Copyright (C) 2016 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import abc
|
||||
import configparser
|
||||
import grp
|
||||
import inspect
|
||||
import json
|
||||
import pwd
|
||||
import re
|
||||
|
||||
from jwcrypto.common import json_encode
|
||||
|
||||
import six
|
||||
|
||||
from .log import CustodiaLoggingAdapter, auditlog, getLogger
|
||||
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
class _Required:
|
||||
__slots__ = ()
|
||||
|
||||
def __repr__(self):
|
||||
return 'REQUIRED'
|
||||
|
||||
|
||||
class INHERIT_GLOBAL: # noqa: N801
|
||||
__slots__ = ('default',)
|
||||
|
||||
def __init__(self, default):
|
||||
self.default = default
|
||||
|
||||
def __repr__(self):
|
||||
return 'INHERIT_GLOBAL({})'.format(self.default)
|
||||
|
||||
|
||||
REQUIRED = _Required()
|
||||
|
||||
|
||||
class CustodiaException(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class HTTPError(CustodiaException):
|
||||
def __init__(self, code=None, message=None):
|
||||
self.code = code if code is not None else 500
|
||||
self.mesg = message
|
||||
errstring = '%d: %s' % (self.code, self.mesg)
|
||||
super(HTTPError, self).__init__(errstring)
|
||||
|
||||
|
||||
class CSStoreError(CustodiaException):
|
||||
pass
|
||||
|
||||
|
||||
class CSStoreExists(CustodiaException):
|
||||
pass
|
||||
|
||||
|
||||
class CSStoreUnsupported(CustodiaException):
|
||||
pass
|
||||
|
||||
|
||||
class CSStoreDenied(CustodiaException):
|
||||
pass
|
||||
|
||||
|
||||
class OptionHandler:
|
||||
"""Handler and parser for plugin options
|
||||
"""
|
||||
def __init__(self, parser, section):
|
||||
self.parser = parser
|
||||
self.section = section
|
||||
# handler is reserved to look up the plugin class
|
||||
self.seen = {'handler'}
|
||||
|
||||
def get(self, po):
|
||||
"""Lookup value for a PluginOption instance
|
||||
|
||||
Args:
|
||||
po: PluginOption
|
||||
|
||||
Returns: converted value
|
||||
"""
|
||||
name = po.name
|
||||
typ = po.typ
|
||||
default = po.default
|
||||
|
||||
handler = getattr(self, '_get_{}'.format(typ), None)
|
||||
if handler is None:
|
||||
raise ValueError(typ)
|
||||
self.seen.add(name)
|
||||
|
||||
if not self.parser.has_option(self.section, name):
|
||||
if default is REQUIRED:
|
||||
raise NameError(self.section, name)
|
||||
if isinstance(default, INHERIT_GLOBAL):
|
||||
return handler('global', name, default.default)
|
||||
# don't return default here, give the handler a chance to modify
|
||||
# the default, e.g. pw_uid with default='root' returns 0.
|
||||
|
||||
return handler(self.section, name, default)
|
||||
|
||||
def check_surplus(self):
|
||||
surplus = []
|
||||
for name, _value in self.parser.items(self.section):
|
||||
if (name not in self.seen and not
|
||||
self.parser.has_option(configparser.DEFAULTSECT, name)):
|
||||
surplus.append(name)
|
||||
return surplus
|
||||
|
||||
def _get_int(self, section, name, default):
|
||||
return self.parser.getint(section, name, fallback=default)
|
||||
|
||||
def _get_oct(self, section, name, default):
|
||||
value = self.parser.get(section, name, fallback=default)
|
||||
return int(value, 8)
|
||||
|
||||
def _get_hex(self, section, name, default):
|
||||
value = self.parser.get(section, name, fallback=default)
|
||||
return int(value, 16)
|
||||
|
||||
def _get_float(self, section, name, default):
|
||||
return self.parser.getfloat(section, name, fallback=default)
|
||||
|
||||
def _get_bool(self, section, name, default):
|
||||
return self.parser.getboolean(section, name, fallback=default)
|
||||
|
||||
def _get_regex(self, section, name, default):
|
||||
value = self.parser.get(section, name, fallback=default)
|
||||
if not value:
|
||||
return None
|
||||
else:
|
||||
return re.compile(value)
|
||||
|
||||
def _get_str(self, section, name, default):
|
||||
return self.parser.get(section, name, fallback=default)
|
||||
|
||||
def _split_string(self, value):
|
||||
if ',' in value:
|
||||
values = value.split(',')
|
||||
else:
|
||||
values = value.split(' ')
|
||||
return list(v.strip() for v in values if v.strip())
|
||||
|
||||
def _get_str_set(self, section, name, default):
|
||||
try:
|
||||
value = self.parser.get(section, name)
|
||||
except configparser.NoOptionError:
|
||||
return default
|
||||
if not value or not value.strip():
|
||||
return None
|
||||
else:
|
||||
return set(self._split_string(value))
|
||||
|
||||
def _get_str_list(self, section, name, default):
|
||||
try:
|
||||
value = self.parser.get(section, name)
|
||||
except configparser.NoOptionError:
|
||||
return default
|
||||
if not value or not value.strip():
|
||||
return None
|
||||
else:
|
||||
return self._split_string(value)
|
||||
|
||||
def _get_store(self, section, name, default):
|
||||
return self.parser.get(section, name, fallback=default)
|
||||
|
||||
def _get_pwd_uid(self, section, name, default):
|
||||
value = self.parser.get(section, name, fallback=default)
|
||||
try:
|
||||
return int(value)
|
||||
except ValueError:
|
||||
return pwd.getpwnam(value).pw_uid
|
||||
|
||||
def _get_grp_gid(self, section, name, default):
|
||||
value = self.parser.get(section, name, fallback=default)
|
||||
try:
|
||||
return int(value)
|
||||
except ValueError:
|
||||
return grp.getgrnam(value).gr_gid
|
||||
|
||||
def _get_json(self, section, name, default):
|
||||
value = self.parser.get(section, name, fallback=default)
|
||||
return json.loads(value)
|
||||
|
||||
|
||||
class PluginOption:
|
||||
"""Plugin option
|
||||
|
||||
code::
|
||||
|
||||
class MyPlugin(CustodiaPlugin):
|
||||
number = PluginOption(int, REQUIRED, 'my value')
|
||||
values = PluginOption('str_list', 'foo bar', 'a list of strings')
|
||||
|
||||
|
||||
config::
|
||||
|
||||
[myplugin]
|
||||
handler = MyPlugin
|
||||
number = 1
|
||||
values = egg spam python
|
||||
|
||||
|
||||
**Supported value types**
|
||||
|
||||
*str*
|
||||
plain string
|
||||
*str_set*
|
||||
set of comma-separated or space-separated strings
|
||||
*str_list*
|
||||
ordered list of comma-separated or space-separated strings
|
||||
*int*
|
||||
number (converted from base 10)
|
||||
*hex*
|
||||
number (converted from base 16)
|
||||
*oct*
|
||||
number (converted from base 8)
|
||||
*float*
|
||||
floating point number
|
||||
*bool*
|
||||
boolean (true: on, true, yes, 1; false: off, false, no, 0)
|
||||
*regex*
|
||||
regular expression string
|
||||
*store*
|
||||
special value for refer to a store plugin
|
||||
*pwd_uid*
|
||||
numeric user id or user name
|
||||
*grp_gid*
|
||||
numeric group id or group name
|
||||
*json*
|
||||
JSON string
|
||||
"""
|
||||
__slots__ = ('name', 'typ', 'default', 'doc')
|
||||
|
||||
def __init__(self, typ, default, doc):
|
||||
self.name = None
|
||||
if typ in {str, int, float, bool, oct, hex}:
|
||||
self.typ = typ.__name__
|
||||
else:
|
||||
self.typ = typ
|
||||
self.default = default
|
||||
self.doc = doc
|
||||
|
||||
def __repr__(self):
|
||||
if self.default is REQUIRED:
|
||||
msg = "<Required option {0.name} ({0.typ}): {0.doc}>"
|
||||
else:
|
||||
msg = ("<Option {0.name} ({0.typ}, default: '{0.default}'): "
|
||||
"{0.doc}>")
|
||||
return msg.format(self)
|
||||
|
||||
|
||||
class CustodiaPluginMeta(abc.ABCMeta):
|
||||
def __new__(cls, name, bases, namespace, **kwargs):
|
||||
ncls = super(CustodiaPluginMeta, cls).__new__(
|
||||
cls, name, bases, namespace, **kwargs)
|
||||
|
||||
sig = inspect.signature(ncls.__init__)
|
||||
args = list(sig.parameters)
|
||||
|
||||
if args[1:3] != ['config', 'section']:
|
||||
# old-style plugin class
|
||||
ncls._options = None
|
||||
return ncls
|
||||
|
||||
# new-style plugin class
|
||||
# every plugin has a debug option. In case it is not set, the debug
|
||||
# flag from [global] is inherited.
|
||||
if not hasattr(ncls, 'debug'):
|
||||
ncls.debug = PluginOption(bool, INHERIT_GLOBAL(False), '')
|
||||
# get options
|
||||
options = []
|
||||
for name, value in inspect.getmembers(ncls):
|
||||
if not isinstance(value, PluginOption):
|
||||
continue
|
||||
value.name = name
|
||||
options.append(value)
|
||||
|
||||
ncls._options = tuple(options)
|
||||
return ncls
|
||||
|
||||
|
||||
@six.add_metaclass(CustodiaPluginMeta)
|
||||
class CustodiaPlugin:
|
||||
"""Abstract base class for all Custodia plugins
|
||||
"""
|
||||
_options = ()
|
||||
|
||||
def __init__(self, config, section=None):
|
||||
origin, debug = self._configure(config, section)
|
||||
self._auditlog = auditlog
|
||||
self.section = section # plugin loader sets section for old plugins
|
||||
self.origin = origin
|
||||
self.logger = CustodiaLoggingAdapter(self, debug)
|
||||
|
||||
def audit_key_access(self, *args, **kwargs):
|
||||
self._auditlog.key_access(self.origin, *args, **kwargs)
|
||||
|
||||
def audit_svc_access(self, *args, **kwargs):
|
||||
self._auditlog.svc_access(self.origin, *args, **kwargs)
|
||||
|
||||
def _configure(self, config, section):
|
||||
if section is not None and self._options is not None:
|
||||
# new style configuration
|
||||
opt = OptionHandler(config, section)
|
||||
for option in self._options:
|
||||
value = opt.get(option)
|
||||
# special case for store
|
||||
if option.typ == 'store':
|
||||
if option.name != 'store':
|
||||
raise ValueError(option.name)
|
||||
self.store_name = value
|
||||
self.store = None
|
||||
else:
|
||||
setattr(self, option.name, value)
|
||||
|
||||
surplus = opt.check_surplus()
|
||||
if surplus:
|
||||
raise ValueError('Surplus options in {}: {}'.format(
|
||||
section, surplus))
|
||||
|
||||
origin = '%s-[%s]' % (type(self).__name__, section)
|
||||
debug = self.debug # pylint: disable=no-member
|
||||
else:
|
||||
# old style configuration
|
||||
if config is None:
|
||||
config = {}
|
||||
self.config = config
|
||||
# special case for store
|
||||
if 'store' in config:
|
||||
self.store_name = self.config.get('store')
|
||||
self.store = None
|
||||
origin = config.get('facility_name', self.__class__.__name__)
|
||||
debug = config.get('debug', 'false').lower() == 'true'
|
||||
|
||||
return origin, debug
|
||||
|
||||
def _attach_store(self, config, cfgparser, context):
|
||||
"""Attach nested store
|
||||
"""
|
||||
if getattr(self, 'store', None) is not None:
|
||||
# already attached
|
||||
return
|
||||
store_plugin = config['stores'].get(self.store_name)
|
||||
if store_plugin is None:
|
||||
raise ValueError(
|
||||
"'{}' references non-existing store '{}'".format(
|
||||
self.section, self.store_name))
|
||||
self.store = store_plugin
|
||||
store_plugin.finalize_init(config, cfgparser, context=self)
|
||||
|
||||
def finalize_init(self, config, cfgparser, context=None):
|
||||
"""Two-phase initialization
|
||||
|
||||
Args:
|
||||
config: server config dictionary
|
||||
cfgparser: configparser instance
|
||||
context: initialization context (None for global)
|
||||
"""
|
||||
if getattr(self, 'store_name', None) is not None:
|
||||
self._attach_store(config, cfgparser, context)
|
||||
|
||||
|
||||
class CSStore(CustodiaPlugin):
|
||||
"""Base class for stores
|
||||
"""
|
||||
@abc.abstractmethod
|
||||
def get(self, key):
|
||||
pass
|
||||
|
||||
@abc.abstractmethod
|
||||
def set(self, key, value, replace=False):
|
||||
pass
|
||||
|
||||
# relax ABC for now, see https://github.com/latchset/custodia/issues/84
|
||||
|
||||
# @abc.abstractmethod
|
||||
def span(self, key):
|
||||
raise NotImplementedError
|
||||
|
||||
# @abc.abstractmethod
|
||||
def list(self, keyfilter=None):
|
||||
raise NotImplementedError
|
||||
|
||||
# @abc.abstractmethod
|
||||
def cut(self, key):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class HTTPAuthorizer(CustodiaPlugin):
|
||||
"""Base class for authorizers
|
||||
"""
|
||||
@abc.abstractmethod
|
||||
def handle(self, request):
|
||||
pass
|
||||
|
||||
|
||||
class HTTPAuthenticator(CustodiaPlugin):
|
||||
"""Base class for authenticators
|
||||
"""
|
||||
@abc.abstractmethod
|
||||
def handle(self, request):
|
||||
pass
|
||||
|
||||
|
||||
DEFAULT_CTYPE = 'text/html; charset=utf-8'
|
||||
SUPPORTED_COMMANDS = ['GET', 'PUT', 'POST', 'DELETE']
|
||||
|
||||
|
||||
class HTTPConsumer(CustodiaPlugin):
|
||||
"""Base class for consumers
|
||||
"""
|
||||
def __init__(self, config, section=None):
|
||||
super(HTTPConsumer, self).__init__(config, section)
|
||||
self.subs = dict()
|
||||
self.root = self
|
||||
|
||||
def add_sub(self, name, sub):
|
||||
self.subs[name] = sub
|
||||
if hasattr(sub, 'root'):
|
||||
sub.root = self.root
|
||||
|
||||
def _find_handler(self, request):
|
||||
base = self
|
||||
command = request.get('command', 'GET')
|
||||
if command not in SUPPORTED_COMMANDS:
|
||||
raise HTTPError(501)
|
||||
trail = request.get('trail', None)
|
||||
if trail is not None:
|
||||
for comp in trail:
|
||||
subs = getattr(base, 'subs', {})
|
||||
if comp in subs:
|
||||
base = subs[comp]
|
||||
trail.pop(0)
|
||||
else:
|
||||
break
|
||||
|
||||
handler = getattr(base, command)
|
||||
if handler is None:
|
||||
raise HTTPError(400)
|
||||
|
||||
return handler
|
||||
|
||||
def handle(self, request):
|
||||
handler = self._find_handler(request)
|
||||
response = {'headers': dict()}
|
||||
|
||||
# Handle request
|
||||
output = handler(request, response)
|
||||
if output is None:
|
||||
output = response.get('output')
|
||||
|
||||
ct = response['headers'].get('Content-Type')
|
||||
if ct is None:
|
||||
ct = response['headers']['Content-Type'] = DEFAULT_CTYPE
|
||||
|
||||
if 'application/json' in ct and isinstance(output, (dict, list)):
|
||||
output = json_encode(output).encode('utf-8')
|
||||
response['headers']['Content-Length'] = str(len(output))
|
||||
|
||||
response['output'] = output
|
||||
|
||||
if output is not None and not hasattr(output, 'read') \
|
||||
and not isinstance(output, six.binary_type):
|
||||
msg = "Handler {} returned unsupported type {} ({}):\n{!r}"
|
||||
raise TypeError(msg.format(handler, type(output), ct, output))
|
||||
|
||||
if output is not None and 'Content-Length' not in response['headers']:
|
||||
if hasattr(output, 'read'):
|
||||
# LOG: warning file-type objects should set Content-Length
|
||||
pass
|
||||
else:
|
||||
response['headers']['Content-Length'] = str(len(output))
|
||||
|
||||
return response
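# Sketch of how a consumer is used (illustrative only; the Ping class
# and the literal request dict are made up, but the dict keys match the
# ones consumed by handle() above, and json_encode is the helper used
# by handle()):
#
#   class Ping(HTTPConsumer):
#       def GET(self, request, response):
#           response['headers']['Content-Type'] = 'application/json'
#           return json_encode({'pong': True}).encode('utf-8')
#
#   ping = Ping({'facility_name': 'ping'})
#   reply = ping.handle({'command': 'GET', 'trail': [], 'headers': {}})
#   reply['output']  ->  b'{"pong": true}'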
|
||||
20
ipaserver/custodia/root.py
Normal file
@@ -0,0 +1,20 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
|
||||
from ipaserver.custodia.plugin import HTTPConsumer, PluginOption
|
||||
from ipaserver.custodia.secrets import Secrets
|
||||
|
||||
|
||||
class Root(HTTPConsumer):
|
||||
store = PluginOption('store', None, None)
|
||||
|
||||
def __init__(self, config, section):
|
||||
super(Root, self).__init__(config, section)
|
||||
if self.store_name is not None:
|
||||
self.add_sub('secrets', Secrets(config, section))
|
||||
|
||||
def GET(self, request, response):
|
||||
msg = json.dumps({'message': "Quis custodiet ipsos custodes?"})
|
||||
return msg.encode('utf-8')
|
||||
399
ipaserver/custodia/secrets.py
Normal file
@@ -0,0 +1,399 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import json
|
||||
import os
|
||||
from base64 import b64decode, b64encode
|
||||
|
||||
from ipaserver.custodia import log
|
||||
from ipaserver.custodia.message.common import UnallowedMessage
|
||||
from ipaserver.custodia.message.common import UnknownMessageType
|
||||
from ipaserver.custodia.message.formats import Validator
|
||||
from ipaserver.custodia.plugin import (
|
||||
CSStoreDenied, CSStoreError, CSStoreExists, CSStoreUnsupported
|
||||
)
|
||||
from ipaserver.custodia.plugin import HTTPConsumer, HTTPError, PluginOption
|
||||
|
||||
|
||||
class Secrets(HTTPConsumer):
|
||||
allowed_keytypes = PluginOption('str_set', 'simple', None)
|
||||
store = PluginOption('store', None, None)
|
||||
|
||||
def __init__(self, config, section):
|
||||
super(Secrets, self).__init__(config, section)
|
||||
self._validator = Validator(self.allowed_keytypes)
|
||||
|
||||
def _db_key(self, trail):
|
||||
if len(trail) < 2:
|
||||
self.logger.debug(
|
||||
"Forbidden action: Operation only permitted within a "
|
||||
"container")
|
||||
raise HTTPError(403)
|
||||
return os.path.join('keys', *trail)
|
||||
|
||||
def _db_container_key(self, default, trail):
|
||||
f = None
|
||||
if len(trail) > 1:
|
||||
f = self._db_key(trail)
|
||||
elif len(trail) == 1 and trail[0] != '':
|
||||
self.logger.debug(
|
||||
"Forbidden action: Wrong container path. Container names must "
|
||||
"end with '/'")
|
||||
raise HTTPError(403)
|
||||
elif default is None:
|
||||
self.logger.debug("Forbidden action: No default namespace")
|
||||
raise HTTPError(403)
|
||||
else:
|
||||
# Use the default namespace
|
||||
f = self._db_key([default, ''])
|
||||
return f
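# For illustration, the key layout implied by the two helpers above:
#
#   _db_key(['ns', 'app', 'key'])        -> 'keys/ns/app/key'
#   _db_container_key(None, ['ns', ''])  -> 'keys/ns/'
#   _db_container_key('ns', [])          -> 'keys/ns/'   (default namespace)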
|
||||
|
||||
def _parse(self, request, query, name):
|
||||
return self._validator.parse(request, query, name)
|
||||
|
||||
def _parse_query(self, request, name):
|
||||
# default to simple
|
||||
query = request.get('query', '')
|
||||
if len(query) == 0:
|
||||
query = {'type': 'simple', 'value': ''}
|
||||
return self._parse(request, query, name)
|
||||
|
||||
def _parse_bin_body(self, request, name):
|
||||
body = request.get('body')
|
||||
if body is None:
|
||||
raise HTTPError(400)
|
||||
value = b64encode(bytes(body)).decode('utf-8')
|
||||
payload = {'type': 'simple', 'value': value}
|
||||
return self._parse(request, payload, name)
|
||||
|
||||
def _parse_body(self, request, name):
|
||||
body = request.get('body')
|
||||
if body is None:
|
||||
raise HTTPError(400)
|
||||
value = json.loads(bytes(body).decode('utf-8'))
|
||||
return self._parse(request, value, name)
|
||||
|
||||
def _parse_maybe_body(self, request, name):
|
||||
body = request.get('body')
|
||||
if body is None:
|
||||
value = {'type': 'simple', 'value': ''}
|
||||
else:
|
||||
value = json.loads(bytes(body).decode('utf-8'))
|
||||
return self._parse(request, value, name)
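# Illustrative request bodies accepted by the parsers above (values are
# made up):
#
#   application/json (handled by _parse_body):
#       {"type": "simple", "value": "sw0rdfish"}
#
#   application/octet-stream (handled by _parse_bin_body): the raw bytes
#   are wrapped into the same 'simple' message with a base64 value, e.g.
#       b'\x00secret' -> {"type": "simple", "value": "AHNlY3JldA=="}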
|
||||
|
||||
def _parent_exists(self, default, trail):
|
||||
# check that the containers exist
|
||||
basename = self._db_container_key(trail[0], trail[:-1] + [''])
|
||||
try:
|
||||
keylist = self.root.store.list(basename)
|
||||
except CSStoreError:
|
||||
raise HTTPError(500)
|
||||
|
||||
self.logger.debug('parent_exists: %s (%s, %r) -> %r',
|
||||
basename, default, trail, keylist)
|
||||
|
||||
if keylist is not None:
|
||||
return True
|
||||
|
||||
# create default namespace if it is the only missing piece
|
||||
if len(trail) == 2 and default == trail[0]:
|
||||
container = self._db_container_key(default, '')
|
||||
self.root.store.span(container)
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def _format_reply(self, request, response, handler, output):
|
||||
reply = handler.reply(output)
|
||||
# special case to allow *very* simple clients
|
||||
if handler.msg_type == 'simple':
|
||||
binary = False
|
||||
accept = request.get('headers', {}).get('Accept', None)
|
||||
if accept is not None:
|
||||
types = accept.split(',')
|
||||
for t in types:
|
||||
if t.strip() == 'application/json':
|
||||
binary = False
|
||||
elif t.strip() == 'application/octet-stream':
|
||||
binary = True
|
||||
if binary:
|
||||
response['headers'][
|
||||
'Content-Type'] = 'application/octet-stream'
|
||||
response['output'] = b64decode(reply['value'])
|
||||
return
|
||||
|
||||
if reply is not None:
|
||||
response['headers'][
|
||||
'Content-Type'] = 'application/json; charset=utf-8'
|
||||
response['output'] = reply
|
||||
|
||||
def GET(self, request, response):
|
||||
trail = request.get('trail', [])
|
||||
if len(trail) == 0 or trail[-1] == '':
|
||||
self._list(trail, request, response)
|
||||
else:
|
||||
self._get_key(trail, request, response)
|
||||
|
||||
def PUT(self, request, response):
|
||||
trail = request.get('trail', [])
|
||||
if len(trail) == 0 or trail[-1] == '':
|
||||
raise HTTPError(405)
|
||||
else:
|
||||
self._set_key(trail, request, response)
|
||||
|
||||
def DELETE(self, request, response):
|
||||
trail = request.get('trail', [])
|
||||
if len(trail) == 0:
|
||||
raise HTTPError(405)
|
||||
if trail[-1] == '':
|
||||
self._destroy(trail, request, response)
|
||||
else:
|
||||
self._del_key(trail, request, response)
|
||||
|
||||
def POST(self, request, response):
|
||||
trail = request.get('trail', [])
|
||||
if len(trail) > 0 and trail[-1] == '':
|
||||
self._create(trail, request, response)
|
||||
else:
|
||||
raise HTTPError(405)
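# REST semantics implied by the four methods above (paths assume the
# consumer is mounted at /secrets, as Root does earlier in this commit):
#
#   GET    /secrets/container/        -> _list      (trailing '/')
#   GET    /secrets/container/key     -> _get_key
#   PUT    /secrets/container/key     -> _set_key
#   POST   /secrets/container/        -> _create    (new container)
#   DELETE /secrets/container/        -> _destroy   (container must be empty)
#   DELETE /secrets/container/key     -> _del_key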
|
||||
|
||||
def _list(self, trail, request, response):
|
||||
try:
|
||||
name = '/'.join(trail)
|
||||
msg = self._parse_query(request, name)
|
||||
except Exception as e:
|
||||
raise HTTPError(406, str(e))
|
||||
default = request.get('default_namespace', None)
|
||||
basename = self._db_container_key(default, trail)
|
||||
try:
|
||||
keylist = self.root.store.list(basename)
|
||||
self.logger.debug('list %s returned %r', basename, keylist)
|
||||
if keylist is None:
|
||||
raise HTTPError(404)
|
||||
response['headers'][
|
||||
'Content-Type'] = 'application/json; charset=utf-8'
|
||||
response['output'] = msg.reply(keylist)
|
||||
except CSStoreDenied:
|
||||
self.logger.exception(
|
||||
"List: Permission to perform this operation was denied")
|
||||
raise HTTPError(403)
|
||||
except CSStoreError:
|
||||
self.logger.exception('List: Internal server error')
|
||||
raise HTTPError(500)
|
||||
except CSStoreUnsupported:
|
||||
self.logger.exception('List: Unsupported operation')
|
||||
raise HTTPError(501)
|
||||
|
||||
def _create(self, trail, request, response):
|
||||
try:
|
||||
name = '/'.join(trail)
|
||||
msg = self._parse_maybe_body(request, name)
|
||||
except Exception as e:
|
||||
raise HTTPError(406, str(e))
|
||||
default = request.get('default_namespace', None)
|
||||
basename = self._db_container_key(None, trail)
|
||||
try:
|
||||
if len(trail) > 2:
|
||||
ok = self._parent_exists(default, trail[:-1])
|
||||
if not ok:
|
||||
raise HTTPError(404)
|
||||
|
||||
self.root.store.span(basename)
|
||||
except CSStoreDenied:
|
||||
self.logger.exception(
|
||||
"Create: Permission to perform this operation was denied")
|
||||
raise HTTPError(403)
|
||||
except CSStoreExists:
|
||||
self.logger.debug('Create: Key already exists')
|
||||
response['code'] = 200
|
||||
return
|
||||
except CSStoreError:
|
||||
self.logger.exception('Create: Internal server error')
|
||||
raise HTTPError(500)
|
||||
except CSStoreUnsupported:
|
||||
self.logger.exception('Create: Unsupported operation')
|
||||
raise HTTPError(501)
|
||||
|
||||
output = msg.reply(None)
|
||||
if output is not None:
|
||||
response['headers'][
|
||||
'Content-Type'] = 'application/json; charset=utf-8'
|
||||
response['output'] = output
|
||||
response['code'] = 201
|
||||
|
||||
def _destroy(self, trail, request, response):
|
||||
try:
|
||||
name = '/'.join(trail)
|
||||
msg = self._parse_maybe_body(request, name)
|
||||
except Exception as e:
|
||||
raise HTTPError(406, str(e))
|
||||
basename = self._db_container_key(None, trail)
|
||||
try:
|
||||
keylist = self.root.store.list(basename)
|
||||
if keylist is None:
|
||||
raise HTTPError(404)
|
||||
if len(keylist) != 0:
|
||||
raise HTTPError(409)
|
||||
ret = self.root.store.cut(basename.rstrip('/'))
|
||||
except CSStoreDenied:
|
||||
self.logger.exception(
|
||||
"Delete: Permission to perform this operation was denied")
|
||||
raise HTTPError(403)
|
||||
except CSStoreError:
|
||||
self.logger.exception('Delete: Internal server error')
|
||||
raise HTTPError(500)
|
||||
except CSStoreUnsupported:
|
||||
self.logger.exception('Delete: Unsupported operation')
|
||||
raise HTTPError(501)
|
||||
|
||||
if ret is False:
|
||||
raise HTTPError(404)
|
||||
|
||||
output = msg.reply(None)
|
||||
if output is None:
|
||||
response['code'] = 204
|
||||
else:
|
||||
response['headers'][
|
||||
'Content-Type'] = 'application/json; charset=utf-8'
|
||||
response['output'] = output
|
||||
response['code'] = 200
|
||||
|
||||
def _client_name(self, request):
|
||||
if 'remote_user' in request:
|
||||
return request['remote_user']
|
||||
elif 'creds' in request:
|
||||
creds = request['creds']
|
||||
return '<pid={pid:d} uid={uid:d} gid={gid:d}>'.format(**creds)
|
||||
else:
|
||||
return 'Unknown'
|
||||
|
||||
def _audit(self, ok, fail, fn, trail, request, response):
|
||||
action = fail
|
||||
client = self._client_name(request)
|
||||
key = '/'.join(trail)
|
||||
try:
|
||||
fn(trail, request, response)
|
||||
action = ok
|
||||
finally:
|
||||
self.audit_key_access(action, client, key)
|
||||
|
||||
def _get_key(self, trail, request, response):
|
||||
self._audit(log.AUDIT_GET_ALLOWED, log.AUDIT_GET_DENIED,
|
||||
self._int_get_key, trail, request, response)
|
||||
|
||||
def _int_get_key(self, trail, request, response):
|
||||
try:
|
||||
name = '/'.join(trail)
|
||||
handler = self._parse_query(request, name)
|
||||
except Exception as e:
|
||||
raise HTTPError(406, str(e))
|
||||
key = self._db_key(trail)
|
||||
try:
|
||||
output = self.root.store.get(key)
|
||||
if output is None:
|
||||
raise HTTPError(404)
|
||||
elif len(output) == 0:
|
||||
raise HTTPError(406)
|
||||
self._format_reply(request, response, handler, output)
|
||||
except CSStoreDenied:
|
||||
self.logger.exception(
|
||||
"Get: Permission to perform this operation was denied")
|
||||
raise HTTPError(403)
|
||||
except CSStoreError:
|
||||
self.logger.exception('Get: Internal server error')
|
||||
raise HTTPError(500)
|
||||
except CSStoreUnsupported:
|
||||
self.logger.exception('Get: Unsupported operation')
|
||||
raise HTTPError(501)
|
||||
|
||||
def _set_key(self, trail, request, response):
|
||||
self._audit(log.AUDIT_SET_ALLOWED, log.AUDIT_SET_DENIED,
|
||||
self._int_set_key, trail, request, response)
|
||||
|
||||
def _int_set_key(self, trail, request, response):
|
||||
try:
|
||||
name = '/'.join(trail)
|
||||
|
||||
content_type = request.get('headers', {}).get('Content-Type', '')
|
||||
content_type_value = content_type.split(';')[0].strip()
|
||||
if content_type_value == 'application/octet-stream':
|
||||
msg = self._parse_bin_body(request, name)
|
||||
elif content_type_value == 'application/json':
|
||||
msg = self._parse_body(request, name)
|
||||
else:
|
||||
raise ValueError('Invalid Content-Type')
|
||||
except UnknownMessageType as e:
|
||||
raise HTTPError(406, str(e))
|
||||
except UnallowedMessage as e:
|
||||
raise HTTPError(406, str(e))
|
||||
except Exception as e:
|
||||
raise HTTPError(400, str(e))
|
||||
|
||||
# must call _db_key first as access control is done here for now
|
||||
# otherwise users would be able to probe containers in namespaces
|
||||
# they do not have access to.
|
||||
key = self._db_key(trail)
|
||||
|
||||
try:
|
||||
default = request.get('default_namespace', None)
|
||||
ok = self._parent_exists(default, trail)
|
||||
if not ok:
|
||||
raise HTTPError(404)
|
||||
|
||||
ok = self.root.store.set(key, msg.payload)
|
||||
except CSStoreDenied:
|
||||
self.logger.exception(
|
||||
"Set: Permission to perform this operation was denied")
|
||||
raise HTTPError(403)
|
||||
except CSStoreExists:
|
||||
self.logger.exception('Set: Key already exists')
|
||||
raise HTTPError(409)
|
||||
except CSStoreError:
|
||||
self.logger.exception('Set: Internal Server Error')
|
||||
raise HTTPError(500)
|
||||
except CSStoreUnsupported:
|
||||
self.logger.exception('Set: Unsupported operation')
|
||||
raise HTTPError(501)
|
||||
|
||||
output = msg.reply(None)
|
||||
if output is not None:
|
||||
response['headers'][
|
||||
'Content-Type'] = 'application/json; charset=utf-8'
|
||||
response['output'] = output
|
||||
response['code'] = 201
|
||||
|
||||
def _del_key(self, trail, request, response):
|
||||
self._audit(log.AUDIT_DEL_ALLOWED, log.AUDIT_DEL_DENIED,
|
||||
self._int_del_key, trail, request, response)
|
||||
|
||||
def _int_del_key(self, trail, request, response):
|
||||
try:
|
||||
name = '/'.join(trail)
|
||||
msg = self._parse_maybe_body(request, name)
|
||||
except Exception as e:
|
||||
raise HTTPError(406, str(e))
|
||||
key = self._db_key(trail)
|
||||
try:
|
||||
ret = self.root.store.cut(key)
|
||||
except CSStoreDenied:
|
||||
self.logger.exception(
|
||||
"Delete: Permission to perform this operation was denied")
|
||||
raise HTTPError(403)
|
||||
except CSStoreError:
|
||||
self.logger.exception('Delete: Internal Server Error')
|
||||
raise HTTPError(500)
|
||||
except CSStoreUnsupported:
|
||||
self.logger.exception('Delete: Unsupported operation')
|
||||
raise HTTPError(501)
|
||||
|
||||
if ret is False:
|
||||
raise HTTPError(404)
|
||||
|
||||
output = msg.reply(None)
|
||||
if output is None:
|
||||
response['code'] = 204
|
||||
else:
|
||||
response['headers'][
|
||||
'Content-Type'] = 'application/json; charset=utf-8'
|
||||
response['output'] = output
|
||||
response['code'] = 200
|
||||
142
ipaserver/custodia/server/__init__.py
Normal file
@@ -0,0 +1,142 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import importlib
|
||||
import os
|
||||
|
||||
import pkg_resources
|
||||
|
||||
import six
|
||||
|
||||
from ipaserver.custodia import log
|
||||
from ipaserver.custodia.httpd.server import HTTPServer
|
||||
|
||||
from .args import default_argparser
|
||||
from .args import parse_args as _parse_args
|
||||
from .config import parse_config as _parse_config
|
||||
|
||||
logger = log.getLogger('custodia')
|
||||
|
||||
__all__ = ['default_argparser', 'main']
|
||||
|
||||
|
||||
def attach_store(typename, plugins, stores):
|
||||
for name, c in six.iteritems(plugins):
|
||||
if getattr(c, 'store_name', None) is None:
|
||||
continue
|
||||
try:
|
||||
c.store = stores[c.store_name]
|
||||
except KeyError:
|
||||
raise ValueError('[%s%s] references non-existing store '
|
||||
'"%s"' % (typename, name, c.store_name))
|
||||
|
||||
|
||||
def _load_plugin_class(menu, name):
|
||||
"""Load Custodia plugin
|
||||
|
||||
Entry points are preferred over dotted import path.
|
||||
"""
|
||||
group = 'custodia.{}'.format(menu)
|
||||
eps = list(pkg_resources.iter_entry_points(group, name))
|
||||
if len(eps) > 1:
|
||||
raise ValueError(
|
||||
"Multiple entry points for {} {}: {}".format(menu, name, eps))
|
||||
elif len(eps) == 1:
|
||||
# backwards compatibility with old setuptools
|
||||
ep = eps[0]
|
||||
if hasattr(ep, 'resolve'):
|
||||
return ep.resolve()
|
||||
else:
|
||||
return ep.load(require=False)
|
||||
elif '.' in name:
|
||||
# fall back to old style dotted name
|
||||
module, classname = name.rsplit('.', 1)
|
||||
m = importlib.import_module(module)
|
||||
return getattr(m, classname)
|
||||
else:
|
||||
raise ValueError("{}: {} not found".format(menu, name))
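# Illustration of the two lookup paths handled above; the entry-point
# name and the dotted path are made-up examples:
#
#   _load_plugin_class('stores', 'sqlite')
#       -> resolved via the 'sqlite' entry point in group 'custodia.stores'
#
#   _load_plugin_class('stores', 'mypackage.mystore.MyStore')
#       -> falls back to importlib.import_module and returns MyStore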
|
||||
|
||||
|
||||
def _create_plugin(cfgparser, section, menu):
|
||||
if not cfgparser.has_option(section, 'handler'):
|
||||
raise ValueError('Invalid section, missing "handler"')
|
||||
|
||||
handler_name = cfgparser.get(section, 'handler')
|
||||
hconf = {'facility_name': section}
|
||||
try:
|
||||
handler = _load_plugin_class(menu, handler_name)
|
||||
classname = handler.__name__
|
||||
hconf['facility_name'] = '%s-[%s]' % (classname, section)
|
||||
except Exception as e:
|
||||
raise ValueError('Invalid format for "handler" option '
|
||||
'[%r]: %s' % (e, handler_name))
|
||||
|
||||
if handler._options is not None:
|
||||
# new-style plugin with parser and section
|
||||
plugin = handler(cfgparser, section)
|
||||
else:
|
||||
# old-style plugin with config dict
|
||||
hconf.update(cfgparser.items(section))
|
||||
hconf.pop('handler')
|
||||
plugin = handler(hconf)
|
||||
plugin.section = section
|
||||
return plugin
|
||||
|
||||
|
||||
def _load_plugins(config, cfgparser):
|
||||
"""Load and initialize plugins
|
||||
"""
|
||||
# set umask before any plugin gets a chance to create a file
|
||||
os.umask(config['umask'])
|
||||
|
||||
for s in cfgparser.sections():
|
||||
if s in {'ENV', 'global'}:
|
||||
# ENV section is only used for interpolation
|
||||
continue
|
||||
|
||||
if s.startswith('/'):
|
||||
menu = 'consumers'
|
||||
path_chain = s.split('/')
|
||||
if path_chain[-1] == '':
|
||||
path_chain = path_chain[:-1]
|
||||
name = tuple(path_chain)
|
||||
else:
|
||||
if s.startswith('auth:'):
|
||||
menu = 'authenticators'
|
||||
name = s[5:]
|
||||
elif s.startswith('authz:'):
|
||||
menu = 'authorizers'
|
||||
name = s[6:]
|
||||
elif s.startswith('store:'):
|
||||
menu = 'stores'
|
||||
name = s[6:]
|
||||
else:
|
||||
raise ValueError('Invalid section name [%s].\n' % s)
|
||||
|
||||
try:
|
||||
config[menu][name] = _create_plugin(cfgparser, s, menu)
|
||||
except Exception as e:
|
||||
logger.debug("Plugin '%s' failed to load.", name, exc_info=True)
|
||||
raise RuntimeError(menu, name, e)
|
||||
|
||||
# 2nd initialization stage
|
||||
for menu in ['authenticators', 'authorizers', 'consumers', 'stores']:
|
||||
plugins = config[menu]
|
||||
for name in sorted(plugins):
|
||||
plugin = plugins[name]
|
||||
plugin.finalize_init(config, cfgparser, context=None)
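# A minimal configuration sketch matching the section-name rules above;
# the handler values and the store name are assumptions, not a
# recommended setup:
#
#   [auth:creds]
#   handler = SimpleCredsAuth
#
#   [authz:paths]
#   handler = SimplePathAuthz
#   paths = /. /secrets
#
#   [store:encfile]
#   handler = mypackage.stores.MyStore
#
#   [/secrets]
#   handler = Root
#   store = encfile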
|
||||
|
||||
|
||||
def main(argparser=None):
|
||||
args = _parse_args(argparser=argparser)
|
||||
# parse arguments and populate config with basic settings
|
||||
cfgparser, config = _parse_config(args)
|
||||
# initialize logging
|
||||
log.setup_logging(config['debug'], config['auditlog'])
|
||||
logger.info('Custodia instance %s', args.instance or '<main>')
|
||||
logger.debug('Config file(s) %s loaded', config['configfiles'])
|
||||
# load plugins after logging
|
||||
_load_plugins(config, cfgparser)
|
||||
# create and run server
|
||||
httpd = HTTPServer(config['server_url'], config)
|
||||
httpd.serve()
|
||||
7
ipaserver/custodia/server/__main__.py
Normal file
@@ -0,0 +1,7 @@
|
||||
# Copyright (C) 2015 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
from ipaserver.custodia.server import main
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
80
ipaserver/custodia/server/args.py
Normal file
@@ -0,0 +1,80 @@
|
||||
# Copyright (C) 2015-2017 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import argparse
|
||||
import os
|
||||
|
||||
|
||||
class AbsFileType(argparse.FileType):
|
||||
"""argparse file type with absolute path
|
||||
"""
|
||||
def __call__(self, string):
|
||||
if string != '-':
|
||||
string = os.path.abspath(string)
|
||||
return super(AbsFileType, self).__call__(string)
|
||||
|
||||
|
||||
class ConfigfileAction(argparse.Action):
|
||||
"""Default action handler for configfile
|
||||
"""
|
||||
default_path = '/etc/custodia/custodia.conf'
|
||||
default_instance = '/etc/custodia/{instance}.conf'
|
||||
|
||||
def __call__(self, parser, namespace, values, option_string=None):
|
||||
if values is None:
|
||||
if namespace.instance is not None:
|
||||
values = self.default_instance.format(
|
||||
instance=namespace.instance
|
||||
)
|
||||
else:
|
||||
values = self.default_path
|
||||
values = self.type(values)
|
||||
setattr(namespace, self.dest, values)
|
||||
|
||||
|
||||
def instance_name(string):
|
||||
"""Check for valid instance name
|
||||
"""
|
||||
invalid = ':/@'
|
||||
if set(string).intersection(invalid):
|
||||
msg = 'Invalid instance name {}'.format(string)
|
||||
raise argparse.ArgumentTypeError(msg)
|
||||
return string
|
||||
|
||||
|
||||
default_argparser = argparse.ArgumentParser(
|
||||
prog='custodia',
|
||||
description='Custodia server'
|
||||
)
|
||||
default_argparser.add_argument(
|
||||
'--debug',
|
||||
action='store_true',
|
||||
help='Debug mode'
|
||||
)
|
||||
default_argparser.add_argument(
|
||||
'--instance',
|
||||
type=instance_name,
|
||||
help='Instance name',
|
||||
default=None
|
||||
)
|
||||
default_argparser.add_argument(
|
||||
'configfile',
|
||||
nargs='?',
|
||||
action=ConfigfileAction,
|
||||
type=AbsFileType('r'),
|
||||
help=('Path to custodia server config (default: '
|
||||
'/etc/custodia/custodia.conf or /etc/custodia/{instance}.conf)'),
|
||||
)
|
||||
|
||||
|
||||
def parse_args(args=None, argparser=None):
|
||||
if argparser is None:
|
||||
argparser = default_argparser
|
||||
|
||||
# namespace with default values
|
||||
namespace = argparse.Namespace(
|
||||
debug=False,
|
||||
instance=None,
|
||||
)
|
||||
|
||||
return argparser.parse_args(args, namespace)
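# Illustrative invocations (the config paths are assumptions):
#
#   python -m ipaserver.custodia.server --instance demo
#       -> ConfigfileAction falls back to /etc/custodia/demo.conf
#
#   args = parse_args(['--debug', '/etc/custodia/custodia.conf'])
#   args.configfile  ->  open file object with an absolute path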
|
||||
160
ipaserver/custodia/server/config.py
Normal file
@@ -0,0 +1,160 @@
|
||||
# Copyright (C) 2015-2017 Custodia Project Contributors - see LICENSE file
|
||||
from __future__ import absolute_import
|
||||
|
||||
import configparser
|
||||
import glob
|
||||
import os
|
||||
import socket
|
||||
from urllib.parse import quote as url_escape
|
||||
|
||||
|
||||
class CustodiaConfig:
|
||||
CONFIG_SPECIALS = ['authenticators', 'authorizers', 'consumers', 'stores']
|
||||
|
||||
DEFAULT_PATHS = [
|
||||
('libdir', '/var/lib/custodia/{instance}'),
|
||||
('logdir', '/var/log/custodia/{instance}'),
|
||||
('rundir', '/var/run/custodia/{instance}'),
|
||||
('socketdir', '/var/run/custodia'),
|
||||
]
|
||||
|
||||
def __init__(self, args):
|
||||
self.args = args
|
||||
self.config = {}
|
||||
self.defaults = None
|
||||
self.parser = None
|
||||
|
||||
def get_defaults(self):
|
||||
configpath = self.args.configfile.name
|
||||
instance = self.args.instance
|
||||
defaults = {
|
||||
# Do not use getfqdn(). Internally it calls gethostbyaddr which
|
||||
# might perform a DNS query.
|
||||
'hostname': socket.gethostname(),
|
||||
'configdir': os.path.dirname(configpath),
|
||||
'confdpattern': os.path.join(configpath + '.d', '*.conf'),
|
||||
'instance': instance if instance else '',
|
||||
}
|
||||
for name, path in self.DEFAULT_PATHS:
|
||||
defaults[name] = os.path.abspath(path.format(**defaults))
|
||||
return defaults
|
||||
|
||||
def create_parser(self):
|
||||
parser = configparser.ConfigParser(
|
||||
interpolation=configparser.ExtendedInterpolation(),
|
||||
defaults=self.defaults
|
||||
)
|
||||
parser.optionxform = str
|
||||
|
||||
# add env
|
||||
parser.add_section(u'ENV')
|
||||
for k, v in os.environ.items():
|
||||
if set(v).intersection('\r\n\x00'):
|
||||
continue
|
||||
parser.set(u'ENV', k, v.replace(u'$', u'$$'))
|
||||
|
||||
# default globals
|
||||
parser.add_section(u'global')
|
||||
parser.set(u'global', u'auditlog', u'${logdir}/audit.log')
|
||||
parser.set(u'global', u'debug', u'false')
|
||||
parser.set(u'global', u'umask', u'027')
|
||||
parser.set(u'global', u'makedirs', u'false')
|
||||
|
||||
return parser
|
||||
|
||||
def read_configs(self):
|
||||
with self.args.configfile as f:
|
||||
self.parser.read_file(f)
|
||||
|
||||
configfiles = [self.args.configfile.name]
|
||||
|
||||
pattern = self.parser.get(u'DEFAULT', u'confdpattern')
|
||||
if pattern:
|
||||
confdfiles = glob.glob(pattern)
|
||||
confdfiles.sort()
|
||||
for confdfile in confdfiles:
|
||||
with open(confdfile) as f:
|
||||
self.parser.read_file(f)
|
||||
configfiles.append(confdfile)
|
||||
|
||||
return configfiles
|
||||
|
||||
def makedirs(self):
|
||||
for name, _path in self.DEFAULT_PATHS:
|
||||
path = self.parser.get(u'DEFAULT', name)
|
||||
parent = os.path.dirname(path)
|
||||
# create parents according to umask
|
||||
if not os.path.isdir(parent):
|
||||
os.makedirs(parent)
|
||||
# create final directory with restricted permissions
|
||||
if not os.path.isdir(path):
|
||||
os.mkdir(path, 0o700)
|
||||
|
||||
def populate_config(self):
|
||||
config = self.config
|
||||
|
||||
for s in self.CONFIG_SPECIALS:
|
||||
config[s] = {}
|
||||
|
||||
for opt, val in self.parser.items(u'global'):
|
||||
if opt in self.CONFIG_SPECIALS:
|
||||
raise ValueError('"%s" is an invalid '
|
||||
'[global] option' % opt)
|
||||
config[opt] = val
|
||||
|
||||
config['tls_verify_client'] = self.parser.getboolean(
|
||||
'global', 'tls_verify_client', fallback=False)
|
||||
config['debug'] = self.parser.getboolean(
|
||||
'global', 'debug', fallback=False)
|
||||
config['makedirs'] = self.parser.getboolean(
|
||||
'global', 'makedirs', fallback=False)
|
||||
if self.args.debug:
|
||||
config['debug'] = self.args.debug
|
||||
|
||||
config['auditlog'] = os.path.abspath(config.get('auditlog'))
|
||||
config['umask'] = int(config.get('umask', '027'), 8)
|
||||
|
||||
url = config.get('server_url')
|
||||
sock = config.get('server_socket')
|
||||
|
||||
if url and sock:
|
||||
raise ValueError(
|
||||
"'server_url' and 'server_socket' are mutually exclusive.")
|
||||
|
||||
if not url and not sock:
|
||||
# no option given, use the default socket path
|
||||
socketdir = self.parser.get(u'DEFAULT', u'socketdir')
|
||||
name = self.args.instance if self.args.instance else 'custodia'
|
||||
sock = os.path.join(socketdir, name + '.sock')
|
||||
|
||||
if sock:
|
||||
server_socket = os.path.abspath(sock)
|
||||
config['server_url'] = 'http+unix://{}/'.format(
|
||||
url_escape(server_socket, ''))
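# For example, with neither server_url nor server_socket configured and
# the default socketdir, populate_config() ends up with:
#
#   server_socket = '/var/run/custodia/custodia.sock'
#   config['server_url'] = 'http+unix://%2Fvar%2Frun%2Fcustodia%2Fcustodia.sock/'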
|
||||
|
||||
def __call__(self):
|
||||
self.defaults = self.get_defaults()
|
||||
self.parser = self.create_parser()
|
||||
self.config['configfiles'] = self.read_configs()
|
||||
self.populate_config()
|
||||
if self.config[u'makedirs']:
|
||||
self.makedirs()
|
||||
return self.parser, self.config
|
||||
|
||||
|
||||
def parse_config(args):
|
||||
ccfg = CustodiaConfig(args)
|
||||
return ccfg()
|
||||
|
||||
|
||||
def test(arglist):
|
||||
from pprint import pprint
|
||||
from .args import parse_args
|
||||
args = parse_args(arglist)
|
||||
parser, config = parse_config(args)
|
||||
pprint(parser.items("DEFAULT"))
|
||||
pprint(config)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
test(['--instance=demo', './tests/empty.conf'])
|
||||