From 48017a9d4c66344860bbf007bccc435cc02f9fd5 Mon Sep 17 00:00:00 2001
From: Non Sequitur
Date: Wed, 17 Oct 2018 11:42:09 -0400
Subject: [PATCH 001/357] Added get_by_attributes to the certificates service,
 for fetching certs based on arbitrary attributes. Also adds the associated
 test and extra tests for other service methods

---
 lemur/certificates/service.py    | 18 ++++-
 lemur/sources/service.py         |  7 ++-
 lemur/tests/test_certificates.py | 83 ++++++++++++++++++++++++++++++++
 3 files changed, 106 insertions(+), 2 deletions(-)

diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py
index 0bd50694..c8a5365b 100644
--- a/lemur/certificates/service.py
+++ b/lemur/certificates/service.py
@@ -54,7 +54,7 @@ def get_by_name(name):
 
 def get_by_serial(serial):
     """
-    Retrieves certificate by it's Serial.
+    Retrieves certificate(s) by serial number.
     :param serial:
     :return:
     """
@@ -64,6 +64,22 @@ def get_by_serial(serial):
     return Certificate.query.filter(Certificate.serial == serial).all()
 
 
+def get_by_attributes(conditions):
+    """
+    Retrieves certificate(s) by the conditions given as a dictionary of column => value pairs.
+    :param conditions:
+    :return:
+    """
+    # Ensure that each of the given conditions corresponds to an actual column;
+    # if not, silently remove it (iterate over a copy so the dict can be modified)
+    for attr in list(conditions.keys()):
+        if attr not in Certificate.__table__.columns:
+            conditions.pop(attr)
+
+    query = database.session_query(Certificate)
+    return database.find_all(query, Certificate, conditions).all()
+
+
 def delete(cert_id):
     """
     Delete's a certificate.
diff --git a/lemur/sources/service.py b/lemur/sources/service.py
index 227f1bce..5002041c 100644
--- a/lemur/sources/service.py
+++ b/lemur/sources/service.py
@@ -116,7 +116,12 @@ def sync_certificates(source, user):
 
     for certificate in certificates:
         exists = False
-        if certificate.get('name'):
+
+        if certificate.get('search', None):
+            conditions = certificate.pop('search')
+            exists = certificate_service.get_by_attributes(conditions)
+
+        if not exists and certificate.get('name'):
             result = certificate_service.get_by_name(certificate['name'])
             if result:
                 exists = [result]
diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py
index 1a4d644b..0f46e4a5 100644
--- a/lemur/tests/test_certificates.py
+++ b/lemur/tests/test_certificates.py
@@ -41,6 +41,89 @@ def test_get_or_increase_name(session, certificate):
     assert get_or_increase_name('certificate1', int(serial, 16)) == 'certificate1-{}-1'.format(serial)
 
 
+def test_get_all_certs(session, certificate):
+    from lemur.certificates.service import get_all_certs
+    assert len(get_all_certs()) > 1
+
+
+def test_get_by_name(session, certificate):
+    from lemur.certificates.service import get_by_name
+
+    found = get_by_name(certificate.name)
+
+    assert found
+
+
+def test_get_by_serial(session, certificate):
+    from lemur.certificates.service import get_by_serial
+
+    found = get_by_serial(certificate.serial)
+
+    assert found
+
+
+def test_delete_cert(session):
+    from lemur.certificates.service import delete, get
+    from lemur.tests.factories import CertificateFactory
+
+    delete_this = CertificateFactory(name='DELETEME')
+    session.commit()
+
+    cert_exists = get(delete_this.id)
+
+    # it needs to exist first
+    assert cert_exists
+
+    delete(delete_this.id)
+    cert_exists = get(delete_this.id)
+
+    # then not exist after delete
+    assert not cert_exists
+
+
+def test_get_by_attributes(session, certificate):
+    from lemur.certificates.service import get_by_attributes
+
+    # Should get one cert
+    certificate1 = get_by_attributes({
+        'name': 
'SAN-san.example.org-LemurTrustUnittestsClass1CA2018-20171231-20471231' + }) + + # Should get one cert using multiple attrs + certificate2 = get_by_attributes({ + 'name': 'test-cert-11111111-1', + 'cn': 'san.example.org' + }) + + # Should get multiple certs + multiple = get_by_attributes({ + 'cn': 'LemurTrust Unittests Class 1 CA 2018', + 'issuer': 'LemurTrustUnittestsRootCA2018' + }) + + assert len(certificate1) == 1 + assert len(certificate2) == 1 + assert len(multiple) > 1 + + +def test_find_duplicates(session): + from lemur.certificates.service import find_duplicates + + cert = { + 'body': SAN_CERT_STR, + 'chain': INTERMEDIATE_CERT_STR + } + + dups1 = find_duplicates(cert) + + cert['chain'] = '' + + dups2 = find_duplicates(cert) + + assert len(dups1) > 0 + assert len(dups2) > 0 + + def test_get_certificate_primitives(certificate): from lemur.certificates.service import get_certificate_primitives From 060c78fd91241af1a638b570b769db133a9e7f04 Mon Sep 17 00:00:00 2001 From: Wesley Hartford Date: Mon, 10 Dec 2018 15:33:04 -0800 Subject: [PATCH 002/357] Fix Kubernetes Destination Plugin The Kubernetes plugin was broken. There were two major issues: * The server certificate was entered in a string input making it impossible (as far as I know) to enter a valid PEM certificate. * The base64 encoding calls were passing strings where bytes were expected. The fix to the first issue depends on #2218 and a change in the options structure. I've also included some improved input validation and logging. --- lemur/plugins/lemur_kubernetes/plugin.py | 92 +++++++++++++++--------- 1 file changed, 58 insertions(+), 34 deletions(-) diff --git a/lemur/plugins/lemur_kubernetes/plugin.py b/lemur/plugins/lemur_kubernetes/plugin.py index ee466596..a640a677 100644 --- a/lemur/plugins/lemur_kubernetes/plugin.py +++ b/lemur/plugins/lemur_kubernetes/plugin.py @@ -11,12 +11,14 @@ .. 
moduleauthor:: Mikhail Khodorovskiy """ import base64 -import os -import urllib -import requests import itertools +import os -from lemur.certificates.models import Certificate +import requests +from flask import current_app + +from lemur.common.defaults import common_name +from lemur.common.utils import parse_certificate from lemur.plugins.bases import DestinationPlugin DEFAULT_API_VERSION = 'v1' @@ -26,21 +28,32 @@ def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data): # _resolve_uri(k8s_base_uri, namespace, kind, name, api_ver=DEFAULT_API_VERSION) url = _resolve_uri(k8s_base_uri, namespace, kind) + current_app.logger.debug("K8S POST request URL: %s", url) create_resp = k8s_api.post(url, json=data) + current_app.logger.debug("K8S POST response: %s", create_resp) if 200 <= create_resp.status_code <= 299: return None - elif create_resp.json()['reason'] != 'AlreadyExists': - return create_resp.content + else: + json = create_resp.json() + if 'reason' in json: + if json['reason'] != 'AlreadyExists': + return create_resp.content + else: + return create_resp.content - update_resp = k8s_api.put(_resolve_uri(k8s_base_uri, namespace, kind, name), json=data) + url = _resolve_uri(k8s_base_uri, namespace, kind, name) + current_app.logger.debug("K8S PUT request URL: %s", url) + + update_resp = k8s_api.put(url, json=data) + current_app.logger.debug("K8S PUT response: %s", update_resp) if not 200 <= update_resp.status_code <= 299: return update_resp.content - return + return None def _resolve_ns(k8s_base_uri, namespace, api_ver=DEFAULT_API_VERSION,): @@ -61,6 +74,12 @@ def _resolve_uri(k8s_base_uri, namespace, kind, name=None, api_ver=DEFAULT_API_V ])) +# Performs Base64 encoding of string to string using the base64.b64encode() function +# which encodes bytes to bytes. 
+def base64encode(string): + return base64.b64encode(string.encode()).decode() + + class KubernetesDestinationPlugin(DestinationPlugin): title = 'Kubernetes' slug = 'kubernetes-destination' @@ -74,28 +93,28 @@ class KubernetesDestinationPlugin(DestinationPlugin): 'name': 'kubernetesURL', 'type': 'str', 'required': True, - 'validation': '@(https?|http)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?$@iS', + 'validation': 'https?://[a-zA-Z0-9.-]+(?::[0-9]+)?', 'helpMessage': 'Must be a valid Kubernetes server URL!', }, { 'name': 'kubernetesAuthToken', 'type': 'str', 'required': True, - 'validation': '/^$|\s+/', + 'validation': '[0-9a-zA-Z-_.]+', 'helpMessage': 'Must be a valid Kubernetes server Token!', }, { 'name': 'kubernetesServerCertificate', - 'type': 'str', + 'type': 'textarea', 'required': True, - 'validation': '/^$|\s+/', + 'validation': '-----BEGIN CERTIFICATE-----[a-zA-Z0-9/+\\s\\r\\n]+-----END CERTIFICATE-----', 'helpMessage': 'Must be a valid Kubernetes server Certificate!', }, { 'name': 'kubernetesNamespace', 'type': 'str', 'required': True, - 'validation': '/^$|\s+/', + 'validation': '[a-z0-9]([-a-z0-9]*[a-z0-9])?', 'helpMessage': 'Must be a valid Kubernetes Namespace!', }, @@ -106,33 +125,38 @@ class KubernetesDestinationPlugin(DestinationPlugin): def upload(self, name, body, private_key, cert_chain, options, **kwargs): - k8_bearer = self.get_option('kubernetesAuthToken', options) - k8_cert = self.get_option('kubernetesServerCertificate', options) - k8_namespace = self.get_option('kubernetesNamespace', options) - k8_base_uri = self.get_option('kubernetesURL', options) + try: + k8_bearer = self.get_option('kubernetesAuthToken', options) + k8_cert = self.get_option('kubernetesServerCertificate', options) + k8_namespace = self.get_option('kubernetesNamespace', options) + k8_base_uri = self.get_option('kubernetesURL', options) - k8s_api = K8sSession(k8_bearer, k8_cert) + k8s_api = K8sSession(k8_bearer, k8_cert) - cert = Certificate(body=body) + cn = common_name(parse_certificate(body)) - # in the future once runtime properties can be passed-in - use passed-in secret name - secret_name = 'certs-' + urllib.quote_plus(cert.name) + # in the future once runtime properties can be passed-in - use passed-in secret name + secret_name = 'certs-' + cn - err = ensure_resource(k8s_api, k8s_base_uri=k8_base_uri, namespace=k8_namespace, kind="secret", name=secret_name, data={ - 'apiVersion': 'v1', - 'kind': 'Secret', - 'metadata': { - 'name': secret_name, - }, - 'data': { - 'combined.pem': base64.b64encode(body + private_key), - 'ca.crt': base64.b64encode(cert_chain), - 'service.key': base64.b64encode(private_key), - 'service.crt': base64.b64encode(body), - } - }) + err = ensure_resource(k8s_api, k8s_base_uri=k8_base_uri, namespace=k8_namespace, kind="secret", name=secret_name, data={ + 'apiVersion': 'v1', + 'kind': 'Secret', + 'metadata': { + 'name': secret_name, + }, + 'data': { + 'combined.pem': base64encode('%s\n%s' % (body, private_key)), + 'ca.crt': base64encode(cert_chain), + 'service.key': base64encode(private_key), + 'service.crt': base64encode(body), + } + }) + except Exception as e: + current_app.logger.exception("Exception in upload") + raise e if err is not None: + current_app.logger.debug("Error deploying resource: %s", err) raise Exception("Error uploading secret: " + err) From bc621c14680b42ca3d98ebc083b973f3210621b9 Mon Sep 17 00:00:00 2001 From: Wesley Hartford Date: Wed, 12 Dec 2018 13:25:36 -0800 Subject: [PATCH 003/357] Improve the Kubernetes Destination plugin The plugin now supports 
loading details from local files rather than requiring them to be entered
through the UI. This is especially relevant when Lemur is deployed on
Kubernetes, as the certificate, token, and current namespace will be injected
into the pod. The locations where these details are injected are used as the
defaults if no configuration details are supplied.

The plugin now supports deploying the secret in three different formats:

* Full - matches the format used by the plugin prior to these changes.
* TLS - creates a secret of type kubernetes.io/tls and includes the certificate
  chain and private key; this format is used by many Kubernetes features.
* Certificate - creates a secret containing only the certificate chain, suitable
  for use as a trust authority where private keys should _NOT_ be deployed.

The deployed secret can now have a name set through the configuration options;
the setting allows the insertion of the placeholder '{common_name}', which will
be replaced by the certificate's common name value.

Debug-level logging has been added.
---
 lemur/plugins/lemur_kubernetes/plugin.py | 187 ++++++++++++++++++-----
 1 file changed, 149 insertions(+), 38 deletions(-)

diff --git a/lemur/plugins/lemur_kubernetes/plugin.py b/lemur/plugins/lemur_kubernetes/plugin.py
index a640a677..25ce8757 100644
--- a/lemur/plugins/lemur_kubernetes/plugin.py
+++ b/lemur/plugins/lemur_kubernetes/plugin.py
@@ -25,7 +25,6 @@ DEFAULT_API_VERSION = 'v1'
 
 
 def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data):
-    # _resolve_uri(k8s_base_uri, namespace, kind, name, api_ver=DEFAULT_API_VERSION)
     url = _resolve_uri(k8s_base_uri, namespace, kind)
     current_app.logger.debug("K8S POST request URL: %s", url)
 
@@ -56,11 +55,12 @@ def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data):
     return None
 
 
-def _resolve_ns(k8s_base_uri, namespace, api_ver=DEFAULT_API_VERSION,):
+def _resolve_ns(k8s_base_uri, namespace, api_ver=DEFAULT_API_VERSION):
     api_group = 'api'
     if '/' in api_ver:
         api_group = 'apis'
-    return '{base}/{api_group}/{api_ver}/namespaces'.format(base=k8s_base_uri, api_group=api_group, api_ver=api_ver) + ('/' + namespace if namespace else '')
+    return '{base}/{api_group}/{api_ver}/namespaces'.format(base=k8s_base_uri, api_group=api_group, api_ver=api_ver) + (
+        '/' + namespace if namespace else '')
 
 
 def _resolve_uri(k8s_base_uri, namespace, kind, name=None, api_ver=DEFAULT_API_VERSION):
@@ -80,6 +80,35 @@ def base64encode(string):
     return base64.b64encode(string.encode()).decode()
 
 
+def build_secret(secret_format, secret_name, body, private_key, cert_chain):
+    secret = {
+        'apiVersion': 'v1',
+        'kind': 'Secret',
+        'type': 'Opaque',
+        'metadata': {
+            'name': secret_name,
+        }
+    }
+    if secret_format == 'Full':
+        secret['data'] = {
+            'combined.pem': base64encode('%s\n%s' % (body, private_key)),
+            'ca.crt': base64encode(cert_chain),
+            'service.key': base64encode(private_key),
+            'service.crt': base64encode(body),
+        }
+    if secret_format == 'TLS':
+        secret['type'] = 'kubernetes.io/tls'
+        secret['data'] = {
+            'tls.crt': base64encode(cert_chain),
+            'tls.key': base64encode(private_key)
+        }
+    if secret_format == 'Certificate':
+        secret['data'] = {
+            'tls.crt': base64encode(cert_chain),
+        }
+    return secret
+
+
 class KubernetesDestinationPlugin(DestinationPlugin):
     title = 'Kubernetes'
     slug = 'kubernetes-destination'
@@ -89,35 +118,81 @@ class KubernetesDestinationPlugin(DestinationPlugin):
     author_url = 'https://github.com/mik373/lemur'
 
     options = [
+        {
+            'name': 'secretNameFormat',
+            'type': 'str',
+            'required': False,
+            # Validation is 
difficult. This regex is used by kubectl to validate secret names: + # [a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)* + # Allowing the insertion of "{common_name}" (or any other such placeholder} + # at any point in the string proved very challenging and had a tendency to + # cause my browser to hang. The specified expression will allow any valid string + # but will also accept many invalid strings. + 'validation': '(?:[a-z0-9.-]|\\{common_name\\})+', + 'helpMessage': 'Must be a valid secret name, possibly including "{common_name}"', + 'default': '{common_name}' + }, { 'name': 'kubernetesURL', 'type': 'str', - 'required': True, + 'required': False, 'validation': 'https?://[a-zA-Z0-9.-]+(?::[0-9]+)?', 'helpMessage': 'Must be a valid Kubernetes server URL!', + 'default': 'https://kubernetes.default' }, { 'name': 'kubernetesAuthToken', 'type': 'str', - 'required': True, + 'required': False, 'validation': '[0-9a-zA-Z-_.]+', 'helpMessage': 'Must be a valid Kubernetes server Token!', }, + { + 'name': 'kubernetesAuthTokenFile', + 'type': 'str', + 'required': False, + 'validation': '(/[^/]+)+', + 'helpMessage': 'Must be a valid file path!', + 'default': '/var/run/secrets/kubernetes.io/serviceaccount/token' + }, { 'name': 'kubernetesServerCertificate', 'type': 'textarea', - 'required': True, + 'required': False, 'validation': '-----BEGIN CERTIFICATE-----[a-zA-Z0-9/+\\s\\r\\n]+-----END CERTIFICATE-----', 'helpMessage': 'Must be a valid Kubernetes server Certificate!', }, + { + 'name': 'kubernetesServerCertificateFile', + 'type': 'str', + 'required': False, + 'validation': '(/[^/]+)+', + 'helpMessage': 'Must be a valid file path!', + 'default': '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt' + }, { 'name': 'kubernetesNamespace', 'type': 'str', - 'required': True, + 'required': False, 'validation': '[a-z0-9]([-a-z0-9]*[a-z0-9])?', 'helpMessage': 'Must be a valid Kubernetes Namespace!', }, - + { + 'name': 'kubernetesNamespaceFile', + 'type': 'str', + 'required': False, + 'validation': '(/[^/]+)+', + 'helpMessage': 'Must be a valid file path!', + 'default': '/var/run/secrets/kubernetes.io/serviceaccount/namespace' + }, + { + 'name': 'secretFormat', + 'type': 'select', + 'required': True, + 'available': ['Full', 'TLS', 'Certificate'], + 'helpMessage': 'The type of Secret to create.', + 'default': 'Full' + } ] def __init__(self, *args, **kwargs): @@ -126,31 +201,31 @@ class KubernetesDestinationPlugin(DestinationPlugin): def upload(self, name, body, private_key, cert_chain, options, **kwargs): try: - k8_bearer = self.get_option('kubernetesAuthToken', options) - k8_cert = self.get_option('kubernetesServerCertificate', options) - k8_namespace = self.get_option('kubernetesNamespace', options) k8_base_uri = self.get_option('kubernetesURL', options) + secret_format = self.get_option('secretFormat', options) - k8s_api = K8sSession(k8_bearer, k8_cert) + k8s_api = K8sSession( + self.k8s_bearer(options), + self.k8s_cert(options) + ) cn = common_name(parse_certificate(body)) - # in the future once runtime properties can be passed-in - use passed-in secret name - secret_name = 'certs-' + cn + secret_name_format = self.get_option('secretNameFormat', options) + + secret_name = secret_name_format.format(common_name=cn) + + secret = build_secret(secret_format, secret_name, body, private_key, cert_chain) + + err = ensure_resource( + k8s_api, + k8s_base_uri=k8_base_uri, + namespace=self.k8s_namespace(options), + kind="secret", + name=secret_name, + data=secret + ) - err = ensure_resource(k8s_api, 
k8s_base_uri=k8_base_uri, namespace=k8_namespace, kind="secret", name=secret_name, data={ - 'apiVersion': 'v1', - 'kind': 'Secret', - 'metadata': { - 'name': secret_name, - }, - 'data': { - 'combined.pem': base64encode('%s\n%s' % (body, private_key)), - 'ca.crt': base64encode(cert_chain), - 'service.key': base64encode(private_key), - 'service.crt': base64encode(body), - } - }) except Exception as e: current_app.logger.exception("Exception in upload") raise e @@ -159,27 +234,63 @@ class KubernetesDestinationPlugin(DestinationPlugin): current_app.logger.debug("Error deploying resource: %s", err) raise Exception("Error uploading secret: " + err) + def k8s_bearer(self, options): + bearer = self.get_option('kubernetesAuthToken', options) + if not bearer: + bearer_file = self.get_option('kubernetesAuthTokenFile', options) + with open(bearer_file, "r") as file: + bearer = file.readline() + if bearer: + current_app.logger.debug("Using token read from %s", bearer_file) + else: + raise Exception("Unable to locate token in options or from %s", bearer_file) + else: + current_app.logger.debug("Using token from options") + return bearer + + def k8s_cert(self, options): + cert_file = self.get_option('kubernetesServerCertificateFile', options) + cert = self.get_option('kubernetesServerCertificate', options) + if cert: + cert_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'k8.cert') + with open(cert_file, "w") as text_file: + text_file.write(cert) + current_app.logger.debug("Using certificate from options") + else: + current_app.logger.debug("Using certificate from %s", cert_file) + return cert_file + + def k8s_namespace(self, options): + namespace = self.get_option('kubernetesNamespace', options) + if not namespace: + namespace_file = self.get_option('kubernetesNamespaceFile', options) + with open(namespace_file, "r") as file: + namespace = file.readline() + if namespace: + current_app.logger.debug("Using namespace %s from %s", namespace, namespace_file) + else: + raise Exception("Unable to locate namespace in options or from %s", namespace_file) + else: + current_app.logger.debug("Using namespace %s from options", namespace) + return namespace + class K8sSession(requests.Session): - def __init__(self, bearer, cert): + def __init__(self, bearer, cert_file): super(K8sSession, self).__init__() self.headers.update({ 'Authorization': 'Bearer %s' % bearer }) - k8_ca = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'k8.cert') + self.verify = cert_file - with open(k8_ca, "w") as text_file: - text_file.write(cert) - - self.verify = k8_ca - - def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=30, allow_redirects=True, proxies=None, - hooks=None, stream=None, verify=None, cert=None, json=None): + def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, + timeout=30, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None, + json=None): """ This method overrides the default timeout to be 10s. 
""" - return super(K8sSession, self).request(method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, - verify, cert, json) + return super(K8sSession, self).request(method, url, params, data, headers, cookies, files, auth, timeout, + allow_redirects, proxies, hooks, stream, verify, cert, json) From e7313da03e5d234b8829c7981d654a6e04dfb6b3 Mon Sep 17 00:00:00 2001 From: Wesley Hartford Date: Tue, 18 Dec 2018 22:24:48 -0500 Subject: [PATCH 004/357] Minor changes for code review suggestions. --- lemur/plugins/lemur_kubernetes/plugin.py | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/lemur/plugins/lemur_kubernetes/plugin.py b/lemur/plugins/lemur_kubernetes/plugin.py index a640a677..4601592a 100644 --- a/lemur/plugins/lemur_kubernetes/plugin.py +++ b/lemur/plugins/lemur_kubernetes/plugin.py @@ -35,14 +35,8 @@ def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data): if 200 <= create_resp.status_code <= 299: return None - - else: - json = create_resp.json() - if 'reason' in json: - if json['reason'] != 'AlreadyExists': - return create_resp.content - else: - return create_resp.content + elif create_resp.json().get('reason', '') != 'AlreadyExists': + return create_resp.content url = _resolve_uri(k8s_base_uri, namespace, kind, name) current_app.logger.debug("K8S PUT request URL: %s", url) @@ -53,7 +47,7 @@ def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data): if not 200 <= update_resp.status_code <= 299: return update_resp.content - return None + return def _resolve_ns(k8s_base_uri, namespace, api_ver=DEFAULT_API_VERSION,): @@ -152,8 +146,8 @@ class KubernetesDestinationPlugin(DestinationPlugin): } }) except Exception as e: - current_app.logger.exception("Exception in upload") - raise e + current_app.logger.exception("Exception in upload: {}".format(e), exc_info=True) + raise if err is not None: current_app.logger.debug("Error deploying resource: %s", err) From fbf48316b1c39bdbbbb6a6a673be51770462edd2 Mon Sep 17 00:00:00 2001 From: Wesley Hartford Date: Tue, 18 Dec 2018 22:43:32 -0500 Subject: [PATCH 005/357] Minor changes for code review suggestions. 
--- lemur/plugins/lemur_kubernetes/plugin.py | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/lemur/plugins/lemur_kubernetes/plugin.py b/lemur/plugins/lemur_kubernetes/plugin.py index b111ac2b..30b864eb 100644 --- a/lemur/plugins/lemur_kubernetes/plugin.py +++ b/lemur/plugins/lemur_kubernetes/plugin.py @@ -197,20 +197,14 @@ class KubernetesDestinationPlugin(DestinationPlugin): try: k8_base_uri = self.get_option('kubernetesURL', options) secret_format = self.get_option('secretFormat', options) - k8s_api = K8sSession( self.k8s_bearer(options), self.k8s_cert(options) ) - cn = common_name(parse_certificate(body)) - secret_name_format = self.get_option('secretNameFormat', options) - secret_name = secret_name_format.format(common_name=cn) - secret = build_secret(secret_format, secret_name, body, private_key, cert_chain) - err = ensure_resource( k8s_api, k8s_base_uri=k8_base_uri, @@ -225,7 +219,7 @@ class KubernetesDestinationPlugin(DestinationPlugin): raise if err is not None: - current_app.logger.debug("Error deploying resource: %s", err) + current_app.logger.error("Error deploying resource: %s", err) raise Exception("Error uploading secret: " + err) def k8s_bearer(self, options): From f02178c154922ea67c1b5b6cba64dafca7da6c39 Mon Sep 17 00:00:00 2001 From: sirferl Date: Thu, 20 Dec 2018 11:54:47 +0100 Subject: [PATCH 006/357] added ADCS issuer and source plugin --- lemur/plugins/lemur_adcs/__init__.py | 6 ++ lemur/plugins/lemur_adcs/plugin.py | 120 +++++++++++++++++++++++++++ requirements-dev.txt | 1 + requirements-docs.txt | 18 ++-- requirements-tests.txt | 4 +- requirements.in | 3 +- requirements.txt | 7 +- setup.py | 4 +- 8 files changed, 147 insertions(+), 16 deletions(-) create mode 100644 lemur/plugins/lemur_adcs/__init__.py create mode 100644 lemur/plugins/lemur_adcs/plugin.py diff --git a/lemur/plugins/lemur_adcs/__init__.py b/lemur/plugins/lemur_adcs/__init__.py new file mode 100644 index 00000000..6b61e936 --- /dev/null +++ b/lemur/plugins/lemur_adcs/__init__.py @@ -0,0 +1,6 @@ +"""Set the version information.""" +try: + VERSION = __import__('pkg_resources') \ + .get_distribution(__name__).version +except Exception as e: + VERSION = 'unknown' diff --git a/lemur/plugins/lemur_adcs/plugin.py b/lemur/plugins/lemur_adcs/plugin.py new file mode 100644 index 00000000..48a3e85b --- /dev/null +++ b/lemur/plugins/lemur_adcs/plugin.py @@ -0,0 +1,120 @@ +from lemur.plugins.bases import IssuerPlugin, SourcePlugin +import requests +import datetime +import lemur_adcs as ADCS +from certsrv import Certsrv +import ssl +from OpenSSL import crypto +from flask import current_app + +class ADCSIssuerPlugin(IssuerPlugin): + title = 'ADCS' + slug = 'adcs-issuer' + description = 'Enables the creation of certificates by ADCS (Active Direcory Certificate Services)' + version = ADCS.VERSION + + author = 'sirferl' + author_url = 'https://github.com/sirferl/lemur' + + def __init__(self, *args, **kwargs): + """Initialize the issuer with the appropriate details.""" + self.session = requests.Session() + super(ADCSIssuerPlugin, self).__init__(*args, **kwargs) + + @staticmethod + def create_authority(options): + """Create an authority. + Creates an authority, this authority is then used by Lemur to + allow a user to specify which Certificate Authority they want + to sign their certificate. 
+
+        :param options:
+        :return:
+        """
+        role = {'username': '', 'password': '', 'name': 'adcs'}
+        return current_app.config.get('ADCS_ROOT'), current_app.config.get('ADCS_ISSUING'), [role]
+
+    def create_certificate(self, csr, issuer_options):
+        adcs_server = current_app.config.get('ADCS_SERVER')
+        adcs_user = current_app.config.get('ADCS_USER')
+        adcs_pwd = current_app.config.get('ADCS_PWD')
+        adcs_auth_method = current_app.config.get('ADCS_AUTH_METHOD')
+        ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method)
+        current_app.logger.info("Requesting CSR: {0}".format(csr))
+        current_app.logger.info("Issuer options: {0}".format(issuer_options))
+        cert = ca_server.get_cert(csr, current_app.config.get('ADCS_TEMPLATE'), encoding='b64').decode('utf-8').replace('\r\n', '\n')
+        chain = ca_server.get_ca_cert(encoding='b64').decode('utf-8').replace('\r\n', '\n')
+        return cert, chain, None
+
+    def revoke_certificate(self, certificate, comments):
+        # requests.put('a third party')
+        raise NotImplementedError('Not implemented\n', self, certificate, comments)
+
+    def get_ordered_certificate(self, order_id):
+        # requests.get('already existing certificate')
+        raise NotImplementedError('Not implemented\n', self, order_id)
+
+    def canceled_ordered_certificate(self, pending_cert, **kwargs):
+        # requests.put('cancel an order that has yet to be issued')
+        raise NotImplementedError('Not implemented\n', self, pending_cert, **kwargs)
+
+class ADCSSourcePlugin(SourcePlugin):
+    title = 'ADCS'
+    slug = 'adcs-source'
+    description = 'Enables the collection of certificates'
+    version = ADCS.VERSION
+
+    author = 'sirferl'
+    author_url = 'https://github.com/sirferl/lemur'
+    options = [
+        {
+            'name': 'dummy',
+            'type': 'str',
+            'required': False,
+            'validation': '/^[0-9]{12,12}$/',
+            'helpMessage': 'Just to prevent error'
+        }
+
+    ]
+
+    def get_certificates(self, options, **kwargs):
+        adcs_server = current_app.config.get('ADCS_SERVER')
+        adcs_user = current_app.config.get('ADCS_USER')
+        adcs_pwd = current_app.config.get('ADCS_PWD')
+        adcs_auth_method = current_app.config.get('ADCS_AUTH_METHOD')
+        adcs_start = current_app.config.get('ADCS_START')
+        adcs_stop = current_app.config.get('ADCS_STOP')
+        ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method)
+        out_certlist = []
+        for id in range(adcs_start, adcs_stop):
+            try:
+                cert = ca_server.get_existing_cert(id, encoding='b64').decode('utf-8').replace('\r\n', '\n')
+            except Exception as err:
+                if '{0}'.format(err).find("CERTSRV_E_PROPERTY_EMPTY") != -1:
+                    # this error indicates the end of the certificate list(?), so we stop
+                    break
+                else:
+                    # We do nothing in case there is no certificate returned with the current id for other reasons
                    current_app.logger.info("Error with id {0}: {1}".format(id, err))
+            else:
+                # we have a certificate
+                pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
+                # loop through extensions to see if we find "TLS Web Server Authentication"
+                for e_id in range(pubkey.get_extension_count()):
+                    try:
+                        extension = '{0}'.format(pubkey.get_extension(e_id))
+                    except Exception:
+                        extension = ''
+                    if extension.find("TLS Web Server Authentication") != -1:
+                        out_certlist.append({
+                            'name': format(pubkey.get_subject().CN),
+                            'body': cert})
+                        break
+
+        return out_certlist
+
+    def get_endpoints(self, options, **kwargs):
+        # There are no endpoints in the ADCS
+        raise NotImplementedError('Not implemented\n', self, options, **kwargs)
+
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 7b427b20..d8c24e4d 100644
--- a/requirements-dev.txt
+++ 
b/requirements-dev.txt @@ -15,6 +15,7 @@ flake8==3.5.0 identify==1.1.7 # via pre-commit idna==2.8 # via requests importlib-metadata==0.7 # via pre-commit +importlib-resources==1.0.2 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 diff --git a/requirements-docs.txt b/requirements-docs.txt index 3f036915..80f38e5f 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -14,11 +14,11 @@ arrow==0.12.1 asn1crypto==0.24.0 asyncpool==1.0 babel==2.6.0 # via sphinx -bcrypt==3.1.4 +bcrypt==3.1.5 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.60 -botocore==1.12.60 +boto3==1.9.67 +botocore==1.12.67 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 @@ -35,13 +35,13 @@ flask-cors==3.0.7 flask-mail==0.9.1 flask-migrate==2.3.1 flask-principal==0.4.0 -flask-restful==0.3.6 +flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.3.2 flask==1.0.2 future==0.17.1 gunicorn==19.9.0 -idna==2.7 +idna==2.8 imagesize==1.1.0 # via sphinx inflection==0.3.1 itsdangerous==1.1.0 @@ -66,7 +66,7 @@ pyasn1-modules==0.2.2 pyasn1==0.4.4 pycparser==2.19 pygments==2.3.1 # via sphinx -pyjwt==1.7.0 +pyjwt==1.7.1 pynacl==1.3.0 pyopenssl==18.0.0 pyparsing==2.3.0 # via packaging @@ -78,17 +78,17 @@ pyyaml==3.13 raven[flask]==6.9.0 redis==2.10.6 requests-toolbelt==0.8.0 -requests[security]==2.20.1 +requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.1.13 -six==1.11.0 +six==1.12.0 snowballstemmer==1.2.1 # via sphinx sphinx-rtd-theme==0.4.2 sphinx==1.8.2 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.9 -sqlalchemy==1.2.14 +sqlalchemy==1.2.15 tabulate==0.8.2 urllib3==1.24.1 vine==1.1.4 diff --git a/requirements-tests.txt b/requirements-tests.txt index 59c626f7..47b83988 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,9 +8,9 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.2.1 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.67 # via moto +boto3==1.9.69 # via moto boto==2.49.0 # via moto -botocore==1.12.67 # via boto3, moto, s3transfer +botocore==1.12.69 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.11.5 # via cryptography chardet==3.0.4 # via requests diff --git a/requirements.in b/requirements.in index 9824650b..0aea4591 100644 --- a/requirements.in +++ b/requirements.in @@ -8,6 +8,7 @@ boto3 botocore celery[redis] certifi +certsrv CloudFlare cryptography dnspython3 @@ -42,4 +43,4 @@ retrying six SQLAlchemy-Utils tabulate -xmltodict \ No newline at end of file +xmltodict diff --git a/requirements.txt b/requirements.txt index 7ee9a167..e88bcb90 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,10 +15,11 @@ asyncpool==1.0 bcrypt==3.1.5 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.67 -botocore==1.12.67 +boto3==1.9.69 +botocore==1.12.69 celery[redis]==4.2.1 certifi==2018.11.29 +certsrv==2.1.0 cffi==1.11.5 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask @@ -70,7 +71,7 @@ python-editor==1.0.3 # via alembic python-ldap==3.1.0 pytz==2018.7 # via acme, celery, flask-restful, pyrfc3339 pyyaml==3.13 # via cloudflare -raven[flask]==6.9.0 +raven[flask]==6.10.0 redis==2.10.6 requests-toolbelt==0.8.0 # via acme requests[security]==2.21.0 diff --git a/setup.py b/setup.py index 1511b013..882edb02 100644 --- a/setup.py +++ b/setup.py @@ -154,7 +154,9 @@ setup( 'digicert_cis_issuer = 
lemur.plugins.lemur_digicert.plugin:DigiCertCISIssuerPlugin', 'digicert_cis_source = lemur.plugins.lemur_digicert.plugin:DigiCertCISSourcePlugin', 'csr_export = lemur.plugins.lemur_csr.plugin:CSRExportPlugin', - 'sftp_destination = lemur.plugins.lemur_sftp.plugin:SFTPDestinationPlugin' + 'sftp_destination = lemur.plugins.lemur_sftp.plugin:SFTPDestinationPlugin', + 'adcs_issuer = lemur.plugins.lemur_adcs.plugin:ADCSIssuerPlugin', + 'adcs_source = lemur.plugins.lemur_adcs.plugin:ADCSSourcePlugin' ], }, classifiers=[ From 0f2e30cdae07f154c5b5809dc6e5ccee1c5e2158 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Fri, 21 Dec 2018 12:06:52 +0200 Subject: [PATCH 007/357] Deduplicate rows before notification associations unique constraint migration --- lemur/migrations/versions/449c3d5c7299_.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lemur/migrations/versions/449c3d5c7299_.py b/lemur/migrations/versions/449c3d5c7299_.py index 1dcb7ab5..0bc30db1 100644 --- a/lemur/migrations/versions/449c3d5c7299_.py +++ b/lemur/migrations/versions/449c3d5c7299_.py @@ -21,6 +21,14 @@ COLUMNS = ["notification_id", "certificate_id"] def upgrade(): + connection = op.get_bind() + # Delete duplicate entries + connection.execute("""\ + DELETE FROM certificate_notification_associations WHERE ctid NOT IN ( + -- Select the first tuple ID for each (notification_id, certificate_id) combination and keep that + SELECT min(ctid) FROM certificate_notification_associations GROUP BY notification_id, certificate_id + ) + """) op.create_unique_constraint(CONSTRAINT_NAME, TABLE, COLUMNS) From 72f6fdb17d3ad0bba4796fdc668db739246aa36b Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Wed, 19 Dec 2018 17:59:48 +0200 Subject: [PATCH 008/357] Properly handle Unicode in issuer name sanitization If the point of sanitization is to get rid of all non-alphanumeric characters then Unicode characters should probably be forbidden too. We can re-use the same sanitization function as used for cert 'name' --- lemur/common/defaults.py | 38 +++++++++++++++++------------------- lemur/tests/conftest.py | 7 ++++++- lemur/tests/test_defaults.py | 32 ++++++++++++++++++++++++++++++ 3 files changed, 56 insertions(+), 21 deletions(-) diff --git a/lemur/common/defaults.py b/lemur/common/defaults.py index e9bbc6e6..72e863c1 100644 --- a/lemur/common/defaults.py +++ b/lemur/common/defaults.py @@ -7,18 +7,21 @@ from lemur.extensions import sentry from lemur.constants import SAN_NAMING_TEMPLATE, DEFAULT_NAMING_TEMPLATE -def text_to_slug(value): - """Normalize a string to a "slug" value, stripping character accents and removing non-alphanum characters.""" +def text_to_slug(value, joiner='-'): + """ + Normalize a string to a "slug" value, stripping character accents and removing non-alphanum characters. + A series of non-alphanumeric characters is replaced with the joiner character. + """ # Strip all character accents: decompose Unicode characters and then drop combining chars. value = ''.join(c for c in unicodedata.normalize('NFKD', value) if not unicodedata.combining(c)) - # Replace all remaining non-alphanumeric characters with '-'. Multiple characters get collapsed into a single dash. - # Except, keep 'xn--' used in IDNA domain names as is. - value = re.sub(r'[^A-Za-z0-9.]+(? 
Date: Fri, 21 Dec 2018 12:33:47 -0800 Subject: [PATCH 009/357] Update plugin.py --- lemur/plugins/lemur_kubernetes/plugin.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lemur/plugins/lemur_kubernetes/plugin.py b/lemur/plugins/lemur_kubernetes/plugin.py index 8de155c3..30b864eb 100644 --- a/lemur/plugins/lemur_kubernetes/plugin.py +++ b/lemur/plugins/lemur_kubernetes/plugin.py @@ -73,6 +73,7 @@ def _resolve_uri(k8s_base_uri, namespace, kind, name=None, api_ver=DEFAULT_API_V def base64encode(string): return base64.b64encode(string.encode()).decode() + def build_secret(secret_format, secret_name, body, private_key, cert_chain): secret = { 'apiVersion': 'v1', @@ -101,6 +102,7 @@ def build_secret(secret_format, secret_name, body, private_key, cert_chain): } return secret + class KubernetesDestinationPlugin(DestinationPlugin): title = 'Kubernetes' slug = 'kubernetes-destination' From 4ec8490c558de6de64098d778d91cc1f79035caf Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 00:04:13 +0100 Subject: [PATCH 010/357] Create Dockerfile --- docker/Dockerfile | 66 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 66 insertions(+) create mode 100644 docker/Dockerfile diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 00000000..60aa473e --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,66 @@ +FROM alpine:3.8 as builder + +ARG VERSION + +ENV VERSION master +#ENV VERSION 0.7.0 + +RUN apk --update add python3 + +RUN apk --update add --virtual build-dependencies \ + git \ + tar \ + curl \ + python3-dev \ + npm \ + bash \ + musl-dev \ + gcc \ + autoconf \ + automake \ + make \ + nasm \ + zlib-dev \ + postgresql-dev \ + libressl-dev \ + libffi-dev \ + cyrus-sasl-dev \ + openldap-dev + +#RUN git clone https://github.com/Netflix/lemur + +RUN mkdir -p /opt/lemur && curl -sSL https://github.com/Netflix/lemur/archive/$VERSION.tar.gz | tar xz -C /opt/lemur --strip-components=1 + +RUN ls -lha /opt/lemur/ + +WORKDIR /opt/lemur + +RUN pip3 install --upgrade pip + +RUN npm install --unsafe-perm +RUN pip3 install setuptools +RUN pip3 install -e . 
+RUN node_modules/.bin/gulp build +RUN node_modules/.bin/gulp package --urlContextPath=$(urlContextPath) + +RUN apk del build-dependencies + +##################### + +RUN apk add --update libldap postgresql-client bash nginx supervisor + +RUN mkdir -p /run/nginx/ + +WORKDIR / + +COPY entrypoint / + +RUN chmod +x /entrypoint + +COPY lemur.py /root/.lemur/lemur.conf.py +COPY supervisor.conf / +COPY default.conf /etc/nginx/conf.d/ + +ENTRYPOINT ["/entrypoint"] + +CMD ["/usr/bin/supervisord","-c","supervisor.conf"] From fc6caecc0bbf93bf8b7614111ecb757f2a6eca51 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 21:37:09 +0100 Subject: [PATCH 011/357] Update Dockerfile --- docker/Dockerfile | 35 ++++++++++++++++++++++++++++++----- 1 file changed, 30 insertions(+), 5 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 60aa473e..54b517b8 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,10 +1,17 @@ FROM alpine:3.8 as builder -ARG VERSION - ENV VERSION master #ENV VERSION 0.7.0 +ENV uid 1337 +ENV gid 1337 +ENV user lemur +ENV group lemur + +#RUN adduser -D -S -u ${uid} ${user} -G ${group} + +RUN addgroup -S ${group} -g ${gid} && adduser -D -S ${user} -G ${group} -u ${uid} + RUN apk --update add python3 RUN apk --update add --virtual build-dependencies \ @@ -35,19 +42,29 @@ RUN ls -lha /opt/lemur/ WORKDIR /opt/lemur +RUN npm install --unsafe-perm + RUN pip3 install --upgrade pip -RUN npm install --unsafe-perm RUN pip3 install setuptools RUN pip3 install -e . + +#RUN node_modules/.bin/gulp build --urlContextPath=/arnold/foo + RUN node_modules/.bin/gulp build + +#RUN node_modules/.bin/gulp build -h + RUN node_modules/.bin/gulp package --urlContextPath=$(urlContextPath) RUN apk del build-dependencies + ##################### -RUN apk add --update libldap postgresql-client bash nginx supervisor +RUN apk add --update libldap postgresql-client bash nginx supervisor curl + +#RUN python3 /opt/lemur/lemur/manage.py reset_password -u lemur RUN mkdir -p /run/nginx/ @@ -57,10 +74,18 @@ COPY entrypoint / RUN chmod +x /entrypoint -COPY lemur.py /root/.lemur/lemur.conf.py +#RUN mkdir -p /conf + +COPY lemur.py /conf/lemur.conf.py + COPY supervisor.conf / COPY default.conf /etc/nginx/conf.d/ +HEALTHCHECK --interval=12s --timeout=12s --start-period=30s \ + CMD curl --fail http://localhost:80/api/1/healthcheck |grep -q ok || exit 1 + ENTRYPOINT ["/entrypoint"] +#CMD ["python3","/lemur/lemur/manage.py","start","-b","0.0.0.0:8000"] + CMD ["/usr/bin/supervisord","-c","supervisor.conf"] From 7eb6617a2801bfccbe290898e64d16b7aba345be Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 21:37:30 +0100 Subject: [PATCH 012/357] Create supervisor.conf --- docker/supervisor.conf | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 docker/supervisor.conf diff --git a/docker/supervisor.conf b/docker/supervisor.conf new file mode 100644 index 00000000..e04e4002 --- /dev/null +++ b/docker/supervisor.conf @@ -0,0 +1,31 @@ +[supervisord] +nodaemon=true +user=root +logfile=/dev/stdout +logfile_maxbytes=0 +pidfile = /tmp/supervisord.pid + + +[program:lemur] +command=python3 /opt/lemur/lemur/manage.py -c /conf/lemur.conf.py start -b 0.0.0.0:8000 +user=root +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes = 0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 + +[program:nginx] +command=nginx -g "daemon off;" +user=root +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes = 0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 + +[program:dcron] 
+command=crond -f +user=root +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes = 0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 From c25c703723a2eca125230570aa6ce406aa508d85 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 21:37:46 +0100 Subject: [PATCH 013/357] Create entrypoint --- docker/entrypoint | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100644 docker/entrypoint diff --git a/docker/entrypoint b/docker/entrypoint new file mode 100644 index 00000000..386cdc08 --- /dev/null +++ b/docker/entrypoint @@ -0,0 +1,32 @@ +#!/bin/sh + +#echo $POSTGRES_USER +#echo $POSTGRES_PASSWORD +#echo $POSTGRES_HOST +#echo $POSTGRES_PORT +#echo $POSTGRES_DB + +export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB" + +#echo $SQLALCHEMY_DATABASE_URI + +PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'select 1;;' +PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'CREATE EXTENSION pg_trgm;' + +# echo "from django.contrib.auth.models import User; User.objects.create_superuser('ronald', 'koko', 'koko')" | python /opt/lemur/lemur/manage.py shell + + +echo "running init" +python3 /opt/lemur/lemur/manage.py -c /conf/lemur.conf.py init -p password +echo "done" + + +cron="${custom_cron:-"*/5 * * * *"}" + +echo "${cron} /opt/check/exec.sh" >> /etc/crontabs/root + +#0 22 * * * lemur export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; python3 /opt/lemur/lemur/manage.py notify expirations +#*/15 * * * * lemur export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; python3 /opt/lemur/lemur/manage.py source sync -s all +#0 22 * * * lemur export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; python3 /opt/lemur/lemur/manage.py certificate check_revoked + +exec "$@" From 6d5782b44c832bfe5858cc5caab1c3b7d2315ae3 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 21:38:05 +0100 Subject: [PATCH 014/357] Create lemur.conf.py --- docker/lemur.conf.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 docker/lemur.conf.py diff --git a/docker/lemur.conf.py b/docker/lemur.conf.py new file mode 100644 index 00000000..753b39af --- /dev/null +++ b/docker/lemur.conf.py @@ -0,0 +1,31 @@ +import os +_basedir = os.path.abspath(os.path.dirname(__file__)) + +CORS = os.environ.get("CORS") == "True" +debug = os.environ.get("DEBUG") == "True" + +SECRET_KEY = repr(os.environ.get('SECRET_KEY','Hrs8kCDNPuT9vtshsSWzlrYW+d+PrAXvg/HwbRE6M3vzSJTTrA/ZEw==')) + +LEMUR_TOKEN_SECRET = repr(os.environ.get('LEMUR_TOKEN_SECRET','YVKT6nNHnWRWk28Lra1OPxMvHTqg1ZXvAcO7bkVNSbrEuDQPABM0VQ==')) +LEMUR_ENCRYPTION_KEYS = repr(os.environ.get('LEMUR_ENCRYPTION_KEYS','Ls-qg9j3EMFHyGB_NL0GcQLI6622n9pSyGM_Pu0GdCo=')) + +LEMUR_WHITELISTED_DOMAINS = [] + +LEMUR_EMAIL = '' +LEMUR_SECURITY_TEAM_EMAIL = [] + + +LEMUR_DEFAULT_COUNTRY = repr(os.environ.get('LEMUR_DEFAULT_COUNTRY','')) +LEMUR_DEFAULT_STATE = repr(os.environ.get('LEMUR_DEFAULT_STATE','')) +LEMUR_DEFAULT_LOCATION = repr(os.environ.get('LEMUR_DEFAULT_LOCATION','')) +LEMUR_DEFAULT_ORGANIZATION = repr(os.environ.get('LEMUR_DEFAULT_ORGANIZATION','')) +LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = repr(os.environ.get('LEMUR_DEFAULT_ORGANIZATIONAL_UNIT','')) + +ACTIVE_PROVIDERS = [] + +METRIC_PROVIDERS = [] + +LOG_LEVEL = str(os.environ.get('LOG_LEVEL','DEBUG')) +LOG_FILE = str(os.environ.get('LOG_FILE','lemur.log')) + 
+SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI','postgresql://lemur:lemur@localhost:5432/lemur') From 5567bb2eaafc5678bb14d88508e371cad1efd188 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 21:43:04 +0100 Subject: [PATCH 015/357] Update Dockerfile --- docker/Dockerfile | 80 +++++++++++++++-------------------------------- 1 file changed, 25 insertions(+), 55 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 54b517b8..d665da0e 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,20 +1,22 @@ -FROM alpine:3.8 as builder +FROM alpine:3.8 +ARG VERSION ENV VERSION master -#ENV VERSION 0.7.0 ENV uid 1337 ENV gid 1337 ENV user lemur ENV group lemur -#RUN adduser -D -S -u ${uid} ${user} -G ${group} +COPY entrypoint / +COPY lemur.conf.py /conf/lemur.conf.py +COPY supervisor.conf / +COPY default.conf /etc/nginx/conf.d/ -RUN addgroup -S ${group} -g ${gid} && adduser -D -S ${user} -G ${group} -u ${uid} - -RUN apk --update add python3 - -RUN apk --update add --virtual build-dependencies \ +RUN addgroup -S ${group} -g ${gid} && \ + adduser -D -S ${user} -G ${group} -u ${uid} && \ + apk --update add python3 libldap postgresql-client bash nginx supervisor curl && \ + apk --update add --virtual build-dependencies \ git \ tar \ curl \ @@ -32,60 +34,28 @@ RUN apk --update add --virtual build-dependencies \ libressl-dev \ libffi-dev \ cyrus-sasl-dev \ - openldap-dev - -#RUN git clone https://github.com/Netflix/lemur - -RUN mkdir -p /opt/lemur && curl -sSL https://github.com/Netflix/lemur/archive/$VERSION.tar.gz | tar xz -C /opt/lemur --strip-components=1 - -RUN ls -lha /opt/lemur/ - + openldap-dev && \ + mkdir -p /opt/lemur && curl -sSL https://github.com/Netflix/lemur/archive/$VERSION.tar.gz | tar xz -C /opt/lemur --strip-components=1 && \ + pip3 install --upgrade pip && \ + pip3 install --upgrade setuptools && \ + chmod +x /entrypoint && \ + mkdir -p /run/nginx/ + WORKDIR /opt/lemur -RUN npm install --unsafe-perm - -RUN pip3 install --upgrade pip - -RUN pip3 install setuptools -RUN pip3 install -e . - -#RUN node_modules/.bin/gulp build --urlContextPath=/arnold/foo - -RUN node_modules/.bin/gulp build - -#RUN node_modules/.bin/gulp build -h - -RUN node_modules/.bin/gulp package --urlContextPath=$(urlContextPath) - -RUN apk del build-dependencies - - -##################### - -RUN apk add --update libldap postgresql-client bash nginx supervisor curl - -#RUN python3 /opt/lemur/lemur/manage.py reset_password -u lemur - -RUN mkdir -p /run/nginx/ +RUN npm install --unsafe-perm && \ + pip3 install -e . 
&& \ + node_modules/.bin/gulp build && \ + node_modules/.bin/gulp package --urlContextPath=$(urlContextPath) && \ + apk del build-dependencies WORKDIR / -COPY entrypoint / - -RUN chmod +x /entrypoint - -#RUN mkdir -p /conf - -COPY lemur.py /conf/lemur.conf.py - -COPY supervisor.conf / -COPY default.conf /etc/nginx/conf.d/ - HEALTHCHECK --interval=12s --timeout=12s --start-period=30s \ - CMD curl --fail http://localhost:80/api/1/healthcheck |grep -q ok || exit 1 + CMD curl --fail http://localhost:80/api/1/healthcheck | grep -q ok || exit 1 + +USER lemur ENTRYPOINT ["/entrypoint"] -#CMD ["python3","/lemur/lemur/manage.py","start","-b","0.0.0.0:8000"] - CMD ["/usr/bin/supervisord","-c","supervisor.conf"] From 390157168546c2c0b32f69eba7ff786eee55448e Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 21:44:05 +0100 Subject: [PATCH 016/357] Update Dockerfile --- docker/Dockerfile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index d665da0e..0953b230 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -8,6 +8,7 @@ ENV gid 1337 ENV user lemur ENV group lemur + COPY entrypoint / COPY lemur.conf.py /conf/lemur.conf.py COPY supervisor.conf / @@ -39,7 +40,8 @@ RUN addgroup -S ${group} -g ${gid} && \ pip3 install --upgrade pip && \ pip3 install --upgrade setuptools && \ chmod +x /entrypoint && \ - mkdir -p /run/nginx/ + mkdir -p /run/nginx/ && \ + chown -R $user:$group /opt/lemur/ WORKDIR /opt/lemur From d8377ffc57c6a9e281223a72a775e0024d5b09bd Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 21:44:27 +0100 Subject: [PATCH 017/357] Update supervisor.conf --- docker/supervisor.conf | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/docker/supervisor.conf b/docker/supervisor.conf index e04e4002..b6355b6c 100644 --- a/docker/supervisor.conf +++ b/docker/supervisor.conf @@ -5,10 +5,9 @@ logfile=/dev/stdout logfile_maxbytes=0 pidfile = /tmp/supervisord.pid - [program:lemur] -command=python3 /opt/lemur/lemur/manage.py -c /conf/lemur.conf.py start -b 0.0.0.0:8000 -user=root +command=python3 /opt/lemur/lemur/manage.py start -b 0.0.0.0:8000 +user=lemur stdout_logfile=/dev/stdout stdout_logfile_maxbytes = 0 stderr_logfile=/dev/stderr From 4edda34e2dfb6868db4aa7053daea029a3cbcca2 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 21:47:27 +0100 Subject: [PATCH 018/357] Update entrypoint --- docker/entrypoint | 28 ++++++++-------------------- 1 file changed, 8 insertions(+), 20 deletions(-) diff --git a/docker/entrypoint b/docker/entrypoint index 386cdc08..a3b4e20c 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -1,32 +1,20 @@ #!/bin/sh -#echo $POSTGRES_USER -#echo $POSTGRES_PASSWORD -#echo $POSTGRES_HOST -#echo $POSTGRES_PORT -#echo $POSTGRES_DB - export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB" -#echo $SQLALCHEMY_DATABASE_URI - PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'select 1;;' PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'CREATE EXTENSION pg_trgm;' -# echo "from django.contrib.auth.models import User; User.objects.create_superuser('ronald', 'koko', 'koko')" | python /opt/lemur/lemur/manage.py shell - - -echo "running init" +echo "Running init" python3 /opt/lemur/lemur/manage.py -c /conf/lemur.conf.py init -p password -echo "done" +echo "Done" 
+cron_notify="${CRON_NOTIFY:-"0 22 * * *"}" +cron_sync="${CRON_SYNC:-"*/15 * * * *"}" +cron_check_revoked="${CRON_CHECK_REVOKED:-"0 22 * * *"}" -cron="${custom_cron:-"*/5 * * * *"}" - -echo "${cron} /opt/check/exec.sh" >> /etc/crontabs/root - -#0 22 * * * lemur export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; python3 /opt/lemur/lemur/manage.py notify expirations -#*/15 * * * * lemur export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; python3 /opt/lemur/lemur/manage.py source sync -s all -#0 22 * * * lemur export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; python3 /opt/lemur/lemur/manage.py certificate check_revoked +echo "${cron_notify} lemur python3 /opt/lemur/lemur/manage.py notify expirations" >> /etc/crontabs/root +echo "${cron_sync} lemur python3 /opt/lemur/lemur/manage.py source sync -s all" >> /etc/crontabs/root +echo "${cron_check_revoked} lemur /opt/lemur/lemur/manage.py certificate check_revoked" >> /etc/crontabs/root exec "$@" From ce634bfd08d91069699a3f1f208cf5899ab3f4f3 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 21:49:03 +0100 Subject: [PATCH 019/357] Create default.conf --- docker/default.conf | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 docker/default.conf diff --git a/docker/default.conf b/docker/default.conf new file mode 100644 index 00000000..d71a93d3 --- /dev/null +++ b/docker/default.conf @@ -0,0 +1,26 @@ +add_header X-Frame-Options DENY; +add_header X-Content-Type-Options nosniff; +add_header X-XSS-Protection "1; mode=block"; + +server { + listen 80; + access_log /dev/stdout; + error_log /dev/stderr; + + location /api { + proxy_pass http://127.0.0.1:8000; + proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504; + proxy_redirect off; + proxy_buffering off; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + location / { + root /opt/lemur/lemur/static/dist; + include mime.types; + index index.html; + } + +} From f8008e8614cdc35f62f42de00ba1c356b29999f0 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 22:01:28 +0100 Subject: [PATCH 020/357] Update Dockerfile --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 0953b230..0befdc57 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -56,7 +56,7 @@ WORKDIR / HEALTHCHECK --interval=12s --timeout=12s --start-period=30s \ CMD curl --fail http://localhost:80/api/1/healthcheck | grep -q ok || exit 1 -USER lemur +USER root ENTRYPOINT ["/entrypoint"] From 58296cff5aa3b0d75a353a9c95c735678db2a4b6 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 22:25:11 +0100 Subject: [PATCH 021/357] Update entrypoint --- docker/entrypoint | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/docker/entrypoint b/docker/entrypoint index a3b4e20c..eced8695 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -2,19 +2,28 @@ export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB" -PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'select 1;;' +PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'select 1;' + +echo "Create Postgres trgm extension" PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d 
$POSTGRES_DB --command 'CREATE EXTENSION pg_trgm;' +echo "Done" echo "Running init" -python3 /opt/lemur/lemur/manage.py -c /conf/lemur.conf.py init -p password +python3 /opt/lemur/lemur/manage.py -c /conf/lemur.conf.py init +echo "Done" + +echo "Creating user" +echo "something that will create user" | python3 /opt/lemur/lemur/manage.py shell echo "Done" cron_notify="${CRON_NOTIFY:-"0 22 * * *"}" cron_sync="${CRON_SYNC:-"*/15 * * * *"}" cron_check_revoked="${CRON_CHECK_REVOKED:-"0 22 * * *"}" +echo "Populating crontab" echo "${cron_notify} lemur python3 /opt/lemur/lemur/manage.py notify expirations" >> /etc/crontabs/root echo "${cron_sync} lemur python3 /opt/lemur/lemur/manage.py source sync -s all" >> /etc/crontabs/root echo "${cron_check_revoked} lemur /opt/lemur/lemur/manage.py certificate check_revoked" >> /etc/crontabs/root +echo "Done" exec "$@" From 60b84a29b515639bf076a60d5e345adea5f84aaa Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 22:28:02 +0100 Subject: [PATCH 022/357] Update Dockerfile --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 0befdc57..e3bb4552 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -10,7 +10,7 @@ ENV group lemur COPY entrypoint / -COPY lemur.conf.py /conf/lemur.conf.py +COPY lemur.conf.py /home/lemur/.lemur/lemur.conf.py COPY supervisor.conf / COPY default.conf /etc/nginx/conf.d/ From 692671a5431d2db17d2cf8d8f7b1c0503f0ed604 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 22:43:55 +0100 Subject: [PATCH 023/357] Update entrypoint --- docker/entrypoint | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/docker/entrypoint b/docker/entrypoint index eced8695..2b275e60 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -8,13 +8,21 @@ echo "Create Postgres trgm extension" PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'CREATE EXTENSION pg_trgm;' echo "Done" +# if [ ! 
-f /home/lemur/.lemur/lemur.conf.py ]; then +# echo "Creating config" +# https://github.com/Netflix/lemur/issues/2257 +# python3 /opt/lemur/lemur/manage.py create_config +# echo "Done" +# fi + echo "Running init" python3 /opt/lemur/lemur/manage.py -c /conf/lemur.conf.py init echo "Done" -echo "Creating user" -echo "something that will create user" | python3 /opt/lemur/lemur/manage.py shell -echo "Done" +# echo "Creating user" +# https://github.com/Netflix/lemur/issues/ +# echo "something that will create user" | python3 /opt/lemur/lemur/manage.py shell +# echo "Done" cron_notify="${CRON_NOTIFY:-"0 22 * * *"}" cron_sync="${CRON_SYNC:-"*/15 * * * *"}" From a4ce379bced46a095f95c29c03ff9aae832afa05 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 22:46:41 +0100 Subject: [PATCH 024/357] Update lemur.conf.py --- docker/lemur.conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/lemur.conf.py b/docker/lemur.conf.py index 753b39af..a5f7e8b6 100644 --- a/docker/lemur.conf.py +++ b/docker/lemur.conf.py @@ -26,6 +26,6 @@ ACTIVE_PROVIDERS = [] METRIC_PROVIDERS = [] LOG_LEVEL = str(os.environ.get('LOG_LEVEL','DEBUG')) -LOG_FILE = str(os.environ.get('LOG_FILE','lemur.log')) +LOG_FILE = str(os.environ.get('LOG_FILE','/home/lemur/.lemur/lemur.log')) SQLALCHEMY_DATABASE_URI = os.environ.get('SQLALCHEMY_DATABASE_URI','postgresql://lemur:lemur@localhost:5432/lemur') From 2ae6c3a7147bcd23175932ac7bcd057d99ed48b2 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 22:48:28 +0100 Subject: [PATCH 025/357] Update Dockerfile --- docker/Dockerfile | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index e3bb4552..c2cc805f 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -36,12 +36,15 @@ RUN addgroup -S ${group} -g ${gid} && \ libffi-dev \ cyrus-sasl-dev \ openldap-dev && \ - mkdir -p /opt/lemur && curl -sSL https://github.com/Netflix/lemur/archive/$VERSION.tar.gz | tar xz -C /opt/lemur --strip-components=1 && \ + mkdir -p /opt/lemur /home/lemur/.lemur/ && \ + curl -sSL https://github.com/Netflix/lemur/archive/$VERSION.tar.gz | tar xz -C /opt/lemur --strip-components=1 && \ pip3 install --upgrade pip && \ pip3 install --upgrade setuptools && \ chmod +x /entrypoint && \ mkdir -p /run/nginx/ && \ - chown -R $user:$group /opt/lemur/ + touch /home/lemur/.lemur/lemur.log && \ + chown -R $user:$group /opt/lemur/ /home/lemur/.lemur/ && \ + ln -s /home/lemur/.lemur/lemur.log /dev/stdout WORKDIR /opt/lemur From 7348fd37e86e5276cfe67a31f8693deafdf672d3 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 22:50:22 +0100 Subject: [PATCH 026/357] Update Dockerfile --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index c2cc805f..8305cdd5 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -44,7 +44,7 @@ RUN addgroup -S ${group} -g ${gid} && \ mkdir -p /run/nginx/ && \ touch /home/lemur/.lemur/lemur.log && \ chown -R $user:$group /opt/lemur/ /home/lemur/.lemur/ && \ - ln -s /home/lemur/.lemur/lemur.log /dev/stdout + ln -s /dev/stdout /home/lemur/.lemur/lemur.log WORKDIR /opt/lemur From 97f6cdccfcd84848f9ca1f2de8df9bf03645010a Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 22:58:06 +0100 Subject: [PATCH 027/357] Update Dockerfile --- docker/Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 8305cdd5..d3d0d78b 100644 --- a/docker/Dockerfile +++ 
b/docker/Dockerfile @@ -42,7 +42,6 @@ RUN addgroup -S ${group} -g ${gid} && \ pip3 install --upgrade setuptools && \ chmod +x /entrypoint && \ mkdir -p /run/nginx/ && \ - touch /home/lemur/.lemur/lemur.log && \ chown -R $user:$group /opt/lemur/ /home/lemur/.lemur/ && \ ln -s /dev/stdout /home/lemur/.lemur/lemur.log From d5d42415013f52322f54d632cf11474ad356af7f Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 23:20:29 +0100 Subject: [PATCH 028/357] Update entrypoint --- docker/entrypoint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/entrypoint b/docker/entrypoint index 2b275e60..3604fce5 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -16,7 +16,7 @@ echo "Done" # fi echo "Running init" -python3 /opt/lemur/lemur/manage.py -c /conf/lemur.conf.py init +su lemur -c "python3 /opt/lemur/lemur/manage.py init" echo "Done" # echo "Creating user" From abd29f8462211f1f48e7b34991fc6ebc671973b1 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 23:53:39 +0100 Subject: [PATCH 029/357] Update entrypoint --- docker/entrypoint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/entrypoint b/docker/entrypoint index 3604fce5..0b39bfed 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -1,6 +1,6 @@ #!/bin/sh -export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB" +echo 'export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB' >> /etc/environment PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'select 1;' From ba20c0742083a4de25a319ad0387a8e40c604a0e Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 23:54:31 +0100 Subject: [PATCH 030/357] Update entrypoint --- docker/entrypoint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/entrypoint b/docker/entrypoint index 0b39bfed..3604fce5 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -1,6 +1,6 @@ #!/bin/sh -echo 'export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB' >> /etc/environment +export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB" PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'select 1;' From e488c0ddcf8c4ff4c7a126e661673758c0132ea8 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Sun, 30 Dec 2018 23:57:14 +0100 Subject: [PATCH 031/357] Update Dockerfile --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index d3d0d78b..546e325e 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -16,7 +16,7 @@ COPY default.conf /etc/nginx/conf.d/ RUN addgroup -S ${group} -g ${gid} && \ adduser -D -S ${user} -G ${group} -u ${uid} && \ - apk --update add python3 libldap postgresql-client bash nginx supervisor curl && \ + apk --update add python3 libldap postgresql-client nginx supervisor curl tzdata bash && \ apk --update add --virtual build-dependencies \ git \ tar \ From aefdead50a95b35a7b852f5e7cd1a4b7befe3e67 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 00:04:58 +0100 Subject: [PATCH 032/357] Update entrypoint --- docker/entrypoint | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/entrypoint b/docker/entrypoint index 3604fce5..d0d8ab8b 
100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -17,6 +17,7 @@ echo "Done" echo "Running init" su lemur -c "python3 /opt/lemur/lemur/manage.py init" +#export LEMUR_CONF=/home/lemur/.lemur/lemur.conf.py ; python3 /opt/lemur/lemur/manage.py init echo "Done" # echo "Creating user" From 25c4672845088e1324caa23e577796b5cd763842 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 10:41:19 +0100 Subject: [PATCH 033/357] Update supervisor.conf --- docker/supervisor.conf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/supervisor.conf b/docker/supervisor.conf index b6355b6c..311d997f 100644 --- a/docker/supervisor.conf +++ b/docker/supervisor.conf @@ -6,7 +6,7 @@ logfile_maxbytes=0 pidfile = /tmp/supervisord.pid [program:lemur] -command=python3 /opt/lemur/lemur/manage.py start -b 0.0.0.0:8000 +command=/usr/bin/python3 /opt/lemur/lemur/manage.py start -b 0.0.0.0:8000 user=lemur stdout_logfile=/dev/stdout stdout_logfile_maxbytes = 0 @@ -14,7 +14,7 @@ stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 [program:nginx] -command=nginx -g "daemon off;" +command=/usr/sbin/nginx -g "daemon off;" user=root stdout_logfile=/dev/stdout stdout_logfile_maxbytes = 0 @@ -22,7 +22,7 @@ stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 [program:dcron] -command=crond -f +command=/usr/sbin/crond -f user=root stdout_logfile=/dev/stdout stdout_logfile_maxbytes = 0 From 239acb5f95a2b0fc6a4e7ffeb4bb514f6f3ac401 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 12:49:21 +0100 Subject: [PATCH 034/357] Update supervisor.conf --- docker/supervisor.conf | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docker/supervisor.conf b/docker/supervisor.conf index 311d997f..185b07d1 100644 --- a/docker/supervisor.conf +++ b/docker/supervisor.conf @@ -1,4 +1,5 @@ [supervisord] +environment=LEMUR_CONF=/home/lemur/.lemur/lemur.conf.py nodaemon=true user=root logfile=/dev/stdout @@ -6,8 +7,9 @@ logfile_maxbytes=0 pidfile = /tmp/supervisord.pid [program:lemur] -command=/usr/bin/python3 /opt/lemur/lemur/manage.py start -b 0.0.0.0:8000 +command=/usr/bin/python3 manage.py start -b 0.0.0.0:8000 user=lemur +directory=/opt/lemur/lemur stdout_logfile=/dev/stdout stdout_logfile_maxbytes = 0 stderr_logfile=/dev/stderr @@ -21,7 +23,7 @@ stdout_logfile_maxbytes = 0 stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 -[program:dcron] +[program:cron] command=/usr/sbin/crond -f user=root stdout_logfile=/dev/stdout From ca6f2b782b03f8c1f8a65a1b73507108d6a222de Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 12:52:07 +0100 Subject: [PATCH 035/357] Update supervisor.conf --- docker/supervisor.conf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/supervisor.conf b/docker/supervisor.conf index 185b07d1..fed01581 100644 --- a/docker/supervisor.conf +++ b/docker/supervisor.conf @@ -1,5 +1,4 @@ [supervisord] -environment=LEMUR_CONF=/home/lemur/.lemur/lemur.conf.py nodaemon=true user=root logfile=/dev/stdout @@ -7,6 +6,7 @@ logfile_maxbytes=0 pidfile = /tmp/supervisord.pid [program:lemur] +environment=LEMUR_CONF=/home/lemur/.lemur/lemur.conf.py command=/usr/bin/python3 manage.py start -b 0.0.0.0:8000 user=lemur directory=/opt/lemur/lemur From c94557f2edd8ddb006618e8095532c090aa1c10c Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 13:21:13 +0100 Subject: [PATCH 036/357] Update entrypoint --- docker/entrypoint | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docker/entrypoint b/docker/entrypoint index d0d8ab8b..dce3773d 
100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -8,6 +8,11 @@ echo "Create Postgres trgm extension" PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'CREATE EXTENSION pg_trgm;' echo "Done" + +# if [ ! -f /home/lemur/.lemur/lemur.conf.py ]; then +# openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -subj "/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Org/CN=FAKE +# fi + # if [ ! -f /home/lemur/.lemur/lemur.conf.py ]; then # echo "Creating config" # https://github.com/Netflix/lemur/issues/2257 From 666f180482b17a578925566d118401d1390e63ae Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 13:21:30 +0100 Subject: [PATCH 037/357] Update Dockerfile --- docker/Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 546e325e..d2ae56a3 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -16,7 +16,7 @@ COPY default.conf /etc/nginx/conf.d/ RUN addgroup -S ${group} -g ${gid} && \ adduser -D -S ${user} -G ${group} -u ${uid} && \ - apk --update add python3 libldap postgresql-client nginx supervisor curl tzdata bash && \ + apk --update add python3 libldap postgresql-client nginx supervisor curl tzdata openssl bash && \ apk --update add --virtual build-dependencies \ git \ tar \ From d6a374130cb033929c4c834b690af7a6d4fef229 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 13:33:58 +0100 Subject: [PATCH 038/357] Update entrypoint --- docker/entrypoint | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/entrypoint b/docker/entrypoint index dce3773d..82fe1780 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -9,9 +9,9 @@ PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTG echo "Done" -# if [ ! -f /home/lemur/.lemur/lemur.conf.py ]; then -# openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -subj "/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Org/CN=FAKE -# fi +if [ ! -f /etc/nginx/ssl/server.crt ] && [ ! -f /etc/nginx/ssl/server.key ]; then + openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -subj "/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Org/CN=FAKE" +fi # if [ ! -f /home/lemur/.lemur/lemur.conf.py ]; then # echo "Creating config" From 341756d7c0fde73c58e9970393067fc1d79b74de Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 14:07:56 +0100 Subject: [PATCH 039/357] Update entrypoint --- docker/entrypoint | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/docker/entrypoint b/docker/entrypoint index 82fe1780..1c895b16 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -8,10 +8,12 @@ echo "Create Postgres trgm extension" PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'CREATE EXTENSION pg_trgm;' echo "Done" - -if [ ! -f /etc/nginx/ssl/server.crt ] && [ ! -f /etc/nginx/ssl/server.key ]; then - openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -subj "/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Org/CN=FAKE" -fi +if [ -z ${SKIP_SSL} ]; then + if [ ! -f /etc/nginx/ssl/server.crt ] && [ ! -f /etc/nginx/ssl/server.key ]; then + openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -subj "/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Org/CN=FAKE" + fi + cp default.conf default_ssl.conf +then # if [ ! 
-f /home/lemur/.lemur/lemur.conf.py ]; then # echo "Creating config" From 6b1d2bfb60578dabbc390a64b0f7efc74834b475 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 14:55:13 +0100 Subject: [PATCH 040/357] Create default-ssl.conf --- docker/default-ssl.conf | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 docker/default-ssl.conf diff --git a/docker/default-ssl.conf b/docker/default-ssl.conf new file mode 100644 index 00000000..8b791c45 --- /dev/null +++ b/docker/default-ssl.conf @@ -0,0 +1,31 @@ +add_header X-Frame-Options DENY; +add_header X-Content-Type-Options nosniff; +add_header X-XSS-Protection "1; mode=block"; + +server { + listen 443; + server_name _; + access_log /dev/stdout; + error_log /dev/stderr; + ssl_certificate /etc/nginx/ssl/server.crt; + ssl_certificate_key /etc/nginx/ssl/server.key; + ssl_protocols TLSv1 TLSv1.1 TLSv1.2; + ssl_ciphers HIGH:!aNULL:!MD5; + + location /api { + proxy_pass http://127.0.0.1:8000; + proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504; + proxy_redirect off; + proxy_buffering off; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + location / { + root /opt/lemur/lemur/static/dist; + include mime.types; + index index.html; + } + +} From 542e9539199d4c3a51c77ee9911e28cff7afcf90 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Wed, 20 Jun 2018 18:42:34 +0300 Subject: [PATCH 041/357] Check that stored private keys match certificates This is done in two places: * Certificate import validator -- throws validation errors. * Certificate model constructor -- to ensure integrity of Lemur's data even when issuer plugins or other code paths have bugs. --- lemur/certificates/models.py | 14 ++++- lemur/certificates/schemas.py | 26 ++++++++-- lemur/common/utils.py | 15 ++++++ lemur/common/validators.py | 33 ++++++------ lemur/tests/conftest.py | 9 +++- lemur/tests/factories.py | 5 ++ lemur/tests/test_certificates.py | 88 ++++++++++++++++++++++++++++++-- lemur/tests/test_validators.py | 22 ++++++-- 8 files changed, 181 insertions(+), 31 deletions(-) diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index e2ac2cba..3eaba746 100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -19,7 +19,7 @@ from sqlalchemy.sql.expression import case, extract from sqlalchemy_utils.types.arrow import ArrowType from werkzeug.utils import cached_property -from lemur.common import defaults, utils +from lemur.common import defaults, utils, validators from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS from lemur.database import db from lemur.domains.models import Domain @@ -186,6 +186,18 @@ class Certificate(db.Model): for domain in defaults.domains(cert): self.domains.append(Domain(name=domain)) + # Check integrity before saving anything into the database. + # For user-facing API calls, validation should also be done in schema validators. + self.check_integrity() + + def check_integrity(self): + """ + Integrity checks: Does the cert have a matching private key? 
+ """ + if self.private_key: + validators.verify_private_key_match(utils.parse_private_key(self.private_key), self.parsed_cert, + error_class=AssertionError) + @cached_property def parsed_cert(self): assert self.body, "Certificate body not set" diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index bf18eac9..6b457086 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -10,7 +10,7 @@ from marshmallow import fields, validate, validates_schema, post_load, pre_load from marshmallow.exceptions import ValidationError from lemur.authorities.schemas import AuthorityNestedOutputSchema -from lemur.common import validators, missing +from lemur.common import missing, utils, validators from lemur.common.fields import ArrowDateTime, Hex from lemur.common.schema import LemurInputSchema, LemurOutputSchema from lemur.constants import CERTIFICATE_KEY_TYPES @@ -242,8 +242,8 @@ class CertificateUploadInputSchema(CertificateCreationSchema): authority = fields.Nested(AssociatedAuthoritySchema, required=False) notify = fields.Boolean(missing=True) external_id = fields.String(missing=None, allow_none=True) - private_key = fields.String(validate=validators.private_key) - body = fields.String(required=True, validate=validators.public_certificate) + private_key = fields.String() + body = fields.String(required=True) chain = fields.String(validate=validators.public_certificate, missing=None, allow_none=True) # TODO this could be multiple certificates @@ -258,6 +258,26 @@ class CertificateUploadInputSchema(CertificateCreationSchema): if not data.get('private_key'): raise ValidationError('Destinations require private key.') + @validates_schema + def validate_cert_private_key(self, data): + cert = None + key = None + if data.get('body'): + try: + cert = utils.parse_certificate(data['body']) + except ValueError: + raise ValidationError("Public certificate presented is not valid.", field_names=['body']) + + if data.get('private_key'): + try: + key = utils.parse_private_key(data['private_key']) + except ValueError: + raise ValidationError("Private key presented is not valid.", field_names=['private_key']) + + if cert and key: + # Throws ValidationError + validators.verify_private_key_match(key, cert) + class CertificateExportInputSchema(LemurInputSchema): plugin = fields.Nested(PluginInputSchema) diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 7ea9d7f2..62e59d69 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -13,6 +13,7 @@ import sqlalchemy from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.asymmetric import rsa, ec +from cryptography.hazmat.primitives.serialization import load_pem_private_key from flask_restful.reqparse import RequestParser from sqlalchemy import and_, func @@ -52,6 +53,20 @@ def parse_certificate(body): return x509.load_pem_x509_certificate(body, default_backend()) +def parse_private_key(private_key): + """ + Parses a PEM-format private key (RSA, DSA, ECDSA or any other supported algorithm). + + Raises ValueError for an invalid string. + + :param private_key: String containing PEM private key + """ + if isinstance(private_key, str): + private_key = private_key.encode('utf8') + + return load_pem_private_key(private_key, password=None, backend=default_backend()) + + def parse_csr(csr): """ Helper function that parses a CSR. 
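
The utils.py hunk above and the validators.py hunk below work as a pair: parse_private_key() loads the PEM key, and verify_private_key_match() then compares the public numbers derived from that key against the ones embedded in the certificate. A minimal, self-contained sketch of that check, using the same cryptography calls as the patch — the key_matches_cert helper name and the PEM-string arguments are illustrative placeholders, not part of Lemur:

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.serialization import load_pem_private_key


def key_matches_cert(key_pem: str, cert_pem: str) -> bool:
    """Return True when the private key's public numbers equal the certificate's."""
    key = load_pem_private_key(key_pem.encode('utf8'), password=None,
                               backend=default_backend())
    cert = x509.load_pem_x509_certificate(cert_pem.encode('utf8'), default_backend())
    # Same comparison verify_private_key_match() performs before raising its error.
    return key.public_key().public_numbers() == cert.public_key().public_numbers()


# Usage sketch: pass the same PEM strings the upload schema receives, e.g.
# key_matches_cert(SAN_CERT_KEY, SAN_CERT_STR) should be True for the test vectors.
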
diff --git a/lemur/common/validators.py b/lemur/common/validators.py index 47a94a30..90169553 100644 --- a/lemur/common/validators.py +++ b/lemur/common/validators.py @@ -2,14 +2,12 @@ import re from cryptography import x509 from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import serialization from cryptography.x509 import NameOID from flask import current_app from marshmallow.exceptions import ValidationError from lemur.auth.permissions import SensitiveDomainPermission from lemur.common.utils import parse_certificate, is_weekend -from lemur.domains import service as domain_service def public_certificate(body): @@ -26,22 +24,6 @@ def public_certificate(body): raise ValidationError('Public certificate presented is not valid.') -def private_key(key): - """ - User to validate that a given string is a RSA private key - - :param key: - :return: :raise ValueError: - """ - try: - if isinstance(key, bytes): - serialization.load_pem_private_key(key, None, backend=default_backend()) - else: - serialization.load_pem_private_key(key.encode('utf-8'), None, backend=default_backend()) - except Exception: - raise ValidationError('Private key presented is not valid.') - - def common_name(value): """If the common name could be a domain name, apply domain validation rules.""" # Common name could be a domain name, or a human-readable name of the subject (often used in CA names or client @@ -66,6 +48,9 @@ def sensitive_domain(domain): raise ValidationError('Domain {0} does not match whitelisted domain patterns. ' 'Contact an administrator to issue the certificate.'.format(domain)) + # Avoid circular import. + from lemur.domains import service as domain_service + if any(d.sensitive for d in domain_service.get_by_name(domain)): raise ValidationError('Domain {0} has been marked as sensitive. ' 'Contact an administrator to issue the certificate.'.format(domain)) @@ -141,3 +126,15 @@ def dates(data): raise ValidationError('Validity end must not be after {0}'.format(data['authority'].authority_certificate.not_after)) return data + + +def verify_private_key_match(key, cert, error_class=ValidationError): + """ + Checks that the supplied private key matches the certificate. 
+ + :param cert: Parsed certificate + :param key: Parsed private key + :param error_class: Exception class to raise on error + """ + if key.public_key().public_numbers() != cert.public_key().public_numbers(): + raise error_class("Private key does not match certificate.") diff --git a/lemur/tests/conftest.py b/lemur/tests/conftest.py index d292e6d6..9a48eb94 100644 --- a/lemur/tests/conftest.py +++ b/lemur/tests/conftest.py @@ -15,7 +15,7 @@ from lemur.tests.vectors import SAN_CERT_KEY, INTERMEDIATE_KEY from .factories import ApiKeyFactory, AuthorityFactory, NotificationFactory, DestinationFactory, \ CertificateFactory, UserFactory, RoleFactory, SourceFactory, EndpointFactory, \ - RotationPolicyFactory, PendingCertificateFactory, AsyncAuthorityFactory + RotationPolicyFactory, PendingCertificateFactory, AsyncAuthorityFactory, CryptoAuthorityFactory def pytest_runtest_setup(item): @@ -91,6 +91,13 @@ def authority(session): return a +@pytest.fixture +def crypto_authority(session): + a = CryptoAuthorityFactory() + session.commit() + return a + + @pytest.fixture def async_authority(session): a = AsyncAuthorityFactory() diff --git a/lemur/tests/factories.py b/lemur/tests/factories.py index cae2c354..3717c64d 100644 --- a/lemur/tests/factories.py +++ b/lemur/tests/factories.py @@ -168,6 +168,11 @@ class AsyncAuthorityFactory(AuthorityFactory): authority_certificate = SubFactory(CertificateFactory) +class CryptoAuthorityFactory(AuthorityFactory): + """Authority factory based on 'cryptography' plugin.""" + plugin = {'slug': 'cryptography-issuer'} + + class DestinationFactory(BaseFactory): """Destination factory.""" plugin_name = 'test-destination' diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index 87416a7a..a1df1c0d 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -18,7 +18,7 @@ from lemur.domains.models import Domain from lemur.tests.vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN, CSR_STR, \ - INTERMEDIATE_CERT_STR, SAN_CERT_STR, SAN_CERT_KEY + INTERMEDIATE_CERT_STR, SAN_CERT_STR, SAN_CERT_KEY, ROOTCA_KEY, ROOTCA_CERT_STR def test_get_or_increase_name(session, certificate): @@ -365,6 +365,85 @@ def test_certificate_sensitive_name(client, authority, session, logged_in_user): assert errors['common_name'][0].startswith("Domain sensitive.example.com has been marked as sensitive") +def test_certificate_upload_schema_ok(client): + from lemur.certificates.schemas import CertificateUploadInputSchema + data = { + 'name': 'Jane', + 'owner': 'pwner@example.com', + 'body': SAN_CERT_STR, + 'privateKey': SAN_CERT_KEY, + 'chain': INTERMEDIATE_CERT_STR, + 'external_id': '1234', + } + data, errors = CertificateUploadInputSchema().load(data) + assert not errors + + +def test_certificate_upload_schema_minimal(client): + from lemur.certificates.schemas import CertificateUploadInputSchema + data = { + 'owner': 'pwner@example.com', + 'body': SAN_CERT_STR, + } + data, errors = CertificateUploadInputSchema().load(data) + assert not errors + + +def test_certificate_upload_schema_long_chain(client): + from lemur.certificates.schemas import CertificateUploadInputSchema + data = { + 'owner': 'pwner@example.com', + 'body': SAN_CERT_STR, + 'chain': INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR + } + data, errors = CertificateUploadInputSchema().load(data) + assert not errors + + +def test_certificate_upload_schema_invalid_body(client): + from lemur.certificates.schemas import CertificateUploadInputSchema + data = 
{ + 'owner': 'pwner@example.com', + 'body': 'Hereby I certify that this is a valid body', + } + data, errors = CertificateUploadInputSchema().load(data) + assert errors == {'body': ['Public certificate presented is not valid.']} + + +def test_certificate_upload_schema_invalid_pkey(client): + from lemur.certificates.schemas import CertificateUploadInputSchema + data = { + 'owner': 'pwner@example.com', + 'body': SAN_CERT_STR, + 'privateKey': 'Look at me Im a private key!!111', + } + data, errors = CertificateUploadInputSchema().load(data) + assert errors == {'private_key': ['Private key presented is not valid.']} + + +def test_certificate_upload_schema_invalid_chain(client): + from lemur.certificates.schemas import CertificateUploadInputSchema + data = { + 'body': SAN_CERT_STR, + 'chain': 'CHAINSAW', + 'owner': 'pwner@example.com', + } + data, errors = CertificateUploadInputSchema().load(data) + assert errors == {'chain': ['Public certificate presented is not valid.']} + + +def test_certificate_upload_schema_wrong_pkey(client): + from lemur.certificates.schemas import CertificateUploadInputSchema + data = { + 'body': SAN_CERT_STR, + 'privateKey': ROOTCA_KEY, + 'chain': INTERMEDIATE_CERT_STR, + 'owner': 'pwner@example.com', + } + data, errors = CertificateUploadInputSchema().load(data) + assert errors == {'_schema': ['Private key does not match certificate.']} + + def test_create_basic_csr(client): csr_config = dict( common_name='example.com', @@ -462,8 +541,11 @@ def test_create_certificate(issuer_plugin, authority, user): assert cert.name == 'ACustomName1' -def test_reissue_certificate(issuer_plugin, authority, certificate): +def test_reissue_certificate(issuer_plugin, crypto_authority, certificate, logged_in_user): from lemur.certificates.service import reissue_certificate + + # test-authority would return a mismatching private key, so use 'cryptography-issuer' plugin instead. 
+ certificate.authority = crypto_authority new_cert = reissue_certificate(certificate) assert new_cert @@ -487,7 +569,7 @@ def test_import(user): assert str(cert.not_after) == '2047-12-31T22:00:00+00:00' assert str(cert.not_before) == '2017-12-31T22:00:00+00:00' assert cert.issuer == 'LemurTrustUnittestsClass1CA2018' - assert cert.name == 'SAN-san.example.org-LemurTrustUnittestsClass1CA2018-20171231-20471231-AFF2DB4F8D2D4D8E80FA382AE27C2333-2' + assert cert.name.startswith('SAN-san.example.org-LemurTrustUnittestsClass1CA2018-20171231-20471231') cert = import_certificate(body=SAN_CERT_STR, chain=INTERMEDIATE_CERT_STR, private_key=SAN_CERT_KEY, owner='joe@example.com', name='ACustomName2', creator=user['user']) assert cert.name == 'ACustomName2' diff --git a/lemur/tests/test_validators.py b/lemur/tests/test_validators.py index 815b7c9d..c3d5357d 100644 --- a/lemur/tests/test_validators.py +++ b/lemur/tests/test_validators.py @@ -1,16 +1,28 @@ -import pytest from datetime import datetime -from .vectors import SAN_CERT_KEY + +import pytest from marshmallow.exceptions import ValidationError +from lemur.common.utils import parse_private_key +from lemur.common.validators import verify_private_key_match +from lemur.tests.vectors import INTERMEDIATE_CERT, SAN_CERT, SAN_CERT_KEY + def test_private_key(session): - from lemur.common.validators import private_key + parse_private_key(SAN_CERT_KEY) - private_key(SAN_CERT_KEY) + with pytest.raises(ValueError): + parse_private_key('invalid_private_key') + + +def test_validate_private_key(session): + key = parse_private_key(SAN_CERT_KEY) + + verify_private_key_match(key, SAN_CERT) with pytest.raises(ValidationError): - private_key('invalid_private_key') + # Wrong key for certificate + verify_private_key_match(key, INTERMEDIATE_CERT) def test_sub_alt_type(session): From 7fb0631ff025ebd09b7f95a8c68b90010cd32e23 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 15:37:19 +0100 Subject: [PATCH 042/357] Update entrypoint --- docker/entrypoint | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/docker/entrypoint b/docker/entrypoint index 1c895b16..ebfa9bfa 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -1,18 +1,27 @@ #!/bin/sh -export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB" +if [ -z "${POSTGRES_USER}" ] || [ -z "${POSTGRES_PASSWORD}" ] || [ -z "${POSTGRES_HOST}" ] || [ -z "${POSTGRES_DB}" ];the + echo " # Vars not set" + exit 1 +fi + +export POSTGRES_PORT="${POSTGRES_PORT:-5432}" + +echo 'export SQLALCHEMY_DATABASE_URI="postgresql://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$POSTGRES_PORT/$POSTGRES_DB"' >> /etc/profile + +source /etc/profile PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'select 1;' -echo "Create Postgres trgm extension" +echo " # Create Postgres trgm extension" PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'CREATE EXTENSION pg_trgm;' -echo "Done" +echo " # Done" if [ -z ${SKIP_SSL} ]; then if [ ! -f /etc/nginx/ssl/server.crt ] && [ ! 
-f /etc/nginx/ssl/server.key ]; then - openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -days 365 -subj "/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Org/CN=FAKE" + openssl req -x509 -newkey rsa:4096 -keyout /etc/nginx/ssl/server.key -out /etc/nginx/ssl/server.crt -days 365 -subj "/C=FAKE/ST=FAKE/L=FAKE/O=FAKE/OU=FAKE/CN=FAKE" fi - cp default.conf default_ssl.conf + mv /etc/nginx/conf.d/default-ssl.conf.a /etc/nginx/conf.d/default-ssl.conf then # if [ ! -f /home/lemur/.lemur/lemur.conf.py ]; then @@ -22,10 +31,9 @@ then # echo "Done" # fi -echo "Running init" +echo " # Running init" su lemur -c "python3 /opt/lemur/lemur/manage.py init" -#export LEMUR_CONF=/home/lemur/.lemur/lemur.conf.py ; python3 /opt/lemur/lemur/manage.py init -echo "Done" +echo " # Done" # echo "Creating user" # https://github.com/Netflix/lemur/issues/ @@ -36,10 +44,10 @@ cron_notify="${CRON_NOTIFY:-"0 22 * * *"}" cron_sync="${CRON_SYNC:-"*/15 * * * *"}" cron_check_revoked="${CRON_CHECK_REVOKED:-"0 22 * * *"}" -echo "Populating crontab" +echo " # Populating crontab" echo "${cron_notify} lemur python3 /opt/lemur/lemur/manage.py notify expirations" >> /etc/crontabs/root echo "${cron_sync} lemur python3 /opt/lemur/lemur/manage.py source sync -s all" >> /etc/crontabs/root echo "${cron_check_revoked} lemur /opt/lemur/lemur/manage.py certificate check_revoked" >> /etc/crontabs/root -echo "Done" +echo " # Done" exec "$@" From 728be37de9a969f164de3f750efece77e9c43938 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 15:37:48 +0100 Subject: [PATCH 043/357] Update Dockerfile --- docker/Dockerfile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index d2ae56a3..b105b1fb 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -13,6 +13,7 @@ COPY entrypoint / COPY lemur.conf.py /home/lemur/.lemur/lemur.conf.py COPY supervisor.conf / COPY default.conf /etc/nginx/conf.d/ +COPY default-ssl.conf /etc/nginx/conf.d/ RUN addgroup -S ${group} -g ${gid} && \ adduser -D -S ${user} -G ${group} -u ${uid} && \ @@ -41,7 +42,7 @@ RUN addgroup -S ${group} -g ${gid} && \ pip3 install --upgrade pip && \ pip3 install --upgrade setuptools && \ chmod +x /entrypoint && \ - mkdir -p /run/nginx/ && \ + mkdir -p /run/nginx/ /etc/nginx/ssl/ && \ chown -R $user:$group /opt/lemur/ /home/lemur/.lemur/ && \ ln -s /dev/stdout /home/lemur/.lemur/lemur.log From 4faedf3e5b8280161169c488e89337fcc3ee2683 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 16:58:51 +0100 Subject: [PATCH 044/357] Update entrypoint --- docker/entrypoint | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/entrypoint b/docker/entrypoint index ebfa9bfa..f97e2cdb 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -1,6 +1,6 @@ #!/bin/sh -if [ -z "${POSTGRES_USER}" ] || [ -z "${POSTGRES_PASSWORD}" ] || [ -z "${POSTGRES_HOST}" ] || [ -z "${POSTGRES_DB}" ];the +if [ -z "${POSTGRES_USER}" ] || [ -z "${POSTGRES_PASSWORD}" ] || [ -z "${POSTGRES_HOST}" ] || [ -z "${POSTGRES_DB}" ];then echo " # Vars not set" exit 1 fi @@ -22,7 +22,7 @@ if [ -z ${SKIP_SSL} ]; then openssl req -x509 -newkey rsa:4096 -keyout /etc/nginx/ssl/server.key -out /etc/nginx/ssl/server.crt -days 365 -subj "/C=FAKE/ST=FAKE/L=FAKE/O=FAKE/OU=FAKE/CN=FAKE" fi mv /etc/nginx/conf.d/default-ssl.conf.a /etc/nginx/conf.d/default-ssl.conf -then +fi # if [ ! 
-f /home/lemur/.lemur/lemur.conf.py ]; then # echo "Creating config" From 809ca0fcfe28198aae8b28f521fd0a2ee88b5494 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 17:13:31 +0100 Subject: [PATCH 045/357] Update Dockerfile --- docker/Dockerfile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index b105b1fb..8ebb5241 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -43,8 +43,7 @@ RUN addgroup -S ${group} -g ${gid} && \ pip3 install --upgrade setuptools && \ chmod +x /entrypoint && \ mkdir -p /run/nginx/ /etc/nginx/ssl/ && \ - chown -R $user:$group /opt/lemur/ /home/lemur/.lemur/ && \ - ln -s /dev/stdout /home/lemur/.lemur/lemur.log + chown -R $user:$group /opt/lemur/ /home/lemur/.lemur/ WORKDIR /opt/lemur From 628aaf2748a46fc302fc73a61149ec4c2c9629a5 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 17:36:52 +0100 Subject: [PATCH 046/357] Update entrypoint --- docker/entrypoint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/entrypoint b/docker/entrypoint index f97e2cdb..b2850963 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -19,7 +19,7 @@ echo " # Done" if [ -z ${SKIP_SSL} ]; then if [ ! -f /etc/nginx/ssl/server.crt ] && [ ! -f /etc/nginx/ssl/server.key ]; then - openssl req -x509 -newkey rsa:4096 -keyout /etc/nginx/ssl/server.key -out /etc/nginx/ssl/server.crt -days 365 -subj "/C=FAKE/ST=FAKE/L=FAKE/O=FAKE/OU=FAKE/CN=FAKE" + openssl req -x509 -newkey rsa:4096 -keyout /etc/nginx/ssl/server.key -out /etc/nginx/ssl/server.crt -days 365 -subj "/C=US/ST=FAKE/L=FAKE/O=FAKE/OU=FAKE/CN=FAKE" fi mv /etc/nginx/conf.d/default-ssl.conf.a /etc/nginx/conf.d/default-ssl.conf fi From c0f6e5a134274a3fa329645738755c29a27e2e04 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 18:03:39 +0100 Subject: [PATCH 047/357] Update default-ssl.conf --- docker/default-ssl.conf | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/docker/default-ssl.conf b/docker/default-ssl.conf index 8b791c45..2235b88d 100644 --- a/docker/default-ssl.conf +++ b/docker/default-ssl.conf @@ -2,6 +2,30 @@ add_header X-Frame-Options DENY; add_header X-Content-Type-Options nosniff; add_header X-XSS-Protection "1; mode=block"; +server { + listen 80; + server_name _; + access_log /dev/stdout; + error_log /dev/stderr; + + location /api { + proxy_pass http://127.0.0.1:8000; + proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504; + proxy_redirect off; + proxy_buffering off; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + } + + location / { + root /opt/lemur/lemur/static/dist; + include mime.types; + index index.html; + } + +} + server { listen 443; server_name _; From 918af0873f8ba4102b0a5283f4c2f140e7a2508b Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 18:35:17 +0100 Subject: [PATCH 048/357] Update default-ssl.conf --- docker/default-ssl.conf | 24 ------------------------ 1 file changed, 24 deletions(-) diff --git a/docker/default-ssl.conf b/docker/default-ssl.conf index 2235b88d..8b791c45 100644 --- a/docker/default-ssl.conf +++ b/docker/default-ssl.conf @@ -2,30 +2,6 @@ add_header X-Frame-Options DENY; add_header X-Content-Type-Options nosniff; add_header X-XSS-Protection "1; mode=block"; -server { - listen 80; - server_name _; - access_log /dev/stdout; - error_log /dev/stderr; - - location /api { - proxy_pass http://127.0.0.1:8000; - 
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504; - proxy_redirect off; - proxy_buffering off; - proxy_set_header Host $host; - proxy_set_header X-Real-IP $remote_addr; - proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; - } - - location / { - root /opt/lemur/lemur/static/dist; - include mime.types; - index index.html; - } - -} - server { listen 443; server_name _; From ff0dbdcc5a1b1f2fefcb2fceab3dd6f695ab0dff Mon Sep 17 00:00:00 2001 From: Lukas M Date: Mon, 31 Dec 2018 18:36:02 +0100 Subject: [PATCH 049/357] Update entrypoint --- docker/entrypoint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/entrypoint b/docker/entrypoint index b2850963..565c0fd6 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -1,7 +1,7 @@ #!/bin/sh if [ -z "${POSTGRES_USER}" ] || [ -z "${POSTGRES_PASSWORD}" ] || [ -z "${POSTGRES_HOST}" ] || [ -z "${POSTGRES_DB}" ];then - echo " # Vars not set" + echo "Database vars not set" exit 1 fi From 3cc63c6618846bc1e15b56458c8ce5aeca247641 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 11:05:45 +0100 Subject: [PATCH 050/357] Update entrypoint --- docker/entrypoint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/entrypoint b/docker/entrypoint index 565c0fd6..d7ace70a 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -17,7 +17,7 @@ echo " # Create Postgres trgm extension" PGPASSWORD=$POSTGRES_PASSWORD psql -h $POSTGRES_HOST -p $POSTGRES_PORT -U $POSTGRES_USER -d $POSTGRES_DB --command 'CREATE EXTENSION pg_trgm;' echo " # Done" -if [ -z ${SKIP_SSL} ]; then +if [ -z "${SKIP_SSL}" ]; then if [ ! -f /etc/nginx/ssl/server.crt ] && [ ! -f /etc/nginx/ssl/server.key ]; then openssl req -x509 -newkey rsa:4096 -keyout /etc/nginx/ssl/server.key -out /etc/nginx/ssl/server.crt -days 365 -subj "/C=US/ST=FAKE/L=FAKE/O=FAKE/OU=FAKE/CN=FAKE" fi From 0d0c295f82705a8173a4530f3b9393898bfe9c37 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 11:33:49 +0100 Subject: [PATCH 051/357] Update entrypoint --- docker/entrypoint | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/entrypoint b/docker/entrypoint index d7ace70a..18ab0da5 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -42,12 +42,12 @@ echo " # Done" cron_notify="${CRON_NOTIFY:-"0 22 * * *"}" cron_sync="${CRON_SYNC:-"*/15 * * * *"}" -cron_check_revoked="${CRON_CHECK_REVOKED:-"0 22 * * *"}" +cron_revoked="${CRON_CHECK_REVOKED:-"0 22 * * *"}" echo " # Populating crontab" -echo "${cron_notify} lemur python3 /opt/lemur/lemur/manage.py notify expirations" >> /etc/crontabs/root -echo "${cron_sync} lemur python3 /opt/lemur/lemur/manage.py source sync -s all" >> /etc/crontabs/root -echo "${cron_check_revoked} lemur /opt/lemur/lemur/manage.py certificate check_revoked" >> /etc/crontabs/root +echo "${cron_notify} lemur python3 /opt/lemur/lemur/manage.py notify expirations" > /etc/crontabs/lemur_notify +echo "${cron_sync} lemur python3 /opt/lemur/lemur/manage.py source sync -s all" > /etc/crontabs/lemur_sync +echo "${cron_revoked} lemur python3 /opt/lemur/lemur/manage.py certificate check_revoked" > /etc/crontabs/lemur_revoked echo " # Done" exec "$@" From bb4b781d246297e298143c9153e10088d0d8660d Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 11:46:56 +0100 Subject: [PATCH 052/357] Update entrypoint --- docker/entrypoint | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/entrypoint b/docker/entrypoint index 18ab0da5..ad1d310c 100644 --- a/docker/entrypoint +++ 
b/docker/entrypoint @@ -22,6 +22,7 @@ if [ -z "${SKIP_SSL}" ]; then openssl req -x509 -newkey rsa:4096 -keyout /etc/nginx/ssl/server.key -out /etc/nginx/ssl/server.crt -days 365 -subj "/C=US/ST=FAKE/L=FAKE/O=FAKE/OU=FAKE/CN=FAKE" fi mv /etc/nginx/conf.d/default-ssl.conf.a /etc/nginx/conf.d/default-ssl.conf + mv /etc/nginx/conf.d/default.conf /etc/nginx/conf.d/default.conf.a fi # if [ ! -f /home/lemur/.lemur/lemur.conf.py ]; then From 28382ce728d25c190d5dca14d88a65d69d0c6802 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 11:48:42 +0100 Subject: [PATCH 053/357] Update default-ssl.conf --- docker/default-ssl.conf | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docker/default-ssl.conf b/docker/default-ssl.conf index 8b791c45..86c770df 100644 --- a/docker/default-ssl.conf +++ b/docker/default-ssl.conf @@ -2,6 +2,12 @@ add_header X-Frame-Options DENY; add_header X-Content-Type-Options nosniff; add_header X-XSS-Protection "1; mode=block"; +server { + listen 80; + server_name _; + return 301 https://$host$request_uri; +} + server { listen 443; server_name _; From 4570fcf7fa07cd42b249e67926f1a4bfc5e24990 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 11:49:24 +0100 Subject: [PATCH 054/357] Rename docker/default-ssl.conf to docker/nginx/default-ssl.conf --- docker/{ => nginx}/default-ssl.conf | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docker/{ => nginx}/default-ssl.conf (100%) diff --git a/docker/default-ssl.conf b/docker/nginx/default-ssl.conf similarity index 100% rename from docker/default-ssl.conf rename to docker/nginx/default-ssl.conf From 248c0d226f827e0c612450baacf27100670079ad Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 11:49:36 +0100 Subject: [PATCH 055/357] Rename docker/default.conf to docker/nginx/default.conf --- docker/{ => nginx}/default.conf | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docker/{ => nginx}/default.conf (100%) diff --git a/docker/default.conf b/docker/nginx/default.conf similarity index 100% rename from docker/default.conf rename to docker/nginx/default.conf From 949ebfa2850f02f1e2f875706192fe9dddb8f299 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 11:49:49 +0100 Subject: [PATCH 056/357] Update Dockerfile --- docker/Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 8ebb5241..7fa61700 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -12,8 +12,8 @@ ENV group lemur COPY entrypoint / COPY lemur.conf.py /home/lemur/.lemur/lemur.conf.py COPY supervisor.conf / -COPY default.conf /etc/nginx/conf.d/ -COPY default-ssl.conf /etc/nginx/conf.d/ +COPY nginx/default.conf /etc/nginx/conf.d/ +COPY nginx/default-ssl.conf /etc/nginx/conf.d/ RUN addgroup -S ${group} -g ${gid} && \ adduser -D -S ${user} -G ${group} -u ${uid} && \ From 6c1129c946a4b47bf966e9c003335122995dc6c6 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 11:50:14 +0100 Subject: [PATCH 057/357] Rename docker/lemur.conf.py to docker/src/lemur.conf.py --- docker/{ => src}/lemur.conf.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename docker/{ => src}/lemur.conf.py (100%) diff --git a/docker/lemur.conf.py b/docker/src/lemur.conf.py similarity index 100% rename from docker/lemur.conf.py rename to docker/src/lemur.conf.py From 125a885742a19c0eb2f821007d168b0b22b98f45 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 11:50:48 +0100 Subject: [PATCH 058/357] Update Dockerfile --- docker/Dockerfile | 3 +-- 1 file 
changed, 1 insertion(+), 2 deletions(-) diff --git a/docker/Dockerfile b/docker/Dockerfile index 7fa61700..f7d1caf7 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -8,9 +8,8 @@ ENV gid 1337 ENV user lemur ENV group lemur - COPY entrypoint / -COPY lemur.conf.py /home/lemur/.lemur/lemur.conf.py +COPY src/lemur.conf.py /home/lemur/.lemur/lemur.conf.py COPY supervisor.conf / COPY nginx/default.conf /etc/nginx/conf.d/ COPY nginx/default-ssl.conf /etc/nginx/conf.d/ From 7cbdc09055a04c747b2ab190b7e4d5b3e2144761 Mon Sep 17 00:00:00 2001 From: Lukas M Date: Tue, 1 Jan 2019 12:09:06 +0100 Subject: [PATCH 059/357] Update entrypoint --- docker/entrypoint | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/entrypoint b/docker/entrypoint index ad1d310c..6077167a 100644 --- a/docker/entrypoint +++ b/docker/entrypoint @@ -19,7 +19,7 @@ echo " # Done" if [ -z "${SKIP_SSL}" ]; then if [ ! -f /etc/nginx/ssl/server.crt ] && [ ! -f /etc/nginx/ssl/server.key ]; then - openssl req -x509 -newkey rsa:4096 -keyout /etc/nginx/ssl/server.key -out /etc/nginx/ssl/server.crt -days 365 -subj "/C=US/ST=FAKE/L=FAKE/O=FAKE/OU=FAKE/CN=FAKE" + openssl req -x509 -newkey rsa:4096 -nodes -keyout /etc/nginx/ssl/server.key -out /etc/nginx/ssl/server.crt -days 365 -subj "/C=US/ST=FAKE/L=FAKE/O=FAKE/OU=FAKE/CN=FAKE" fi mv /etc/nginx/conf.d/default-ssl.conf.a /etc/nginx/conf.d/default-ssl.conf mv /etc/nginx/conf.d/default.conf /etc/nginx/conf.d/default.conf.a From 3ac5361cb2b22775c7bd2f2fe5989c919919d9af Mon Sep 17 00:00:00 2001 From: bby-bishopclark <30503374+bby-bishopclark@users.noreply.github.com> Date: Thu, 3 Jan 2019 07:58:42 -0800 Subject: [PATCH 060/357] Update index.rst Simple English gaffes noticed while perusing docs -- Setup vs set up, it's vs English, etc. --- docs/quickstart/index.rst | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docs/quickstart/index.rst b/docs/quickstart/index.rst index 70ca1312..adeadd7c 100644 --- a/docs/quickstart/index.rst +++ b/docs/quickstart/index.rst @@ -22,7 +22,7 @@ Some basic prerequisites which you'll need in order to run Lemur: Installing Build Dependencies ----------------------------- -If installing Lemur on a bare Ubuntu OS you will need to grab the following packages so that Lemur can correctly build it's dependencies: +If installing Lemur on a bare Ubuntu OS you will need to grab the following packages so that Lemur can correctly build its dependencies: .. code-block:: bash @@ -117,7 +117,7 @@ Simply run: .. note:: This command will create a default configuration under ``~/.lemur/lemur.conf.py`` you can specify this location by passing the ``config_path`` parameter to the ``create_config`` command. -You can specify ``-c`` or ``--config`` to any Lemur command to specify the current environment you are working in. Lemur will also look under the environmental variable ``LEMUR_CONF`` should that be easier to setup in your environment. +You can specify ``-c`` or ``--config`` to any Lemur command to specify the current environment you are working in. Lemur will also look under the environmental variable ``LEMUR_CONF`` should that be easier to set up in your environment. 
Update your configuration @@ -144,7 +144,7 @@ Before Lemur will run you need to fill in a few required variables in the config LEMUR_DEFAULT_ORGANIZATION LEMUR_DEFAULT_ORGANIZATIONAL_UNIT -Setup Postgres +Set Up Postgres -------------- For production, a dedicated database is recommended, for this guide we will assume postgres has been installed and is on the same machine that Lemur is installed on. @@ -193,10 +193,10 @@ Additional notifications can be created through the UI or API. See :ref:`Creati .. note:: It is recommended that once the ``lemur`` user is created that you create individual users for every day access. There is currently no way for a user to self enroll for Lemur access, they must have an administrator create an account for them or be enrolled automatically through SSO. This can be done through the CLI or UI. See :ref:`Creating Users ` and :ref:`Command Line Interface ` for details. -Setup a Reverse Proxy +Set Up a Reverse Proxy --------------------- -By default, Lemur runs on port 8000. Even if you change this, under normal conditions you won't be able to bind to port 80. To get around this (and to avoid running Lemur as a privileged user, which you shouldn't), we need setup a simple web proxy. There are many different web servers you can use for this, we like and recommend Nginx. +By default, Lemur runs on port 8000. Even if you change this, under normal conditions you won't be able to bind to port 80. To get around this (and to avoid running Lemur as a privileged user, which you shouldn't), we need to set up a simple web proxy. There are many different web servers you can use for this, we like and recommend Nginx. Proxying with Nginx From c62bcd1456bc35198a5895588e6ab042d0213fe5 Mon Sep 17 00:00:00 2001 From: sirferl Date: Mon, 7 Jan 2019 10:02:37 +0100 Subject: [PATCH 061/357] repaired several lint errors --- lemur/plugins/lemur_adcs/plugin.py | 68 ++++++++++++++---------------- 1 file changed, 32 insertions(+), 36 deletions(-) diff --git a/lemur/plugins/lemur_adcs/plugin.py b/lemur/plugins/lemur_adcs/plugin.py index 48a3e85b..31dba7b2 100644 --- a/lemur/plugins/lemur_adcs/plugin.py +++ b/lemur/plugins/lemur_adcs/plugin.py @@ -1,12 +1,11 @@ from lemur.plugins.bases import IssuerPlugin, SourcePlugin import requests -import datetime import lemur_adcs as ADCS from certsrv import Certsrv -import ssl from OpenSSL import crypto from flask import current_app + class ADCSIssuerPlugin(IssuerPlugin): title = 'ADCS' slug = 'adcs-issuer' @@ -27,36 +26,37 @@ class ADCSIssuerPlugin(IssuerPlugin): Creates an authority, this authority is then used by Lemur to allow a user to specify which Certificate Authority they want to sign their certificate. 
- + :param options: :return: """ + adcs_root = current_app.config.get('ADCS_ROOT') + adcs_issuing = current_app.config.get('ADCS_ISSUING') role = {'username': '', 'password': '', 'name': 'adcs'} - return constants.ADCS_ROOT, constants.ADCS_ISSUING, [role] + return adcs_root, adcs_issuing, [role] def create_certificate(self, csr, issuer_options): adcs_server = current_app.config.get('ADCS_SERVER') adcs_user = current_app.config.get('ADCS_USER') adcs_pwd = current_app.config.get('ADCS_PWD') adcs_auth_method = current_app.config.get('ADCS_AUTH_METHOD') - ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method = adcs_auth_method) + adcs_template = current_app.config.get('ADCS_TEMPLATE') + ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method) current_app.logger.info("Requesting CSR: {0}".format(csr)) current_app.logger.info("Issuer options: {0}".format(issuer_options)) - cert, req_id = ca_server.get_cert(csr, ADCS_TEMPLATE, encoding='b64').decode('utf-8').replace('\r\n', '\n') + cert, req_id = ca_server.get_cert(csr, adcs_template, encoding='b64').decode('utf-8').replace('\r\n', '\n') chain = ca_server.get_ca_cert(encoding='b64').decode('utf-8').replace('\r\n', '\n') return cert, chain, req_id - + def revoke_certificate(self, certificate, comments): - # requests.put('a third party') - raise NotImplementedError('Not implemented\n', self,certificate, comments) - + raise NotImplementedError('Not implemented\n', self, certificate, comments) + def get_ordered_certificate(self, order_id): - # requests.get('already existing certificate') - raise NotImplementedError('Not implemented\n',self, order_id) - + raise NotImplementedError('Not implemented\n', self, order_id) + def canceled_ordered_certificate(self, pending_cert, **kwargs): - # requests.put('cancel an order that has yet to be issued') - raise NotImplementedError('Not implemented\n',self, pending_cert, **kwargs) + raise NotImplementedError('Not implemented\n', self, pending_cert, **kwargs) + class ADCSSourcePlugin(SourcePlugin): title = 'ADCS' @@ -67,54 +67,50 @@ class ADCSSourcePlugin(SourcePlugin): author = 'sirferl' author_url = 'https://github.com/sirferl/lemur' options = [ - { + { 'name': 'dummy', 'type': 'str', 'required': False, 'validation': '/^[0-9]{12,12}$/', 'helpMessage': 'Just to prevent error' } - ] - - def get_certificates(self,options, **kwargs): + + def get_certificates(self, options, **kwargs): adcs_server = current_app.config.get('ADCS_SERVER') adcs_user = current_app.config.get('ADCS_USER') adcs_pwd = current_app.config.get('ADCS_PWD') adcs_auth_method = current_app.config.get('ADCS_AUTH_METHOD') adcs_start = current_app.config.get('ADCS_START') adcs_stop = current_app.config.get('ADCS_STOP') - ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method = adcs_auth_method) + ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method) out_certlist = [] - for id in range(adcs_start,adcs_stop): - try: + for id in range(adcs_start, adcs_stop): + try: cert = ca_server.get_existing_cert(id, encoding='b64').decode('utf-8').replace('\r\n', '\n') except Exception as err: if '{0}'.format(err).find("CERTSRV_E_PROPERTY_EMPTY"): - #this error indicates end of certificate list(?), so we stop + # this error indicates end of certificate list(?), so we stop break else: # We do nothing in case there is no certificate returned with the current id for other reasons current_app.logger.info("Error with id {0}: {1}".format(id, err)) - else: - #we have a certificate + else: + # we 
have a certificate pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, cert) - #loop through extensions to see if we find "TLS Web Server Authentication" - for e_id in range(0,pubkey.get_extension_count()-1): + # loop through extensions to see if we find "TLS Web Server Authentication" + for e_id in range(0, pubkey.get_extension_count() - 1): try: extension = '{0}'.format(pubkey.get_extension(e_id)) - except: + except Exception: extensionn = '' - if extension.find("TLS Web Server Authentication") != -1: - out_certlist.append ( { + if extension.find("TLS Web Server Authentication") != -1: + out_certlist.append({ 'name': format(pubkey.get_subject().CN), - 'body' : cert}) + 'body': cert}) break - return out_certlist - - def get_endpoints(self, options, **kwargs): + def get_endpoints(self, options, **kwargs): # There are no endpoints in the ADCS - raise NotImplementedError('Not implemented\n',self, options, **kwargs) - + raise NotImplementedError('Not implemented\n', self, options, **kwargs) From a43476bc8702f56ac4261beff09fa8ca59a1f42d Mon Sep 17 00:00:00 2001 From: sirferl Date: Mon, 7 Jan 2019 11:04:27 +0100 Subject: [PATCH 062/357] minor errors after lint fix --- lemur/plugins/lemur_adcs/plugin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_adcs/plugin.py b/lemur/plugins/lemur_adcs/plugin.py index 31dba7b2..db068eb3 100644 --- a/lemur/plugins/lemur_adcs/plugin.py +++ b/lemur/plugins/lemur_adcs/plugin.py @@ -1,6 +1,6 @@ from lemur.plugins.bases import IssuerPlugin, SourcePlugin import requests -import lemur_adcs as ADCS +from lemur.plugins import lemur_adcs as ADCS from certsrv import Certsrv from OpenSSL import crypto from flask import current_app @@ -9,7 +9,7 @@ from flask import current_app class ADCSIssuerPlugin(IssuerPlugin): title = 'ADCS' slug = 'adcs-issuer' - description = 'Enables the creation of certificates by ADCS (Active Direcory Certificate Services)' + description = 'Enables the creation of certificates by ADCS (Active Directory Certificate Services)' version = ADCS.VERSION author = 'sirferl' From af88ad0f0da9d2dcee00f4455faf2b345594d905 Mon Sep 17 00:00:00 2001 From: sirferl Date: Mon, 7 Jan 2019 11:35:56 +0100 Subject: [PATCH 063/357] changed broken kombu ref. from 4.2.2 to 4.2.1 because travis build fails --- requirements-docs.txt | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-docs.txt b/requirements-docs.txt index 80f38e5f..9c3cef5e 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -49,7 +49,7 @@ jinja2==2.10 jmespath==0.9.3 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.2.2 +kombu==4.2.1 # 4.2.2 was broken sirferl lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.0 diff --git a/requirements.txt b/requirements.txt index e88bcb90..c1658efc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -47,7 +47,7 @@ jinja2==2.10 jmespath==0.9.3 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.2.2 # via celery +kombu==4.2.1 # via celery - 4.2.2. 
was removed sirferl lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.0 # via jinja2, mako From faa91ef2a71aac12cbf68910e172dd7beec96ad5 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 8 Jan 2019 09:47:46 -0800 Subject: [PATCH 064/357] Update requirements with Kombu fix --- requirements-dev.txt | 16 +++++++++------- requirements-docs.txt | 40 ++++++++++++++++++++-------------------- requirements-tests.txt | 10 +++++----- requirements.in | 1 + requirements.txt | 24 ++++++++++++------------ 5 files changed, 47 insertions(+), 44 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 7b427b20..e9e47ed5 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -8,18 +8,19 @@ aspy.yaml==1.1.1 # via pre-commit bleach==3.0.2 # via readme-renderer cached-property==1.5.1 # via pre-commit certifi==2018.11.29 # via requests -cfgv==1.1.0 # via pre-commit +cfgv==1.4.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.1.7 # via pre-commit +identify==1.1.8 # via pre-commit idna==2.8 # via requests -importlib-metadata==0.7 # via pre-commit +importlib-metadata==0.8 # via pre-commit +importlib-resources==1.0.2 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 -pkginfo==1.4.2 # via twine -pre-commit==1.12.0 +pkginfo==1.5.0 # via twine +pre-commit==1.13.0 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer @@ -29,8 +30,9 @@ requests-toolbelt==0.8.0 # via twine requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit -tqdm==4.28.1 # via twine +tqdm==4.29.0 # via twine twine==1.12.1 urllib3==1.24.1 # via requests -virtualenv==16.1.0 # via pre-commit +virtualenv==16.2.0 # via pre-commit webencodings==0.5.1 # via bleach +zipp==0.3.3 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index 3f036915..bb1fe767 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,21 +4,21 @@ # # pip-compile --no-index --output-file requirements-docs.txt requirements-docs.in # -acme==0.29.1 +acme==0.30.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.5 amqp==2.3.2 aniso8601==4.0.1 -arrow==0.12.1 +arrow==0.13.0 asn1crypto==0.24.0 asyncpool==1.0 babel==2.6.0 # via sphinx -bcrypt==3.1.4 +bcrypt==3.1.5 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.60 -botocore==1.12.60 +boto3==1.9.75 +botocore==1.12.75 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 @@ -35,13 +35,13 @@ flask-cors==3.0.7 flask-mail==0.9.1 flask-migrate==2.3.1 flask-principal==0.4.0 -flask-restful==0.3.6 +flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.3.2 flask==1.0.2 future==0.17.1 gunicorn==19.9.0 -idna==2.7 +idna==2.8 imagesize==1.1.0 # via sphinx inflection==0.3.1 itsdangerous==1.1.0 @@ -49,12 +49,12 @@ jinja2==2.10 jmespath==0.9.3 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.2.2 +kombu==4.2.1 lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.0 marshmallow-sqlalchemy==0.15.0 -marshmallow==2.16.3 +marshmallow==2.17.0 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==18.0 # via sphinx @@ -62,35 +62,35 @@ paramiko==2.4.2 pbr==5.1.1 pem==18.2.0 psycopg2==2.7.6.1 -pyasn1-modules==0.2.2 -pyasn1==0.4.4 +pyasn1-modules==0.2.3 +pyasn1==0.4.5 pycparser==2.19 pygments==2.3.1 # via sphinx -pyjwt==1.7.0 +pyjwt==1.7.1 pynacl==1.3.0 pyopenssl==18.0.0 pyparsing==2.3.0 # via packaging pyrfc3339==1.1 python-dateutil==2.7.5 python-editor==1.0.3 
-pytz==2018.7 +pytz==2018.9 pyyaml==3.13 -raven[flask]==6.9.0 +raven[flask]==6.10.0 redis==2.10.6 requests-toolbelt==0.8.0 -requests[security]==2.20.1 +requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.1.13 -six==1.11.0 +six==1.12.0 snowballstemmer==1.2.1 # via sphinx sphinx-rtd-theme==0.4.2 -sphinx==1.8.2 +sphinx==1.8.3 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx -sqlalchemy-utils==0.33.9 -sqlalchemy==1.2.14 +sqlalchemy-utils==0.33.10 +sqlalchemy==1.2.15 tabulate==0.8.2 urllib3==1.24.1 -vine==1.1.4 +vine==1.2.0 werkzeug==0.14.1 xmltodict==0.11.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index 59c626f7..a11de6ec 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,9 +8,9 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.2.1 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.67 # via moto +boto3==1.9.75 # via moto boto==2.49.0 # via moto -botocore==1.12.67 # via boto3, moto, s3transfer +botocore==1.12.75 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.11.5 # via cryptography chardet==3.0.4 # via requests @@ -34,7 +34,7 @@ jsondiff==1.1.1 # via moto jsonpickle==1.0 # via aws-xray-sdk markupsafe==1.1.0 # via jinja2 mock==2.0.0 # via moto -more-itertools==4.3.0 # via pytest +more-itertools==5.0.0 # via pytest moto==1.3.7 nose==1.3.7 pbr==5.1.1 # via mock @@ -46,10 +46,10 @@ pycryptodome==3.7.2 # via python-jose pyflakes==2.0.0 pytest-flask==0.14.0 pytest-mock==1.10.0 -pytest==4.0.2 +pytest==4.1.0 python-dateutil==2.7.5 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto -pytz==2018.7 # via moto +pytz==2018.9 # via moto pyyaml==3.13 # via pyaml requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses diff --git a/requirements.in b/requirements.in index 9824650b..e427c9a2 100644 --- a/requirements.in +++ b/requirements.in @@ -25,6 +25,7 @@ future gunicorn inflection jinja2 +kombu<=4.2.2 # Kombu 4.2.2 breaks requirements lockfile marshmallow-sqlalchemy marshmallow diff --git a/requirements.txt b/requirements.txt index 7ee9a167..e3918631 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,19 +4,19 @@ # # pip-compile --no-index --output-file requirements.txt requirements.in # -acme==0.29.1 +acme==0.30.0 alembic-autogenerate-enums==0.0.2 alembic==1.0.5 # via flask-migrate amqp==2.3.2 # via kombu aniso8601==4.0.1 # via flask-restful -arrow==0.12.1 +arrow==0.13.0 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.5 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.67 -botocore==1.12.67 +boto3==1.9.75 +botocore==1.12.75 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 # via bcrypt, cryptography, pynacl @@ -46,20 +46,20 @@ jinja2==2.10 jmespath==0.9.3 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.2.2 # via celery +kombu==4.2.1 lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.0 # via jinja2, mako marshmallow-sqlalchemy==0.15.0 -marshmallow==2.16.3 +marshmallow==2.17.0 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 pbr==5.1.1 # via mock pem==18.2.0 psycopg2==2.7.6.1 -pyasn1-modules==0.2.2 # via python-ldap -pyasn1==0.4.4 # via ndg-httpsclient, paramiko, pyasn1-modules, python-ldap +pyasn1-modules==0.2.3 # via python-ldap +pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, python-ldap pycparser==2.19 # via cffi pyjwt==1.7.1 
pynacl==1.3.0 # via paramiko @@ -68,19 +68,19 @@ pyrfc3339==1.1 # via acme python-dateutil==2.7.5 # via alembic, arrow, botocore python-editor==1.0.3 # via alembic python-ldap==3.1.0 -pytz==2018.7 # via acme, celery, flask-restful, pyrfc3339 +pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 pyyaml==3.13 # via cloudflare -raven[flask]==6.9.0 +raven[flask]==6.10.0 redis==2.10.6 requests-toolbelt==0.8.0 # via acme requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.1.13 # via boto3 six==1.12.0 -sqlalchemy-utils==0.33.9 +sqlalchemy-utils==0.33.10 sqlalchemy==1.2.15 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.2 urllib3==1.24.1 # via botocore, requests -vine==1.1.4 # via amqp +vine==1.2.0 # via amqp werkzeug==0.14.1 # via flask xmltodict==0.11.0 From c95fde702376cd99d8cdb4d8b1bbaf89f0913666 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 8 Jan 2019 09:55:53 -0800 Subject: [PATCH 065/357] Better fix for kombu is to unpin it and modify makefile --- Makefile | 2 +- requirements-docs.txt | 2 +- requirements.in | 1 - requirements.txt | 2 +- 4 files changed, 3 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 19a69236..f859f554 100644 --- a/Makefile +++ b/Makefile @@ -113,10 +113,10 @@ endif @echo "--> Updating Python requirements" pip install --upgrade pip pip install --upgrade pip-tools + pip-compile --output-file requirements.txt requirements.in -U --no-index pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index pip-compile --output-file requirements-dev.txt requirements-dev.in -U --no-index pip-compile --output-file requirements-tests.txt requirements-tests.in -U --no-index - pip-compile --output-file requirements.txt requirements.in -U --no-index @echo "--> Done updating Python requirements" @echo "--> Removing python-ldap from requirements-docs.txt" grep -v "python-ldap" requirements-docs.txt > tempreqs && mv tempreqs requirements-docs.txt diff --git a/requirements-docs.txt b/requirements-docs.txt index bb1fe767..19ebb0ea 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -49,7 +49,7 @@ jinja2==2.10 jmespath==0.9.3 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.2.1 +kombu==4.2.2.post1 lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.0 diff --git a/requirements.in b/requirements.in index e427c9a2..9824650b 100644 --- a/requirements.in +++ b/requirements.in @@ -25,7 +25,6 @@ future gunicorn inflection jinja2 -kombu<=4.2.2 # Kombu 4.2.2 breaks requirements lockfile marshmallow-sqlalchemy marshmallow diff --git a/requirements.txt b/requirements.txt index e3918631..59871284 100644 --- a/requirements.txt +++ b/requirements.txt @@ -46,7 +46,7 @@ jinja2==2.10 jmespath==0.9.3 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.2.1 +kombu==4.2.2.post1 # via celery lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.0 # via jinja2, mako From a1ca61d81365b6bfb6a26973ff7fe73337cea32c Mon Sep 17 00:00:00 2001 From: sirferl Date: Wed, 9 Jan 2019 09:50:26 +0100 Subject: [PATCH 066/357] changed a too long comment --- lemur/plugins/lemur_adcs/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_adcs/plugin.py b/lemur/plugins/lemur_adcs/plugin.py index db068eb3..b7698474 100644 --- a/lemur/plugins/lemur_adcs/plugin.py +++ b/lemur/plugins/lemur_adcs/plugin.py @@ -93,7 +93,7 @@ class ADCSSourcePlugin(SourcePlugin): # this error indicates end of certificate list(?), so we stop break else: - # We do nothing in 
case there is no certificate returned with the current id for other reasons + # We do nothing in case there is no certificate returned for other reasons current_app.logger.info("Error with id {0}: {1}".format(id, err)) else: # we have a certificate From 3ee12cc50be99bb9e6b9b074f606468d9e2aa742 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 10 Jan 2019 09:26:15 -0800 Subject: [PATCH 067/357] Update requirements --- requirements-dev.txt | 7 +++---- requirements-docs.txt | 6 +++--- requirements-tests.txt | 8 ++++---- requirements.txt | 6 +++--- 4 files changed, 13 insertions(+), 14 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index e9e47ed5..21156588 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,8 +5,7 @@ # pip-compile --no-index --output-file requirements-dev.txt requirements-dev.in # aspy.yaml==1.1.1 # via pre-commit -bleach==3.0.2 # via readme-renderer -cached-property==1.5.1 # via pre-commit +bleach==3.1.0 # via readme-renderer certifi==2018.11.29 # via requests cfgv==1.4.0 # via pre-commit chardet==3.0.4 # via requests @@ -19,8 +18,8 @@ importlib-resources==1.0.2 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 -pkginfo==1.5.0 # via twine -pre-commit==1.13.0 +pkginfo==1.5.0.1 # via twine +pre-commit==1.14.0 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer diff --git a/requirements-docs.txt b/requirements-docs.txt index 19ebb0ea..f3182456 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -9,7 +9,7 @@ alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.5 amqp==2.3.2 -aniso8601==4.0.1 +aniso8601==4.1.0 arrow==0.13.0 asn1crypto==0.24.0 asyncpool==1.0 @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.5 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.75 -botocore==1.12.75 +boto3==1.9.76 +botocore==1.12.76 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 diff --git a/requirements-tests.txt b/requirements-tests.txt index a11de6ec..490d74d1 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,9 +8,9 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.2.1 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.75 # via moto +boto3==1.9.76 # via moto boto==2.49.0 # via moto -botocore==1.12.75 # via boto3, moto, s3transfer +botocore==1.12.76 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.11.5 # via cryptography chardet==3.0.4 # via requests @@ -38,7 +38,7 @@ more-itertools==5.0.0 # via pytest moto==1.3.7 nose==1.3.7 pbr==5.1.1 # via mock -pluggy==0.8.0 # via pytest +pluggy==0.8.1 # via pytest py==1.7.0 # via pytest pyaml==18.11.0 # via moto pycparser==2.19 # via cffi @@ -60,5 +60,5 @@ text-unidecode==1.2 # via faker urllib3==1.24.1 # via botocore, requests websocket-client==0.54.0 # via docker werkzeug==0.14.1 # via flask, moto, pytest-flask -wrapt==1.10.11 # via aws-xray-sdk +wrapt==1.11.0 # via aws-xray-sdk xmltodict==0.11.0 # via moto diff --git a/requirements.txt b/requirements.txt index 59871284..bc72db0a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,15 +8,15 @@ acme==0.30.0 alembic-autogenerate-enums==0.0.2 alembic==1.0.5 # via flask-migrate amqp==2.3.2 # via kombu -aniso8601==4.0.1 # via flask-restful +aniso8601==4.1.0 # via flask-restful arrow==0.13.0 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.5 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven 
-boto3==1.9.75 -botocore==1.12.75 +boto3==1.9.76 +botocore==1.12.76 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 # via bcrypt, cryptography, pynacl From 0e02e6da799af16120b9ddb54c7542a68aa4365f Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Fri, 11 Jan 2019 11:13:43 -0800 Subject: [PATCH 068/357] Be more forgiving to throttling --- lemur/plugins/lemur_aws/elb.py | 18 +++++++++--------- lemur/plugins/lemur_aws/iam.py | 8 ++++---- lemur/plugins/lemur_aws/sts.py | 16 +++++++++++++--- 3 files changed, 26 insertions(+), 16 deletions(-) diff --git a/lemur/plugins/lemur_aws/elb.py b/lemur/plugins/lemur_aws/elb.py index 4c4ce97f..b4391dd8 100644 --- a/lemur/plugins/lemur_aws/elb.py +++ b/lemur/plugins/lemur_aws/elb.py @@ -95,7 +95,7 @@ def get_all_elbs_v2(**kwargs): @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=1000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def get_listener_arn_from_endpoint(endpoint_name, endpoint_port, **kwargs): """ Get a listener ARN from an endpoint. @@ -113,7 +113,7 @@ def get_listener_arn_from_endpoint(endpoint_name, endpoint_port, **kwargs): @sts_client('elb') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=1000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def get_elbs(**kwargs): """ Fetches one page elb objects for a given account and region. @@ -123,7 +123,7 @@ def get_elbs(**kwargs): @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=1000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def get_elbs_v2(**kwargs): """ Fetches one page of elb objects for a given account and region. @@ -136,7 +136,7 @@ def get_elbs_v2(**kwargs): @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=1000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def describe_listeners_v2(**kwargs): """ Fetches one page of listener objects for a given elb arn. @@ -149,7 +149,7 @@ def describe_listeners_v2(**kwargs): @sts_client('elb') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=1000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs): """ Fetching all policies currently associated with an ELB. @@ -161,7 +161,7 @@ def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs): @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=1000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def describe_ssl_policies_v2(policy_names, **kwargs): """ Fetching all policies currently associated with an ELB. @@ -173,7 +173,7 @@ def describe_ssl_policies_v2(policy_names, **kwargs): @sts_client('elb') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=1000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def describe_load_balancer_types(policies, **kwargs): """ Describe the policies with policy details. 
@@ -185,7 +185,7 @@ def describe_load_balancer_types(policies, **kwargs): @sts_client('elb') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=1000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def attach_certificate(name, port, certificate_id, **kwargs): """ Attaches a certificate to a listener, throws exception @@ -205,7 +205,7 @@ def attach_certificate(name, port, certificate_id, **kwargs): @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=1000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def attach_certificate_v2(listener_arn, port, certificates, **kwargs): """ Attaches a certificate to a listener, throws exception diff --git a/lemur/plugins/lemur_aws/iam.py b/lemur/plugins/lemur_aws/iam.py index b2a07798..7010c909 100644 --- a/lemur/plugins/lemur_aws/iam.py +++ b/lemur/plugins/lemur_aws/iam.py @@ -52,7 +52,7 @@ def create_arn_from_cert(account_number, region, certificate_name): @sts_client('iam') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=100) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs): """ Upload a certificate to AWS @@ -95,7 +95,7 @@ def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs): @sts_client('iam') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=100) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def delete_cert(cert_name, **kwargs): """ Delete a certificate from AWS @@ -112,7 +112,7 @@ def delete_cert(cert_name, **kwargs): @sts_client('iam') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=100) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def get_certificate(name, **kwargs): """ Retrieves an SSL certificate. @@ -126,7 +126,7 @@ def get_certificate(name, **kwargs): @sts_client('iam') -@retry(retry_on_exception=retry_throttled, stop_max_attempt_number=7, wait_exponential_multiplier=100) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000) def get_certificates(**kwargs): """ Fetches one page of certificate objects for a given account. 
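
The hunks above drop the bounded exponential backoff (stop_max_attempt_number=7 with an exponential multiplier) in favor of fixed two-second retries (wait_fixed=2000), so a throttled call simply keeps retrying until AWS stops throttling it. Below is a minimal sketch of that pattern, assuming the retrying package and botocore already used by this plugin; the retry_throttled predicate shown is illustrative and not necessarily Lemur's exact helper.

    # Sketch: retry an AWS call only on throttling errors, waiting 2 s between attempts.
    from botocore.exceptions import ClientError
    from retrying import retry

    def retry_throttled(exception):
        # Report True only for throttling, so genuine failures surface immediately.
        if isinstance(exception, ClientError):
            return exception.response['Error']['Code'] == 'Throttling'
        return False

    @retry(retry_on_exception=retry_throttled, wait_fixed=2000)
    def get_certificates(client):
        # A throttled attempt sleeps 2000 ms and retries; with no stop_* argument the
        # decorator keeps retrying until the call succeeds or fails with another error.
        return client.list_server_certificates()

The sts.py change that follows layers botocore's own client-level retry configuration (max_attempts=20) underneath this decorator.
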
diff --git a/lemur/plugins/lemur_aws/sts.py b/lemur/plugins/lemur_aws/sts.py index 001ea2c8..6253ad7a 100644 --- a/lemur/plugins/lemur_aws/sts.py +++ b/lemur/plugins/lemur_aws/sts.py @@ -9,14 +9,22 @@ from functools import wraps import boto3 +from botocore.config import Config from flask import current_app +config = Config( + retries=dict( + max_attempts=20 + ) +) + + def sts_client(service, service_type='client'): def decorator(f): @wraps(f) def decorated_function(*args, **kwargs): - sts = boto3.client('sts') + sts = boto3.client('sts', config=config) arn = 'arn:aws:iam::{0}:role/{1}'.format( kwargs.pop('account_number'), current_app.config.get('LEMUR_INSTANCE_PROFILE', 'Lemur') @@ -31,7 +39,8 @@ def sts_client(service, service_type='client'): region_name=kwargs.pop('region', 'us-east-1'), aws_access_key_id=role['Credentials']['AccessKeyId'], aws_secret_access_key=role['Credentials']['SecretAccessKey'], - aws_session_token=role['Credentials']['SessionToken'] + aws_session_token=role['Credentials']['SessionToken'], + config=config ) kwargs['client'] = client elif service_type == 'resource': @@ -40,7 +49,8 @@ def sts_client(service, service_type='client'): region_name=kwargs.pop('region', 'us-east-1'), aws_access_key_id=role['Credentials']['AccessKeyId'], aws_secret_access_key=role['Credentials']['SecretAccessKey'], - aws_session_token=role['Credentials']['SessionToken'] + aws_session_token=role['Credentials']['SessionToken'], + config=config ) kwargs['resource'] = resource return f(*args, **kwargs) From c4e6e7c59bae61855ea1e0ea514fc8da5566b962 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Mon, 14 Jan 2019 08:02:27 -0800 Subject: [PATCH 069/357] Optimize DB cert filtering --- lemur/certificates/service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index c9a2fa24..e4503324 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -307,7 +307,7 @@ def render(args): if filt: terms = filt.split(';') - term = '%{0}%'.format(terms[1]) + term = '{0}%'.format(terms[1]) # Exact matches for quotes. 
Only applies to name, issuer, and cn if terms[1].startswith('"') and terms[1].endswith('"'): term = terms[1][1:-1] From 31a86687e72e02883a9d80abe345b5b5b64d2667 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Mon, 14 Jan 2019 09:20:02 -0800 Subject: [PATCH 070/357] Reduce the expense of joins --- lemur/certificates/service.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index e4503324..1b203260 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -20,7 +20,6 @@ from lemur.common.utils import generate_private_key, truthiness from lemur.destinations.models import Destination from lemur.domains.models import Domain from lemur.extensions import metrics, sentry, signals -from lemur.models import certificate_associations from lemur.notifications.models import Notification from lemur.pending_certificates.models import PendingCertificate from lemur.plugins.base import plugins @@ -341,13 +340,13 @@ def render(args): elif 'id' in terms: query = query.filter(Certificate.id == cast(terms[1], Integer)) elif 'name' in terms: - query = query.outerjoin(certificate_associations).outerjoin(Domain).filter( + query = query.filter( or_( Certificate.name.ilike(term), - Domain.name.ilike(term), + Certificate.domains.any(Domain.name.ilike(term)), Certificate.cn.ilike(term), ) - ).group_by(Certificate.id) + ) else: query = database.filter(query, Certificate, terms) From 3567a768d5a0e281d41d7c3f8bdb73bf3c6a7728 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Mon, 14 Jan 2019 13:35:55 -0800 Subject: [PATCH 071/357] Compare certificate hashes to determine if Lemur already has a synced certificate --- lemur/common/utils.py | 11 +++++++++++ lemur/sources/service.py | 5 +++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 62e59d69..0504c958 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -12,6 +12,7 @@ import string import sqlalchemy from cryptography import x509 from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import rsa, ec from cryptography.hazmat.primitives.serialization import load_pem_private_key from flask_restful.reqparse import RequestParser @@ -226,3 +227,13 @@ def truthiness(s): """If input string resembles something truthy then return True, else False.""" return s.lower() in ('true', 'yes', 'on', 't', '1') + + +def find_matching_certificates_by_hash(cert, matching_certs): + """Given a Cryptography-formatted certificate cert, and Lemur-formatted certificates (matching_certs), + determine if any of the certificate hashes match and return the matches.""" + matching = [] + for c in matching_certs: + if parse_certificate(c.body).fingerprint(hashes.SHA256()) == cert.fingerprint(hashes.SHA256()): + matching.append(c) + return matching diff --git a/lemur/sources/service.py b/lemur/sources/service.py index 227f1bce..55d2ee62 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -17,7 +17,7 @@ from lemur.endpoints import service as endpoint_service from lemur.destinations import service as destination_service from lemur.certificates.schemas import CertificateUploadInputSchema -from lemur.common.utils import parse_certificate +from lemur.common.utils import find_matching_certificates_by_hash, parse_certificate from lemur.common.defaults import serial from lemur.plugins.base import 
plugins @@ -126,7 +126,8 @@ def sync_certificates(source, user): if not exists: cert = parse_certificate(certificate['body']) - exists = certificate_service.get_by_serial(serial(cert)) + matching_serials = certificate_service.get_by_serial(serial(cert)) + exists = find_matching_certificates_by_hash(cert, matching_serials) if not certificate.get('owner'): certificate['owner'] = user.email From d3284a4006a87940ca485adea33957c116176c02 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 14 Jan 2019 17:52:06 -0800 Subject: [PATCH 072/357] adjusting the query to filter authorities based on matching CN --- lemur/authorities/service.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/lemur/authorities/service.py b/lemur/authorities/service.py index 024cb42a..41c381e3 100644 --- a/lemur/authorities/service.py +++ b/lemur/authorities/service.py @@ -15,6 +15,7 @@ from lemur import database from lemur.common.utils import truthiness from lemur.extensions import metrics from lemur.authorities.models import Authority +from lemur.certificates.models import Certificate from lemur.roles import service as role_service from lemur.certificates.service import upload @@ -179,7 +180,12 @@ def render(args): if 'active' in filt: query = query.filter(Authority.active == truthiness(terms[1])) elif 'cn' in filt: - query = query.join(Authority.active == truthiness(terms[1])) + term = '%{0}%'.format(terms[1]) + sub_query = database.session_query(Certificate.root_authority_id) \ + .filter(Certificate.cn.ilike(term)) \ + .subquery() + + query = query.filter(Authority.id.in_(sub_query)) else: query = database.filter(query, Authority, terms) From 7f88c24e8374f669ba1e12c3d5ff06892de3b04c Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 17 Jan 2019 14:56:04 -0800 Subject: [PATCH 073/357] Fix LetsEncrypt Dyn flow for duplicate CN/SAN --- lemur/common/utils.py | 11 +++++++++++ lemur/plugins/lemur_acme/dyn.py | 8 ++++++-- lemur/sources/service.py | 5 +++-- 3 files changed, 20 insertions(+), 4 deletions(-) diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 62e59d69..f26f07df 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -12,6 +12,7 @@ import string import sqlalchemy from cryptography import x509 from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import rsa, ec from cryptography.hazmat.primitives.serialization import load_pem_private_key from flask_restful.reqparse import RequestParser @@ -226,3 +227,13 @@ def truthiness(s): """If input string resembles something truthy then return True, else False.""" return s.lower() in ('true', 'yes', 'on', 't', '1') + + +def find_matching_certificates_by_hash(cert, matching_certs): + """Given a Cryptography-formatted certificate cert, and Lemur-formatted certificates (matching_certs), + determine if any of the certificate hashes match and return the matches.""" + matching = [] + for c in matching_certs: + if parse_certificate(c).fingerprint(hashes.SHA256()) == cert.body.fingerprint(hashes.SHA256()): + matching.append(c) + return matching diff --git a/lemur/plugins/lemur_acme/dyn.py b/lemur/plugins/lemur_acme/dyn.py index 9bab3a65..5d419f7f 100644 --- a/lemur/plugins/lemur_acme/dyn.py +++ b/lemur/plugins/lemur_acme/dyn.py @@ -5,7 +5,7 @@ import dns.exception import dns.name import dns.query import dns.resolver -from dyn.tm.errors import DynectCreateError +from dyn.tm.errors import DynectCreateError, DynectGetError from 
dyn.tm.session import DynectSession from dyn.tm.zones import Node, Zone, get_all_zones from flask import current_app @@ -119,7 +119,11 @@ def delete_txt_record(change_id, account_number, domain, token): zone = Zone(zone_name) node = Node(zone_name, fqdn) - all_txt_records = node.get_all_records_by_type('TXT') + try: + all_txt_records = node.get_all_records_by_type('TXT') + except DynectGetError: + # No Text Records remain or host is not in the zone anymore because all records have been deleted. + return for txt_record in all_txt_records: if txt_record.txtdata == ("{}".format(token)): current_app.logger.debug("Deleting TXT record name: {0}".format(fqdn)) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index 227f1bce..55d2ee62 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -17,7 +17,7 @@ from lemur.endpoints import service as endpoint_service from lemur.destinations import service as destination_service from lemur.certificates.schemas import CertificateUploadInputSchema -from lemur.common.utils import parse_certificate +from lemur.common.utils import find_matching_certificates_by_hash, parse_certificate from lemur.common.defaults import serial from lemur.plugins.base import plugins @@ -126,7 +126,8 @@ def sync_certificates(source, user): if not exists: cert = parse_certificate(certificate['body']) - exists = certificate_service.get_by_serial(serial(cert)) + matching_serials = certificate_service.get_by_serial(serial(cert)) + exists = find_matching_certificates_by_hash(cert, matching_serials) if not certificate.get('owner'): certificate['owner'] = user.email From d689f5cda3aad42ff7b6363c40332160ca3a395a Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 17 Jan 2019 14:59:57 -0800 Subject: [PATCH 074/357] Fix LetsEncrypt for duplicate CN/SAN --- requirements-dev.txt | 5 ++--- requirements-docs.txt | 18 +++++++++--------- requirements-tests.txt | 8 ++++---- requirements.txt | 16 ++++++++-------- 4 files changed, 23 insertions(+), 24 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 21156588..c1f55581 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -14,12 +14,11 @@ flake8==3.5.0 identify==1.1.8 # via pre-commit idna==2.8 # via requests importlib-metadata==0.8 # via pre-commit -importlib-resources==1.0.2 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.14.0 +pre-commit==1.14.2 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer @@ -29,7 +28,7 @@ requests-toolbelt==0.8.0 # via twine requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit -tqdm==4.29.0 # via twine +tqdm==4.29.1 # via twine twine==1.12.1 urllib3==1.24.1 # via requests virtualenv==16.2.0 # via pre-commit diff --git a/requirements-docs.txt b/requirements-docs.txt index f3182456..a7df5395 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -7,18 +7,18 @@ acme==0.30.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 -alembic==1.0.5 -amqp==2.3.2 +alembic==1.0.6 +amqp==2.4.0 aniso8601==4.1.0 arrow==0.13.0 asn1crypto==0.24.0 asyncpool==1.0 babel==2.6.0 # via sphinx -bcrypt==3.1.5 +bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.76 -botocore==1.12.76 +boto3==1.9.80 +botocore==1.12.80 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 @@ -54,7 +54,7 @@ lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.0 
marshmallow-sqlalchemy==0.15.0 -marshmallow==2.17.0 +marshmallow==2.18.0 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==18.0 # via sphinx @@ -69,7 +69,7 @@ pygments==2.3.1 # via sphinx pyjwt==1.7.1 pynacl==1.3.0 pyopenssl==18.0.0 -pyparsing==2.3.0 # via packaging +pyparsing==2.3.1 # via packaging pyrfc3339==1.1 python-dateutil==2.7.5 python-editor==1.0.3 @@ -87,8 +87,8 @@ sphinx-rtd-theme==0.4.2 sphinx==1.8.3 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx -sqlalchemy-utils==0.33.10 -sqlalchemy==1.2.15 +sqlalchemy-utils==0.33.11 +sqlalchemy==1.2.16 tabulate==0.8.2 urllib3==1.24.1 vine==1.2.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index 490d74d1..2d54dce6 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,9 +8,9 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.2.1 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.76 # via moto +boto3==1.9.80 # via moto boto==2.49.0 # via moto -botocore==1.12.76 # via boto3, moto, s3transfer +botocore==1.12.80 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.11.5 # via cryptography chardet==3.0.4 # via requests @@ -18,7 +18,7 @@ click==7.0 # via flask coverage==4.5.2 cryptography==2.4.2 # via moto docker-pycreds==0.4.0 # via docker -docker==3.6.0 # via moto +docker==3.7.0 # via moto docutils==0.14 # via botocore ecdsa==0.13 # via python-jose factory-boy==2.11.1 @@ -46,7 +46,7 @@ pycryptodome==3.7.2 # via python-jose pyflakes==2.0.0 pytest-flask==0.14.0 pytest-mock==1.10.0 -pytest==4.1.0 +pytest==4.1.1 python-dateutil==2.7.5 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto diff --git a/requirements.txt b/requirements.txt index bc72db0a..79268c8a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,17 +6,17 @@ # acme==0.30.0 alembic-autogenerate-enums==0.0.2 -alembic==1.0.5 # via flask-migrate -amqp==2.3.2 # via kombu +alembic==1.0.6 # via flask-migrate +amqp==2.4.0 # via kombu aniso8601==4.1.0 # via flask-restful arrow==0.13.0 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 -bcrypt==3.1.5 # via flask-bcrypt, paramiko +bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.76 -botocore==1.12.76 +boto3==1.9.80 +botocore==1.12.80 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 # via bcrypt, cryptography, pynacl @@ -51,7 +51,7 @@ lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.0 # via jinja2, mako marshmallow-sqlalchemy==0.15.0 -marshmallow==2.17.0 +marshmallow==2.18.0 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 @@ -77,8 +77,8 @@ requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.1.13 # via boto3 six==1.12.0 -sqlalchemy-utils==0.33.10 -sqlalchemy==1.2.15 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy-utils==0.33.11 +sqlalchemy==1.2.16 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.2 urllib3==1.24.1 # via botocore, requests vine==1.2.0 # via amqp From cb35f19d6ca4b0d84ed2f96e1e28ca482556411d Mon Sep 17 00:00:00 2001 From: Ronald Moesbergen Date: Mon, 21 Jan 2019 10:22:03 +0100 Subject: [PATCH 075/357] Add 'delete_cert' to enum log_type in logs table --- lemur/logs/models.py | 2 +- lemur/migrations/versions/9f79024fe67b_.py | 22 ++++++++++++++++++++++ 2 files changed, 23 insertions(+), 1 deletion(-) create mode 100644 lemur/migrations/versions/9f79024fe67b_.py diff --git 
a/lemur/logs/models.py b/lemur/logs/models.py index d4239e59..9f982c24 100644 --- a/lemur/logs/models.py +++ b/lemur/logs/models.py @@ -18,6 +18,6 @@ class Log(db.Model): __tablename__ = 'logs' id = Column(Integer, primary_key=True) certificate_id = Column(Integer, ForeignKey('certificates.id')) - log_type = Column(Enum('key_view', 'create_cert', 'update_cert', 'revoke_cert', name='log_type'), nullable=False) + log_type = Column(Enum('key_view', 'create_cert', 'update_cert', 'revoke_cert', 'delete_cert', name='log_type'), nullable=False) logged_at = Column(ArrowType(), PassiveDefault(func.now()), nullable=False) user_id = Column(Integer, ForeignKey('users.id'), nullable=False) diff --git a/lemur/migrations/versions/9f79024fe67b_.py b/lemur/migrations/versions/9f79024fe67b_.py new file mode 100644 index 00000000..ad22d5f3 --- /dev/null +++ b/lemur/migrations/versions/9f79024fe67b_.py @@ -0,0 +1,22 @@ +""" Add delete_cert to log_type enum + +Revision ID: 9f79024fe67b +Revises: ee827d1e1974 +Create Date: 2019-01-03 15:36:59.181911 + +""" + +# revision identifiers, used by Alembic. +revision = '9f79024fe67b' +down_revision = 'ee827d1e1974' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + op.sync_enum_values('public', 'log_type', ['create_cert', 'key_view', 'revoke_cert', 'update_cert'], ['create_cert', 'delete_cert', 'key_view', 'revoke_cert', 'update_cert']) + + +def downgrade(): + op.sync_enum_values('public', 'log_type', ['create_cert', 'delete_cert', 'key_view', 'revoke_cert', 'update_cert'], ['create_cert', 'key_view', 'revoke_cert', 'update_cert']) From 4c4fbf3e48d3644ac2869d7d5e1688248fb6f597 Mon Sep 17 00:00:00 2001 From: Ronald Moesbergen Date: Mon, 21 Jan 2019 10:25:28 +0100 Subject: [PATCH 076/357] Implement certificates delete API call by marking a cert as 'deleted' in the database. Only certificates that have expired can be deleted. --- lemur/certificates/views.py | 46 ++++++++++++++++++++++++++++++++ lemur/tests/conftest.py | 12 ++++++++- lemur/tests/factories.py | 7 ++++- lemur/tests/test_certificates.py | 19 ++++++++++--- 4 files changed, 78 insertions(+), 6 deletions(-) diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index 54c60924..948c44d6 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -6,6 +6,7 @@ .. moduleauthor:: Kevin Glisson """ import base64 +import arrow from builtins import str from flask import Blueprint, make_response, jsonify, g @@ -660,6 +661,51 @@ class Certificates(AuthenticatedResource): log_service.create(g.current_user, 'update_cert', certificate=cert) return cert + def delete(self, certificate_id, data=None): + """ + .. http:delete:: /certificates/1 + + Delete a certificate + + **Example request**: + + .. sourcecode:: http + + DELETE /certificates/1 HTTP/1.1 + Host: example.com + + **Example response**: + + .. 
sourcecode:: http + + HTTP/1.1 200 OK + + :reqheader Authorization: OAuth token to authenticate + :statuscode 204: no error + :statuscode 403: unauthenticated + :statusoode 404: certificate not found + + """ + cert = service.get(certificate_id) + + if not cert: + return dict(message="Cannot find specified certificate"), 404 + + # allow creators + if g.current_user != cert.user: + owner_role = role_service.get_by_name(cert.owner) + permission = CertificatePermission(owner_role, [x.name for x in cert.roles]) + + if not permission.can(): + return dict(message='You are not authorized to delete this certificate'), 403 + + if arrow.get(cert.not_after) > arrow.utcnow(): + return dict(message='Certificate is still valid, only expired certificates can be deleted'), 412 + + service.update(certificate_id, deleted=True) + log_service.create(g.current_user, 'delete_cert', certificate=cert) + return '', 204 + class NotificationCertificatesList(AuthenticatedResource): """ Defines the 'certificates' endpoint """ diff --git a/lemur/tests/conftest.py b/lemur/tests/conftest.py index 9a48eb94..3f5fa2d8 100644 --- a/lemur/tests/conftest.py +++ b/lemur/tests/conftest.py @@ -15,7 +15,8 @@ from lemur.tests.vectors import SAN_CERT_KEY, INTERMEDIATE_KEY from .factories import ApiKeyFactory, AuthorityFactory, NotificationFactory, DestinationFactory, \ CertificateFactory, UserFactory, RoleFactory, SourceFactory, EndpointFactory, \ - RotationPolicyFactory, PendingCertificateFactory, AsyncAuthorityFactory, CryptoAuthorityFactory + RotationPolicyFactory, PendingCertificateFactory, AsyncAuthorityFactory, InvalidCertificateFactory, \ + CryptoAuthorityFactory def pytest_runtest_setup(item): @@ -168,6 +169,15 @@ def pending_certificate(session): return p +@pytest.fixture +def invalid_certificate(session): + u = UserFactory() + a = AsyncAuthorityFactory() + i = InvalidCertificateFactory(user=u, authority=a) + session.commit() + return i + + @pytest.fixture def admin_user(session): u = UserFactory() diff --git a/lemur/tests/factories.py b/lemur/tests/factories.py index 3717c64d..a4af3d43 100644 --- a/lemur/tests/factories.py +++ b/lemur/tests/factories.py @@ -20,7 +20,7 @@ from lemur.policies.models import RotationPolicy from lemur.api_keys.models import ApiKey from .vectors import SAN_CERT_STR, SAN_CERT_KEY, CSR_STR, INTERMEDIATE_CERT_STR, ROOTCA_CERT_STR, INTERMEDIATE_KEY, \ - WILDCARD_CERT_KEY + WILDCARD_CERT_KEY, INVALID_CERT_STR class BaseFactory(SQLAlchemyModelFactory): @@ -137,6 +137,11 @@ class CACertificateFactory(CertificateFactory): private_key = INTERMEDIATE_KEY +class InvalidCertificateFactory(CertificateFactory): + body = INVALID_CERT_STR + private_key = '' + + class AuthorityFactory(BaseFactory): """Authority factory.""" name = Sequence(lambda n: 'authority{0}'.format(n)) diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index a1df1c0d..4d412563 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -647,15 +647,26 @@ def test_certificate_put_with_data(client, certificate, issuer_plugin): @pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 412), + (VALID_ADMIN_API_TOKEN, 412), + ('', 401) ]) def test_certificate_delete(client, token, status): assert client.delete(api.url_for(Certificates, certificate_id=1), headers=token).status_code == status +@pytest.mark.parametrize("token,status", 
[ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 204), + (VALID_ADMIN_API_TOKEN, 204), + ('', 401) +]) +def test_invalid_certificate_delete(client, invalid_certificate, token, status): + assert client.delete( + api.url_for(Certificates, certificate_id=invalid_certificate.id), headers=token).status_code == status + + @pytest.mark.parametrize("token,status", [ (VALID_USER_HEADER_TOKEN, 405), (VALID_ADMIN_HEADER_TOKEN, 405), From 4b893ab5b49b622a1634ef54e7323b219390bf0f Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Fri, 5 Jan 2018 13:08:07 +0200 Subject: [PATCH 077/357] Expose full certificate RFC 4514 Distinguished Name string Using rfc4514_string() method added in cryptography version 2.5. --- lemur/certificates/models.py | 4 ++++ lemur/certificates/schemas.py | 1 + lemur/static/app/angular/certificates/view/view.tpl.html | 2 ++ lemur/tests/test_certificates.py | 6 ++++++ requirements.txt | 2 +- 5 files changed, 14 insertions(+), 1 deletion(-) diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index 3eaba746..34305cc2 100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -227,6 +227,10 @@ class Certificate(db.Model): def location(self): return defaults.location(self.parsed_cert) + @property + def distinguished_name(self): + return self.parsed_cert.subject.rfc4514_string() + @property def key_type(self): if isinstance(self.parsed_cert.public_key(), rsa.RSAPublicKey): diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index 6b457086..946bd541 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -206,6 +206,7 @@ class CertificateOutputSchema(LemurOutputSchema): cn = fields.String() common_name = fields.String(attribute='cn') + distinguished_name = fields.String() not_after = fields.DateTime() validity_end = ArrowDateTime(attribute='not_after') diff --git a/lemur/static/app/angular/certificates/view/view.tpl.html b/lemur/static/app/angular/certificates/view/view.tpl.html index ba17ffa6..28b4e08e 100644 --- a/lemur/static/app/angular/certificates/view/view.tpl.html +++ b/lemur/static/app/angular/certificates/view/view.tpl.html @@ -83,6 +83,8 @@
+ Distinguished Name
+ {{ certificate.distinguishedName }}
Certificate Authority
{{ certificate.authority ? certificate.authority.name : "Imported" }} ({{ certificate.issuer }})
Serial
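
The distinguished_name property introduced in this patch is a thin wrapper around rfc4514_string(), which first appeared in cryptography 2.5 (hence the pin bump to cryptography==2.5 later in the same patch). A standalone sketch of what the call returns, using a placeholder certificate path:

    # Sketch: render a certificate subject as an RFC 4514 string (cryptography >= 2.5).
    from cryptography import x509
    from cryptography.hazmat.backends import default_backend

    with open('example-cert.pem', 'rb') as f:  # placeholder path
        cert = x509.load_pem_x509_certificate(f.read(), default_backend())

    # Prints something like 'CN=san.example.org,O=Example,C=EE' (comma-joined RDNs per RFC 4514).
    print(cert.subject.rfc4514_string())
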
diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index a1df1c0d..db2d27cf 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -619,6 +619,12 @@ def test_certificate_get_body(client): response_body = client.get(api.url_for(Certificates, certificate_id=1), headers=VALID_USER_HEADER_TOKEN).json assert response_body['serial'] == '211983098819107449768450703123665283596' assert response_body['serialHex'] == '9F7A75B39DAE4C3F9524C68B06DA6A0C' + assert response_body['distinguishedName'] == ('CN=LemurTrust Unittests Class 1 CA 2018,' + 'O=LemurTrust Enterprises Ltd,' + 'OU=Unittesting Operations Center,' + 'C=EE,' + 'ST=N/A,' + 'L=Earth') @pytest.mark.parametrize("token,status", [ diff --git a/requirements.txt b/requirements.txt index 79268c8a..d700de42 100644 --- a/requirements.txt +++ b/requirements.txt @@ -23,7 +23,7 @@ cffi==1.11.5 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask cloudflare==2.1.0 -cryptography==2.4.2 +cryptography==2.5 dnspython3==1.15.0 dnspython==1.15.0 # via dnspython3 docutils==0.14 # via botocore From a9724e73830be5c6ee00f6cd81bf2aff6865b071 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 24 Jan 2019 17:23:40 -0800 Subject: [PATCH 078/357] Resolving the 2 years error from UI during cert creation: Though a CA would accept two year validity, we were getting error for being beyond 2 years. This is because our current conversion is just current date plus 2 years, 1/25/2019 + 2 years ==> 1/25/2019 This is more strictly seen two years and 1 day extra, violating the 2 year's limit. --- lemur/common/missing.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lemur/common/missing.py b/lemur/common/missing.py index a4bbba77..508019b2 100644 --- a/lemur/common/missing.py +++ b/lemur/common/missing.py @@ -16,6 +16,9 @@ def convert_validity_years(data): data['validity_start'] = now.isoformat() end = now.replace(years=+int(data['validity_years'])) + # some CAs want to see exactly two years validity, and not two years plus one day, as is the case currently + # 1/25/2019 + 2 years ==> 1/25/2019 (two years and 1 day extra, violating the 2 year's limit) + end = end.replace(days=-1) if not current_app.config.get('LEMUR_ALLOW_WEEKEND_EXPIRATION', True): if is_weekend(end): end = end.replace(days=-2) From c47fa0f9a23689f0fce6e02364f12288bbf7c7db Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 24 Jan 2019 17:52:22 -0800 Subject: [PATCH 079/357] adjusting the tests to reflect on the new full year convert limit! 
--- lemur/tests/test_missing.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lemur/tests/test_missing.py b/lemur/tests/test_missing.py index 4f2c20c6..80a7df48 100644 --- a/lemur/tests/test_missing.py +++ b/lemur/tests/test_missing.py @@ -6,12 +6,12 @@ from freezegun import freeze_time def test_convert_validity_years(session): from lemur.common.missing import convert_validity_years - with freeze_time("2016-01-01"): + with freeze_time("2016-01-02"): data = convert_validity_years(dict(validity_years=2)) assert data['validity_start'] == arrow.utcnow().isoformat() - assert data['validity_end'] == arrow.utcnow().replace(years=+2).isoformat() + assert data['validity_end'] == arrow.utcnow().replace(years=+2, days=-1).isoformat() - with freeze_time("2015-01-10"): + with freeze_time("2015-01-11"): data = convert_validity_years(dict(validity_years=1)) - assert data['validity_end'] == arrow.utcnow().replace(years=+1, days=-2).isoformat() + assert data['validity_end'] == arrow.utcnow().replace(years=+1, days=-3).isoformat() From b4d1b80e04c6ead46635977fc9d21161718eb6e5 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 29 Jan 2019 10:13:44 -0500 Subject: [PATCH 080/357] Adding support for cfssl auth mode signing --- lemur/plugins/lemur_cfssl/plugin.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/lemur/plugins/lemur_cfssl/plugin.py b/lemur/plugins/lemur_cfssl/plugin.py index 030f290a..ead633bc 100644 --- a/lemur/plugins/lemur_cfssl/plugin.py +++ b/lemur/plugins/lemur_cfssl/plugin.py @@ -10,6 +10,9 @@ import json import requests +import base64 +import hmac +import hashlib from flask import current_app @@ -48,6 +51,21 @@ class CfsslIssuerPlugin(IssuerPlugin): data = {'certificate_request': csr} data = json.dumps(data) + try: + hex_key = current_app.config.get('CFSSL_KEY') + key=bytes.fromhex(hex_key) + except: + #unable to find CFSSL_KEY in config, continue using normal sign method + pass + else: + data=data.encode() + + token = base64.b64encode(hmac.new(key,data,digestmod=hashlib.sha256).digest()) + data = base64.b64encode(data) + + data = json.dumps({'token': token.decode('utf-8'), 'request': data.decode('utf-8')}) + + url = "{0}{1}".format(current_app.config.get('CFSSL_URL'), '/api/v1/cfssl/authsign') response = self.session.post(url, data=data.encode(encoding='utf_8', errors='strict')) if response.status_code > 399: metrics.send('cfssl_create_certificate_failure', 'counter', 1) From 254a3079f2ceb7408b42d3ec9626cbf69d4abb7e Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 29 Jan 2019 11:01:55 -0500 Subject: [PATCH 081/357] fix whitespace --- lemur/plugins/lemur_cfssl/plugin.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lemur/plugins/lemur_cfssl/plugin.py b/lemur/plugins/lemur_cfssl/plugin.py index ead633bc..d2abc2aa 100644 --- a/lemur/plugins/lemur_cfssl/plugin.py +++ b/lemur/plugins/lemur_cfssl/plugin.py @@ -53,14 +53,14 @@ class CfsslIssuerPlugin(IssuerPlugin): try: hex_key = current_app.config.get('CFSSL_KEY') - key=bytes.fromhex(hex_key) + key = bytes.fromhex(hex_key) except: #unable to find CFSSL_KEY in config, continue using normal sign method pass else: - data=data.encode() + data = data.encode() - token = base64.b64encode(hmac.new(key,data,digestmod=hashlib.sha256).digest()) + token = base64.b64encode(hmac.new(key, data, digestmod=hashlib.sha256).digest()) data = base64.b64encode(data) data = json.dumps({'token': token.decode('utf-8'), 'request': data.decode('utf-8')}) From 
c68a9cf80acd651ad18fe48a6c7d0e0a43ef7f29 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 29 Jan 2019 11:10:56 -0500 Subject: [PATCH 082/357] fixing linting issues --- lemur/plugins/lemur_cfssl/plugin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_cfssl/plugin.py b/lemur/plugins/lemur_cfssl/plugin.py index d2abc2aa..4bfefc85 100644 --- a/lemur/plugins/lemur_cfssl/plugin.py +++ b/lemur/plugins/lemur_cfssl/plugin.py @@ -54,8 +54,8 @@ class CfsslIssuerPlugin(IssuerPlugin): try: hex_key = current_app.config.get('CFSSL_KEY') key = bytes.fromhex(hex_key) - except: - #unable to find CFSSL_KEY in config, continue using normal sign method + except (ValueError, NameError): + # unable to find CFSSL_KEY in config, continue using normal sign method pass else: data = data.encode() From d2317acfc550b35a1ad40b449c37d66e5f258cf8 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 29 Jan 2019 15:17:40 -0800 Subject: [PATCH 083/357] allowing create_user with noninteractive PW;updating reqs --- lemur/manage.py | 19 +++++++++++-------- requirements-dev.txt | 11 ++++++----- requirements-docs.txt | 24 ++++++++++++------------ requirements-tests.txt | 18 +++++++++--------- requirements.txt | 24 ++++++++++++------------ 5 files changed, 50 insertions(+), 46 deletions(-) diff --git a/lemur/manage.py b/lemur/manage.py index b972e8a5..184b9aa6 100755 --- a/lemur/manage.py +++ b/lemur/manage.py @@ -273,10 +273,11 @@ class CreateUser(Command): Option('-u', '--username', dest='username', required=True), Option('-e', '--email', dest='email', required=True), Option('-a', '--active', dest='active', default=True), - Option('-r', '--roles', dest='roles', action='append', default=[]) + Option('-r', '--roles', dest='roles', action='append', default=[]), + Option('-p', '--password', dest='password', default=None) ) - def run(self, username, email, active, roles): + def run(self, username, email, active, roles, password): role_objs = [] for r in roles: role_obj = role_service.get_by_name(r) @@ -286,14 +287,16 @@ class CreateUser(Command): sys.stderr.write("[!] Cannot find role {0}\n".format(r)) sys.exit(1) - password1 = prompt_pass("Password") - password2 = prompt_pass("Confirm Password") + if not password: + password1 = prompt_pass("Password") + password2 = prompt_pass("Confirm Password") + password = password1 - if password1 != password2: - sys.stderr.write("[!] Passwords do not match!\n") - sys.exit(1) + if password1 != password2: + sys.stderr.write("[!] 
Passwords do not match!\n") + sys.exit(1) - user_service.create(username, password1, email, active, None, role_objs) + user_service.create(username, password, email, active, None, role_objs) sys.stdout.write("[+] Created new user: {0}\n".format(username)) diff --git a/requirements-dev.txt b/requirements-dev.txt index c1f55581..ac35f3e9 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,16 +4,17 @@ # # pip-compile --no-index --output-file requirements-dev.txt requirements-dev.in # -aspy.yaml==1.1.1 # via pre-commit +aspy.yaml==1.1.2 # via pre-commit bleach==3.1.0 # via readme-renderer certifi==2018.11.29 # via requests cfgv==1.4.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.1.8 # via pre-commit +identify==1.2.1 # via pre-commit idna==2.8 # via requests importlib-metadata==0.8 # via pre-commit +importlib-resources==1.0.2 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 @@ -24,13 +25,13 @@ pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer pyyaml==3.13 # via aspy.yaml, pre-commit readme-renderer==24.0 # via twine -requests-toolbelt==0.8.0 # via twine +requests-toolbelt==0.9.0 # via twine requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit -tqdm==4.29.1 # via twine +tqdm==4.30.0 # via twine twine==1.12.1 urllib3==1.24.1 # via requests -virtualenv==16.2.0 # via pre-commit +virtualenv==16.3.0 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.3.3 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index a7df5395..15085766 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,10 +4,10 @@ # # pip-compile --no-index --output-file requirements-docs.txt requirements-docs.in # -acme==0.30.0 +acme==0.30.2 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 -alembic==1.0.6 +alembic==1.0.7 amqp==2.4.0 aniso8601==4.1.0 arrow==0.13.0 @@ -17,15 +17,15 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.80 -botocore==1.12.80 +boto3==1.9.86 +botocore==1.12.86 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 chardet==3.0.4 click==7.0 cloudflare==2.1.0 -cryptography==2.4.2 +cryptography==2.5 dnspython3==1.15.0 dnspython==1.15.0 docutils==0.14 @@ -57,18 +57,18 @@ marshmallow-sqlalchemy==0.15.0 marshmallow==2.18.0 mock==2.0.0 ndg-httpsclient==0.5.1 -packaging==18.0 # via sphinx +packaging==19.0 # via sphinx paramiko==2.4.2 pbr==5.1.1 pem==18.2.0 -psycopg2==2.7.6.1 -pyasn1-modules==0.2.3 +psycopg2==2.7.7 +pyasn1-modules==0.2.4 pyasn1==0.4.5 pycparser==2.19 pygments==2.3.1 # via sphinx pyjwt==1.7.1 pynacl==1.3.0 -pyopenssl==18.0.0 +pyopenssl==19.0.0 pyparsing==2.3.1 # via packaging pyrfc3339==1.1 python-dateutil==2.7.5 @@ -77,7 +77,7 @@ pytz==2018.9 pyyaml==3.13 raven[flask]==6.10.0 redis==2.10.6 -requests-toolbelt==0.8.0 +requests-toolbelt==0.9.0 requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.1.13 @@ -88,8 +88,8 @@ sphinx==1.8.3 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.16 -tabulate==0.8.2 +sqlalchemy==1.2.17 +tabulate==0.8.3 urllib3==1.24.1 vine==1.2.0 werkzeug==0.14.1 diff --git a/requirements-tests.txt b/requirements-tests.txt index 2d54dce6..c326e951 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,30 +8,30 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.2.1 # via pytest attrs==18.2.0 
# via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.80 # via moto +boto3==1.9.86 # via moto boto==2.49.0 # via moto -botocore==1.12.80 # via boto3, moto, s3transfer +botocore==1.12.86 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.11.5 # via cryptography chardet==3.0.4 # via requests click==7.0 # via flask coverage==4.5.2 -cryptography==2.4.2 # via moto +cryptography==2.5 # via moto docker-pycreds==0.4.0 # via docker docker==3.7.0 # via moto docutils==0.14 # via botocore ecdsa==0.13 # via python-jose factory-boy==2.11.1 -faker==1.0.1 +faker==1.0.2 flask==1.0.2 # via pytest-flask freezegun==0.3.11 future==0.17.1 # via python-jose -idna==2.8 # via cryptography, requests +idna==2.8 # via requests itsdangerous==1.1.0 # via flask jinja2==2.10 # via flask, moto jmespath==0.9.3 # via boto3, botocore jsondiff==1.1.1 # via moto -jsonpickle==1.0 # via aws-xray-sdk +jsonpickle==1.1 # via aws-xray-sdk markupsafe==1.1.0 # via jinja2 mock==2.0.0 # via moto more-itertools==5.0.0 # via pytest @@ -42,8 +42,8 @@ pluggy==0.8.1 # via pytest py==1.7.0 # via pytest pyaml==18.11.0 # via moto pycparser==2.19 # via cffi -pycryptodome==3.7.2 # via python-jose -pyflakes==2.0.0 +pycryptodome==3.7.3 # via python-jose +pyflakes==2.1.0 pytest-flask==0.14.0 pytest-mock==1.10.0 pytest==4.1.1 @@ -60,5 +60,5 @@ text-unidecode==1.2 # via faker urllib3==1.24.1 # via botocore, requests websocket-client==0.54.0 # via docker werkzeug==0.14.1 # via flask, moto, pytest-flask -wrapt==1.11.0 # via aws-xray-sdk +wrapt==1.11.1 # via aws-xray-sdk xmltodict==0.11.0 # via moto diff --git a/requirements.txt b/requirements.txt index 79268c8a..c595e509 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile --no-index --output-file requirements.txt requirements.in # -acme==0.30.0 +acme==0.30.2 alembic-autogenerate-enums==0.0.2 -alembic==1.0.6 # via flask-migrate +alembic==1.0.7 # via flask-migrate amqp==2.4.0 # via kombu aniso8601==4.1.0 # via flask-restful arrow==0.13.0 @@ -15,15 +15,15 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.80 -botocore==1.12.80 +boto3==1.9.86 +botocore==1.12.86 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask cloudflare==2.1.0 -cryptography==2.4.2 +cryptography==2.5 dnspython3==1.15.0 dnspython==1.15.0 # via dnspython3 docutils==0.14 # via botocore @@ -39,7 +39,7 @@ flask-sqlalchemy==2.3.2 flask==1.0.2 future==0.17.1 gunicorn==19.9.0 -idna==2.8 # via cryptography, requests +idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask jinja2==2.10 @@ -57,13 +57,13 @@ ndg-httpsclient==0.5.1 paramiko==2.4.2 pbr==5.1.1 # via mock pem==18.2.0 -psycopg2==2.7.6.1 -pyasn1-modules==0.2.3 # via python-ldap +psycopg2==2.7.7 +pyasn1-modules==0.2.4 # via python-ldap pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, python-ldap pycparser==2.19 # via cffi pyjwt==1.7.1 pynacl==1.3.0 # via paramiko -pyopenssl==18.0.0 +pyopenssl==19.0.0 pyrfc3339==1.1 # via acme python-dateutil==2.7.5 # via alembic, arrow, botocore python-editor==1.0.3 # via alembic @@ -72,14 +72,14 @@ pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 pyyaml==3.13 # via cloudflare raven[flask]==6.10.0 redis==2.10.6 -requests-toolbelt==0.8.0 # via acme +requests-toolbelt==0.9.0 # via acme requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.1.13 # via boto3 six==1.12.0 
sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.16 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils -tabulate==0.8.2 +sqlalchemy==1.2.17 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +tabulate==0.8.3 urllib3==1.24.1 # via botocore, requests vine==1.2.0 # via amqp werkzeug==0.14.1 # via flask From 48ad20facaba794a8a14c249af5fb83f206b7006 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 29 Jan 2019 16:17:08 -0800 Subject: [PATCH 084/357] moving the 2 year validity issue to the Verisign plugin, and address it there --- lemur/common/missing.py | 4 +--- lemur/plugins/lemur_verisign/plugin.py | 15 ++++++++++++--- lemur/tests/test_missing.py | 8 ++++---- 3 files changed, 17 insertions(+), 10 deletions(-) diff --git a/lemur/common/missing.py b/lemur/common/missing.py index 508019b2..5c7dffac 100644 --- a/lemur/common/missing.py +++ b/lemur/common/missing.py @@ -16,9 +16,7 @@ def convert_validity_years(data): data['validity_start'] = now.isoformat() end = now.replace(years=+int(data['validity_years'])) - # some CAs want to see exactly two years validity, and not two years plus one day, as is the case currently - # 1/25/2019 + 2 years ==> 1/25/2019 (two years and 1 day extra, violating the 2 year's limit) - end = end.replace(days=-1) + if not current_app.config.get('LEMUR_ALLOW_WEEKEND_EXPIRATION', True): if is_weekend(end): end = end.replace(days=-2) diff --git a/lemur/plugins/lemur_verisign/plugin.py b/lemur/plugins/lemur_verisign/plugin.py index 3e672a43..3f16f997 100644 --- a/lemur/plugins/lemur_verisign/plugin.py +++ b/lemur/plugins/lemur_verisign/plugin.py @@ -111,10 +111,19 @@ def process_options(options): data['subject_alt_names'] = ",".join(get_additional_names(options)) + if options.get('validity_end') > arrow.utcnow().replace(years=2): + raise Exception("Verisign issued certificates cannot exceed two years in validity") + if options.get('validity_end'): - period = get_default_issuance(options) - data['specificEndDate'] = options['validity_end'].format("MM/DD/YYYY") - data['validityPeriod'] = period + # VeriSign (Symantec) only accepts strictly smaller than 2 year end date + if options.get('validity_end') < arrow.utcnow().replace(years=2).replace(days=-1): + period = get_default_issuance(options) + data['specificEndDate'] = options['validity_end'].format("MM/DD/YYYY") + data['validityPeriod'] = period + else: + # allowing Symantec website setting the end date, given the validity period + data['validityPeriod'] = str(get_default_issuance(options)) + options.pop('validity_end', None) elif options.get('validity_years'): if options['validity_years'] in [1, 2]: diff --git a/lemur/tests/test_missing.py b/lemur/tests/test_missing.py index 80a7df48..4f2c20c6 100644 --- a/lemur/tests/test_missing.py +++ b/lemur/tests/test_missing.py @@ -6,12 +6,12 @@ from freezegun import freeze_time def test_convert_validity_years(session): from lemur.common.missing import convert_validity_years - with freeze_time("2016-01-02"): + with freeze_time("2016-01-01"): data = convert_validity_years(dict(validity_years=2)) assert data['validity_start'] == arrow.utcnow().isoformat() - assert data['validity_end'] == arrow.utcnow().replace(years=+2, days=-1).isoformat() + assert data['validity_end'] == arrow.utcnow().replace(years=+2).isoformat() - with freeze_time("2015-01-11"): + with freeze_time("2015-01-10"): data = convert_validity_years(dict(validity_years=1)) - assert data['validity_end'] == arrow.utcnow().replace(years=+1, days=-3).isoformat() + assert 
data['validity_end'] == arrow.utcnow().replace(years=+1, days=-2).isoformat() From e24a94d798bd69a0110b1e5ddf532192621ca754 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Wed, 26 Dec 2018 19:49:56 +0200 Subject: [PATCH 085/357] Enforce that PEM strings (certs, keys, CSR) are internally passed as str, not bytes This was already true in most places but not 100%, leading to lots of redundant checks and conversions. --- lemur/certificates/service.py | 10 +--------- lemur/common/utils.py | 17 +++++++---------- lemur/plugins/lemur_aws/iam.py | 3 +-- lemur/plugins/lemur_cryptography/plugin.py | 11 ++++------- lemur/plugins/lemur_csr/plugin.py | 11 +++-------- lemur/plugins/lemur_java/plugin.py | 18 +++++------------- lemur/plugins/lemur_openssl/plugin.py | 11 +++-------- lemur/tests/conftest.py | 7 +++---- 8 files changed, 27 insertions(+), 61 deletions(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 1b203260..0f37d70e 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -221,11 +221,6 @@ def upload(**kwargs): else: kwargs['roles'] = roles - if kwargs.get('private_key'): - private_key = kwargs['private_key'] - if not isinstance(private_key, bytes): - kwargs['private_key'] = private_key.encode('utf-8') - cert = Certificate(**kwargs) cert.authority = kwargs.get('authority') cert = database.create(cert) @@ -432,10 +427,7 @@ def create_csr(**csr_config): encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, # would like to use PKCS8 but AWS ELBs don't like it encryption_algorithm=serialization.NoEncryption() - ) - - if isinstance(private_key, bytes): - private_key = private_key.decode('utf-8') + ).decode('utf-8') csr = request.public_bytes( encoding=serialization.Encoding.PEM diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 0504c958..32271e89 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -48,24 +48,22 @@ def parse_certificate(body): :param body: :return: """ - if isinstance(body, str): - body = body.encode('utf-8') + assert isinstance(body, str) - return x509.load_pem_x509_certificate(body, default_backend()) + return x509.load_pem_x509_certificate(body.encode('utf-8'), default_backend()) def parse_private_key(private_key): """ Parses a PEM-format private key (RSA, DSA, ECDSA or any other supported algorithm). - Raises ValueError for an invalid string. + Raises ValueError for an invalid string. Raises AssertionError when passed value is not str-type. 
:param private_key: String containing PEM private key """ - if isinstance(private_key, str): - private_key = private_key.encode('utf8') + assert isinstance(private_key, str) - return load_pem_private_key(private_key, password=None, backend=default_backend()) + return load_pem_private_key(private_key.encode('utf8'), password=None, backend=default_backend()) def parse_csr(csr): @@ -75,10 +73,9 @@ def parse_csr(csr): :param csr: :return: """ - if isinstance(csr, str): - csr = csr.encode('utf-8') + assert isinstance(csr, str) - return x509.load_pem_x509_csr(csr, default_backend()) + return x509.load_pem_x509_csr(csr.encode('utf-8'), default_backend()) def get_authority_key(body): diff --git a/lemur/plugins/lemur_aws/iam.py b/lemur/plugins/lemur_aws/iam.py index 7010c909..49816c2b 100644 --- a/lemur/plugins/lemur_aws/iam.py +++ b/lemur/plugins/lemur_aws/iam.py @@ -64,6 +64,7 @@ def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs): :param path: :return: """ + assert isinstance(private_key, str) client = kwargs.pop('client') if not path or path == '/': @@ -72,8 +73,6 @@ def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs): name = name + '-' + path.strip('/') try: - if isinstance(private_key, bytes): - private_key = private_key.decode("utf-8") if cert_chain: return client.upload_server_certificate( Path=path, diff --git a/lemur/plugins/lemur_cryptography/plugin.py b/lemur/plugins/lemur_cryptography/plugin.py index fe9d7bb3..97060391 100644 --- a/lemur/plugins/lemur_cryptography/plugin.py +++ b/lemur/plugins/lemur_cryptography/plugin.py @@ -14,6 +14,7 @@ from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization +from lemur.common.utils import parse_private_key from lemur.plugins.bases import IssuerPlugin from lemur.plugins import lemur_cryptography as cryptography_issuer @@ -40,7 +41,8 @@ def issue_certificate(csr, options, private_key=None): if options.get("authority"): # Issue certificate signed by an existing lemur_certificates authority issuer_subject = options['authority'].authority_certificate.subject - issuer_private_key = options['authority'].authority_certificate.private_key + assert private_key is None, "Private would be ignored, authority key used instead" + private_key = options['authority'].authority_certificate.private_key chain_cert_pem = options['authority'].authority_certificate.body authority_key_identifier_public = options['authority'].authority_certificate.public_key authority_key_identifier_subject = x509.SubjectKeyIdentifier.from_public_key(authority_key_identifier_public) @@ -52,7 +54,6 @@ def issue_certificate(csr, options, private_key=None): else: # Issue certificate that is self-signed (new lemur_certificates root authority) issuer_subject = csr.subject - issuer_private_key = private_key chain_cert_pem = "" authority_key_identifier_public = csr.public_key() authority_key_identifier_subject = None @@ -112,11 +113,7 @@ def issue_certificate(csr, options, private_key=None): # FIXME: Not implemented in lemur/schemas.py yet https://github.com/Netflix/lemur/issues/662 pass - private_key = serialization.load_pem_private_key( - bytes(str(issuer_private_key).encode('utf-8')), - password=None, - backend=default_backend() - ) + private_key = parse_private_key(private_key) cert = builder.sign(private_key, hashes.SHA256(), default_backend()) cert_pem = cert.public_bytes( diff --git a/lemur/plugins/lemur_csr/plugin.py b/lemur/plugins/lemur_csr/plugin.py 
index e06035d1..13f42084 100644 --- a/lemur/plugins/lemur_csr/plugin.py +++ b/lemur/plugins/lemur_csr/plugin.py @@ -38,14 +38,9 @@ def create_csr(cert, chain, csr_tmp, key): :param csr_tmp: :param key: """ - if isinstance(cert, bytes): - cert = cert.decode('utf-8') - - if isinstance(chain, bytes): - chain = chain.decode('utf-8') - - if isinstance(key, bytes): - key = key.decode('utf-8') + assert isinstance(cert, str) + assert isinstance(chain, str) + assert isinstance(key, str) with mktempfile() as key_tmp: with open(key_tmp, 'w') as f: diff --git a/lemur/plugins/lemur_java/plugin.py b/lemur/plugins/lemur_java/plugin.py index 151794da..5aab5342 100644 --- a/lemur/plugins/lemur_java/plugin.py +++ b/lemur/plugins/lemur_java/plugin.py @@ -59,11 +59,8 @@ def split_chain(chain): def create_truststore(cert, chain, jks_tmp, alias, passphrase): - if isinstance(cert, bytes): - cert = cert.decode('utf-8') - - if isinstance(chain, bytes): - chain = chain.decode('utf-8') + assert isinstance(cert, str) + assert isinstance(chain, str) with mktempfile() as cert_tmp: with open(cert_tmp, 'w') as f: @@ -98,14 +95,9 @@ def create_truststore(cert, chain, jks_tmp, alias, passphrase): def create_keystore(cert, chain, jks_tmp, key, alias, passphrase): - if isinstance(cert, bytes): - cert = cert.decode('utf-8') - - if isinstance(chain, bytes): - chain = chain.decode('utf-8') - - if isinstance(key, bytes): - key = key.decode('utf-8') + assert isinstance(cert, str) + assert isinstance(chain, str) + assert isinstance(key, str) # Create PKCS12 keystore from private key and public certificate with mktempfile() as cert_tmp: diff --git a/lemur/plugins/lemur_openssl/plugin.py b/lemur/plugins/lemur_openssl/plugin.py index d50b4e43..9ddce925 100644 --- a/lemur/plugins/lemur_openssl/plugin.py +++ b/lemur/plugins/lemur_openssl/plugin.py @@ -44,14 +44,9 @@ def create_pkcs12(cert, chain, p12_tmp, key, alias, passphrase): :param alias: :param passphrase: """ - if isinstance(cert, bytes): - cert = cert.decode('utf-8') - - if isinstance(chain, bytes): - chain = chain.decode('utf-8') - - if isinstance(key, bytes): - key = key.decode('utf-8') + assert isinstance(cert, str) + assert isinstance(chain, str) + assert isinstance(key, str) with mktempfile() as key_tmp: with open(key_tmp, 'w') as f: diff --git a/lemur/tests/conftest.py b/lemur/tests/conftest.py index 9a48eb94..3790358e 100644 --- a/lemur/tests/conftest.py +++ b/lemur/tests/conftest.py @@ -3,12 +3,11 @@ import os import datetime import pytest from cryptography import x509 -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.serialization import load_pem_private_key from flask import current_app from flask_principal import identity_changed, Identity from lemur import create_app +from lemur.common.utils import parse_private_key from lemur.database import db as _db from lemur.auth.service import create_token from lemur.tests.vectors import SAN_CERT_KEY, INTERMEDIATE_KEY @@ -235,12 +234,12 @@ def logged_in_admin(session, app): @pytest.fixture def private_key(): - return load_pem_private_key(SAN_CERT_KEY.encode(), password=None, backend=default_backend()) + return parse_private_key(SAN_CERT_KEY) @pytest.fixture def issuer_private_key(): - return load_pem_private_key(INTERMEDIATE_KEY.encode(), password=None, backend=default_backend()) + return parse_private_key(INTERMEDIATE_KEY) @pytest.fixture From 44a060b15932a160b1ee8a676a8af8ae40e976b1 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 4 Feb 2019 15:36:39 -0800 Subject: [PATCH 
086/357] adding support for creating a source while creating a new dst, while the destination is from AWS --- lemur/destinations/service.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/lemur/destinations/service.py b/lemur/destinations/service.py index ed6fcb0f..705f45e7 100644 --- a/lemur/destinations/service.py +++ b/lemur/destinations/service.py @@ -6,11 +6,13 @@ .. moduleauthor:: Kevin Glisson """ from sqlalchemy import func +from flask import current_app from lemur import database from lemur.models import certificate_destination_associations from lemur.destinations.models import Destination from lemur.certificates.models import Certificate +from lemur.sources import service as sources_service def create(label, plugin_name, options, description=None): @@ -28,6 +30,13 @@ def create(label, plugin_name, options, description=None): del option['value']['plugin_object'] destination = Destination(label=label, options=options, plugin_name=plugin_name, description=description) + current_app.logger.info("Destination: %s created", label) + + # add the destination as source, to avoid new destinations that are not in source, as long as an AWS destination + if plugin_name == 'aws-destination': + sources_service.create(label=label, plugin_name=plugin_name, options=options, description=description) + current_app.logger.info("Source: %s created", label) + return database.create(destination) From f249a82d71ac70f146958fa9c1da1d93a911e43d Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 4 Feb 2019 16:10:48 -0800 Subject: [PATCH 087/357] renaming destination to source. --- lemur/destinations/service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/destinations/service.py b/lemur/destinations/service.py index 705f45e7..94ca3977 100644 --- a/lemur/destinations/service.py +++ b/lemur/destinations/service.py @@ -34,7 +34,7 @@ def create(label, plugin_name, options, description=None): # add the destination as source, to avoid new destinations that are not in source, as long as an AWS destination if plugin_name == 'aws-destination': - sources_service.create(label=label, plugin_name=plugin_name, options=options, description=description) + sources_service.create(label=label, plugin_name='aws-source', options=options, description=description) current_app.logger.info("Source: %s created", label) return database.create(destination) From 51248c193803727c10e9d4c67de8e465210ee3f2 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Thu, 20 Dec 2018 18:13:59 +0200 Subject: [PATCH 088/357] Use special issuer values and in special cases This way it's easy to find/distinguish selfsigned certificates stored in Lemur. --- lemur/common/defaults.py | 13 +++++++++++-- lemur/common/utils.py | 37 +++++++++++++++++++++++++++++++++++- lemur/tests/conftest.py | 8 ++++++++ lemur/tests/test_defaults.py | 14 +++++++++++++- lemur/tests/test_utils.py | 12 ++++++++++++ lemur/tests/vectors.py | 1 + 6 files changed, 81 insertions(+), 4 deletions(-) diff --git a/lemur/common/defaults.py b/lemur/common/defaults.py index 72e863c1..6b259f6b 100644 --- a/lemur/common/defaults.py +++ b/lemur/common/defaults.py @@ -3,6 +3,8 @@ import unicodedata from cryptography import x509 from flask import current_app + +from lemur.common.utils import is_selfsigned from lemur.extensions import sentry from lemur.constants import SAN_NAMING_TEMPLATE, DEFAULT_NAMING_TEMPLATE @@ -229,15 +231,22 @@ def issuer(cert): """ Gets a sane issuer slug from a given certificate, stripping non-alphanumeric characters. 
- :param cert: + For self-signed certificates, the special value '' is returned. + If issuer cannot be determined, '' is returned. + + :param cert: Parsed certificate object :return: Issuer slug """ + # If certificate is self-signed, we return a special value -- there really is no distinct "issuer" for it + if is_selfsigned(cert): + return '' + # Try Common Name or fall back to Organization name attrs = (cert.issuer.get_attributes_for_oid(x509.OID_COMMON_NAME) or cert.issuer.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME)) if not attrs: current_app.logger.error("Unable to get issuer! Cert serial {:x}".format(cert.serial_number)) - return "Unknown" + return '' return text_to_slug(attrs[0].value, '') diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 32271e89..f3ac5fe7 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -11,9 +11,10 @@ import string import sqlalchemy from cryptography import x509 +from cryptography.exceptions import InvalidSignature, UnsupportedAlgorithm from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes -from cryptography.hazmat.primitives.asymmetric import rsa, ec +from cryptography.hazmat.primitives.asymmetric import rsa, ec, padding from cryptography.hazmat.primitives.serialization import load_pem_private_key from flask_restful.reqparse import RequestParser from sqlalchemy import and_, func @@ -143,6 +144,40 @@ def generate_private_key(key_type): ) +def check_cert_signature(cert, issuer_public_key): + """ + Check a certificate's signature against an issuer public key. + On success, returns None; on failure, raises UnsupportedAlgorithm or InvalidSignature. + """ + if isinstance(issuer_public_key, rsa.RSAPublicKey): + # RSA requires padding, just to make life difficult for us poor developers :( + if cert.signature_algorithm_oid == x509.SignatureAlgorithmOID.RSASSA_PSS: + # In 2005, IETF devised a more secure padding scheme to replace PKCS #1 v1.5. To make sure that + # nobody can easily support or use it, they mandated lots of complicated parameters, unlike any + # other X.509 signature scheme. + # https://tools.ietf.org/html/rfc4056 + raise UnsupportedAlgorithm("RSASSA-PSS not supported") + else: + padder = padding.PKCS1v15() + issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, padder, cert.signature_hash_algorithm) + else: + # EllipticCurvePublicKey or DSAPublicKey + issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, cert.signature_hash_algorithm) + + +def is_selfsigned(cert): + """ + Returns True if the certificate is self-signed. + Returns False for failed verification or unsupported signing algorithm. + """ + try: + check_cert_signature(cert, cert.public_key()) + # If verification was successful, it's self-signed. + return True + except InvalidSignature: + return False + + def is_weekend(date): """ Determines if a given date is on a weekend. 
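
The two helpers added to lemur/common/utils.py above boil down to verifying a certificate's signature against its own public key. The following stand-alone sketch reproduces the RSA branch of that check using the same cryptography APIs pinned by this series; the generated key, common name and validity dates are placeholders, not values from the patch:

    import datetime

    from cryptography import x509
    from cryptography.exceptions import InvalidSignature
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import padding, rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'self.example.net')])  # placeholder CN

    cert = (x509.CertificateBuilder()
            .subject_name(name)
            .issuer_name(name)                      # issuer == subject, i.e. self-signed
            .public_key(key.public_key())
            .serial_number(x509.random_serial_number())
            .not_valid_before(datetime.datetime(2019, 1, 1))
            .not_valid_after(datetime.datetime(2029, 1, 1))
            .sign(key, hashes.SHA256(), default_backend()))

    try:
        # Same verification the helper performs for RSA issuers:
        # PKCS#1 v1.5 padding over the to-be-signed certificate bytes.
        cert.public_key().verify(cert.signature, cert.tbs_certificate_bytes,
                                 padding.PKCS1v15(), cert.signature_hash_algorithm)
        print('self-signed')
    except InvalidSignature:
        print('signed by a different key')
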
diff --git a/lemur/tests/conftest.py b/lemur/tests/conftest.py index 32733e51..b3dad8b2 100644 --- a/lemur/tests/conftest.py +++ b/lemur/tests/conftest.py @@ -3,6 +3,8 @@ import os import datetime import pytest from cryptography import x509 +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes from flask import current_app from flask_principal import identity_changed, Identity @@ -263,6 +265,12 @@ def cert_builder(private_key): .not_valid_after(datetime.datetime(2040, 1, 1))) +@pytest.fixture +def selfsigned_cert(cert_builder, private_key): + # cert_builder uses the same cert public key as 'private_key' + return cert_builder.sign(private_key, hashes.SHA256(), default_backend()) + + @pytest.fixture(scope='function') def aws_credentials(): os.environ['AWS_ACCESS_KEY_ID'] = 'testing' diff --git a/lemur/tests/test_defaults.py b/lemur/tests/test_defaults.py index ffa19727..da9d6c79 100644 --- a/lemur/tests/test_defaults.py +++ b/lemur/tests/test_defaults.py @@ -81,6 +81,13 @@ def test_create_name(client): datetime(2015, 5, 12, 0, 0, 0), False ) == 'xn--mnchen-3ya.de-VertrauenswurdigAutoritat-20150507-20150512' + assert certificate_name( + 'selfie.example.org', + '', + datetime(2015, 5, 7, 0, 0, 0), + datetime(2025, 5, 12, 13, 37, 0), + False + ) == 'selfie.example.org-selfsigned-20150507-20250512' def test_issuer(client, cert_builder, issuer_private_key): @@ -106,4 +113,9 @@ def test_issuer(client, cert_builder, issuer_private_key): cert = (cert_builder .issuer_name(x509.Name([])) .sign(issuer_private_key, hashes.SHA256(), default_backend())) - assert issuer(cert) == 'Unknown' + assert issuer(cert) == '' + + +def test_issuer_selfsigned(selfsigned_cert): + from lemur.common.defaults import issuer + assert issuer(selfsigned_cert) == '' diff --git a/lemur/tests/test_utils.py b/lemur/tests/test_utils.py index 62d021a4..3e226f0f 100644 --- a/lemur/tests/test_utils.py +++ b/lemur/tests/test_utils.py @@ -1,5 +1,7 @@ import pytest +from lemur.tests.vectors import SAN_CERT, INTERMEDIATE_CERT, ROOTCA_CERT + def test_generate_private_key(): from lemur.common.utils import generate_private_key @@ -71,3 +73,13 @@ KFfxwrO1 -----END CERTIFICATE-----''' authority_key = get_authority_key(test_cert) assert authority_key == 'feacb541be81771293affa412d8dc9f66a3ebb80' + + +def test_is_selfsigned(selfsigned_cert): + from lemur.common.utils import is_selfsigned + + assert is_selfsigned(selfsigned_cert) is True + assert is_selfsigned(SAN_CERT) is False + assert is_selfsigned(INTERMEDIATE_CERT) is False + # Root CA certificates are also technically self-signed + assert is_selfsigned(ROOTCA_CERT) is True diff --git a/lemur/tests/vectors.py b/lemur/tests/vectors.py index 6a836b30..5da37c61 100644 --- a/lemur/tests/vectors.py +++ b/lemur/tests/vectors.py @@ -45,6 +45,7 @@ ssvobJ6Xe2D4cCVjUmsqtFEztMgdqgmlcWyGdUKeXdi7CMoeTb4uO+9qRQq46wYW n7K1z+W0Kp5yhnnPAoOioAP4vjASDx3z3RnLaZvMmcO7YdCIwhE5oGV0 -----END CERTIFICATE----- """ +ROOTCA_CERT = parse_certificate(ROOTCA_CERT_STR) ROOTCA_KEY = """\ -----BEGIN RSA PRIVATE KEY----- MIIEowIBAAKCAQEAvyVpe0tfIzri3l3PYH2r7hW86wKF58GLY+Ua52rEO5E3eXQq From 176f9bfea6f703467676797a9ecaa4da0189e082 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 5 Feb 2019 09:37:04 -0800 Subject: [PATCH 089/357] Updating requirements --- requirements-dev.txt | 4 ++-- requirements-docs.txt | 18 +++++++++--------- requirements-tests.txt | 14 +++++++------- requirements.txt | 16 ++++++++-------- 4 files changed, 26 insertions(+), 26 deletions(-) 
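
The pinned requirements files touched in this and the neighbouring version-bump patches are autogenerated rather than hand-edited; as the header comment in each file notes, they are refreshed with pip-tools along these lines:

    pip-compile --no-index --output-file requirements.txt requirements.in
    pip-compile --no-index --output-file requirements-docs.txt requirements-docs.in
    pip-compile --no-index --output-file requirements-tests.txt requirements-tests.in
    pip-compile --no-index --output-file requirements-dev.txt requirements-dev.in
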
diff --git a/requirements-dev.txt b/requirements-dev.txt index ac35f3e9..29f39314 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -19,13 +19,13 @@ invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.14.2 +pre-commit==1.14.3 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer pyyaml==3.13 # via aspy.yaml, pre-commit readme-renderer==24.0 # via twine -requests-toolbelt==0.9.0 # via twine +requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit diff --git a/requirements-docs.txt b/requirements-docs.txt index 15085766..21dc110c 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -8,7 +8,7 @@ acme==0.30.2 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.7 -amqp==2.4.0 +amqp==2.4.1 aniso8601==4.1.0 arrow==0.13.0 asn1crypto==0.24.0 @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.86 -botocore==1.12.86 +boto3==1.9.87 +botocore==1.12.87 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 @@ -53,13 +53,13 @@ kombu==4.2.2.post1 lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.0 -marshmallow-sqlalchemy==0.15.0 +marshmallow-sqlalchemy==0.16.0 marshmallow==2.18.0 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx paramiko==2.4.2 -pbr==5.1.1 +pbr==5.1.2 pem==18.2.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 @@ -71,20 +71,20 @@ pynacl==1.3.0 pyopenssl==19.0.0 pyparsing==2.3.1 # via packaging pyrfc3339==1.1 -python-dateutil==2.7.5 -python-editor==1.0.3 +python-dateutil==2.8.0 +python-editor==1.0.4 pytz==2018.9 pyyaml==3.13 raven[flask]==6.10.0 redis==2.10.6 -requests-toolbelt==0.9.0 +requests-toolbelt==0.9.1 requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.1.13 six==1.12.0 snowballstemmer==1.2.1 # via sphinx sphinx-rtd-theme==0.4.2 -sphinx==1.8.3 +sphinx==1.8.4 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.11 diff --git a/requirements-tests.txt b/requirements-tests.txt index c326e951..354f4f1a 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -5,12 +5,12 @@ # pip-compile --no-index --output-file requirements-tests.txt requirements-tests.in # asn1crypto==0.24.0 # via cryptography -atomicwrites==1.2.1 # via pytest +atomicwrites==1.3.0 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.86 # via moto +boto3==1.9.87 # via moto boto==2.49.0 # via moto -botocore==1.12.86 # via boto3, moto, s3transfer +botocore==1.12.87 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.11.5 # via cryptography chardet==3.0.4 # via requests @@ -37,7 +37,7 @@ mock==2.0.0 # via moto more-itertools==5.0.0 # via pytest moto==1.3.7 nose==1.3.7 -pbr==5.1.1 # via mock +pbr==5.1.2 # via mock pluggy==0.8.1 # via pytest py==1.7.0 # via pytest pyaml==18.11.0 # via moto @@ -45,9 +45,9 @@ pycparser==2.19 # via cffi pycryptodome==3.7.3 # via python-jose pyflakes==2.1.0 pytest-flask==0.14.0 -pytest-mock==1.10.0 -pytest==4.1.1 -python-dateutil==2.7.5 # via botocore, faker, freezegun, moto +pytest-mock==1.10.1 +pytest==4.2.0 +python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto pyyaml==3.13 # via pyaml diff --git a/requirements.txt b/requirements.txt index c595e509..cb08b22d 100644 --- a/requirements.txt +++ b/requirements.txt @@ 
-7,7 +7,7 @@ acme==0.30.2 alembic-autogenerate-enums==0.0.2 alembic==1.0.7 # via flask-migrate -amqp==2.4.0 # via kombu +amqp==2.4.1 # via kombu aniso8601==4.1.0 # via flask-restful arrow==0.13.0 asn1crypto==0.24.0 # via cryptography @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.86 -botocore==1.12.86 +boto3==1.9.87 +botocore==1.12.87 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 # via bcrypt, cryptography, pynacl @@ -50,12 +50,12 @@ kombu==4.2.2.post1 # via celery lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.0 # via jinja2, mako -marshmallow-sqlalchemy==0.15.0 +marshmallow-sqlalchemy==0.16.0 marshmallow==2.18.0 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 -pbr==5.1.1 # via mock +pbr==5.1.2 # via mock pem==18.2.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 # via python-ldap @@ -65,14 +65,14 @@ pyjwt==1.7.1 pynacl==1.3.0 # via paramiko pyopenssl==19.0.0 pyrfc3339==1.1 # via acme -python-dateutil==2.7.5 # via alembic, arrow, botocore -python-editor==1.0.3 # via alembic +python-dateutil==2.8.0 # via alembic, arrow, botocore +python-editor==1.0.4 # via alembic python-ldap==3.1.0 pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 pyyaml==3.13 # via cloudflare raven[flask]==6.10.0 redis==2.10.6 -requests-toolbelt==0.9.0 # via acme +requests-toolbelt==0.9.1 # via acme requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.1.13 # via boto3 From 6d1ef933c45e643643329d8b60dcb6b67bc2e920 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 5 Feb 2019 10:48:52 -0800 Subject: [PATCH 090/357] creating a new celery task to sync sources with destinations. This is as a measure to make sure important new destinations are also present as sources. --- lemur/common/celery.py | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index f2a2f826..308adced 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -20,6 +20,9 @@ from lemur.notifications.messaging import send_pending_failure_notification from lemur.pending_certificates import service as pending_certificate_service from lemur.plugins.base import plugins from lemur.sources.cli import clean, sync, validate_sources +from lemur.destinations import service as destinations_service +from lemur.sources import service as sources_service + if current_app: flask_app = current_app @@ -226,3 +229,19 @@ def sync_source(source): """ current_app.logger.debug("Syncing source {}".format(source)) sync([source]) + + +@celery.task() +def sync_source_destination(): + """ + This celery task will sync destination and source, to make sure all new destinations are also present in source. 
+ Some destinations do not qualify as sources, and hence should be excluded from being added as sources + """ + current_app.logger.debug("Syncing source and destination") + for dst in destinations_service.get_all(): + if dst.plugin_name == 'aws-destination' and not sources_service.get_by_label(dst.label): + sources_service.create(label=dst.label, + plugin_name='aws-source', + options=dst.options, + description=dst.description) + current_app.logger.info("Source: %s added", dst.label) From 70a70663a2f766523bed0a688456aac99d265919 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 7 Feb 2019 09:51:34 -0800 Subject: [PATCH 091/357] updating requirements --- requirements-dev.txt | 5 ++--- requirements-docs.txt | 22 +++++++++++----------- requirements-tests.txt | 16 ++++++++-------- requirements.txt | 20 ++++++++++---------- 4 files changed, 31 insertions(+), 32 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index ac35f3e9..440f932b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -14,18 +14,17 @@ flake8==3.5.0 identify==1.2.1 # via pre-commit idna==2.8 # via requests importlib-metadata==0.8 # via pre-commit -importlib-resources==1.0.2 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.14.2 +pre-commit==1.14.3 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer pyyaml==3.13 # via aspy.yaml, pre-commit readme-renderer==24.0 # via twine -requests-toolbelt==0.9.0 # via twine +requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit diff --git a/requirements-docs.txt b/requirements-docs.txt index 15085766..194708ed 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -8,7 +8,7 @@ acme==0.30.2 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.7 -amqp==2.4.0 +amqp==2.4.1 aniso8601==4.1.0 arrow==0.13.0 asn1crypto==0.24.0 @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.86 -botocore==1.12.86 +boto3==1.9.89 +botocore==1.12.89 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 @@ -49,17 +49,17 @@ jinja2==2.10 jmespath==0.9.3 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.2.2.post1 +kombu==4.3.0 lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.0 -marshmallow-sqlalchemy==0.15.0 +marshmallow-sqlalchemy==0.16.0 marshmallow==2.18.0 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx paramiko==2.4.2 -pbr==5.1.1 +pbr==5.1.2 pem==18.2.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 @@ -71,20 +71,20 @@ pynacl==1.3.0 pyopenssl==19.0.0 pyparsing==2.3.1 # via packaging pyrfc3339==1.1 -python-dateutil==2.7.5 -python-editor==1.0.3 +python-dateutil==2.8.0 +python-editor==1.0.4 pytz==2018.9 pyyaml==3.13 raven[flask]==6.10.0 redis==2.10.6 -requests-toolbelt==0.9.0 +requests-toolbelt==0.9.1 requests[security]==2.21.0 retrying==1.3.3 -s3transfer==0.1.13 +s3transfer==0.2.0 six==1.12.0 snowballstemmer==1.2.1 # via sphinx sphinx-rtd-theme==0.4.2 -sphinx==1.8.3 +sphinx==1.8.4 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.11 diff --git a/requirements-tests.txt b/requirements-tests.txt index c326e951..174e60ff 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -5,12 +5,12 @@ # pip-compile --no-index --output-file requirements-tests.txt requirements-tests.in # asn1crypto==0.24.0 # via cryptography 
-atomicwrites==1.2.1 # via pytest +atomicwrites==1.3.0 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.86 # via moto +boto3==1.9.89 # via moto boto==2.49.0 # via moto -botocore==1.12.86 # via boto3, moto, s3transfer +botocore==1.12.89 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.11.5 # via cryptography chardet==3.0.4 # via requests @@ -37,7 +37,7 @@ mock==2.0.0 # via moto more-itertools==5.0.0 # via pytest moto==1.3.7 nose==1.3.7 -pbr==5.1.1 # via mock +pbr==5.1.2 # via mock pluggy==0.8.1 # via pytest py==1.7.0 # via pytest pyaml==18.11.0 # via moto @@ -45,16 +45,16 @@ pycparser==2.19 # via cffi pycryptodome==3.7.3 # via python-jose pyflakes==2.1.0 pytest-flask==0.14.0 -pytest-mock==1.10.0 -pytest==4.1.1 -python-dateutil==2.7.5 # via botocore, faker, freezegun, moto +pytest-mock==1.10.1 +pytest==4.2.0 +python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto pyyaml==3.13 # via pyaml requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses responses==0.10.5 # via moto -s3transfer==0.1.13 # via boto3 +s3transfer==0.2.0 # via boto3 six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, more-itertools, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker urllib3==1.24.1 # via botocore, requests diff --git a/requirements.txt b/requirements.txt index c595e509..db661030 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,7 @@ acme==0.30.2 alembic-autogenerate-enums==0.0.2 alembic==1.0.7 # via flask-migrate -amqp==2.4.0 # via kombu +amqp==2.4.1 # via kombu aniso8601==4.1.0 # via flask-restful arrow==0.13.0 asn1crypto==0.24.0 # via cryptography @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.86 -botocore==1.12.86 +boto3==1.9.89 +botocore==1.12.89 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.11.5 # via bcrypt, cryptography, pynacl @@ -46,16 +46,16 @@ jinja2==2.10 jmespath==0.9.3 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.2.2.post1 # via celery +kombu==4.3.0 # via celery lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.0 # via jinja2, mako -marshmallow-sqlalchemy==0.15.0 +marshmallow-sqlalchemy==0.16.0 marshmallow==2.18.0 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 -pbr==5.1.1 # via mock +pbr==5.1.2 # via mock pem==18.2.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 # via python-ldap @@ -65,17 +65,17 @@ pyjwt==1.7.1 pynacl==1.3.0 # via paramiko pyopenssl==19.0.0 pyrfc3339==1.1 # via acme -python-dateutil==2.7.5 # via alembic, arrow, botocore -python-editor==1.0.3 # via alembic +python-dateutil==2.8.0 # via alembic, arrow, botocore +python-editor==1.0.4 # via alembic python-ldap==3.1.0 pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 pyyaml==3.13 # via cloudflare raven[flask]==6.10.0 redis==2.10.6 -requests-toolbelt==0.9.0 # via acme +requests-toolbelt==0.9.1 # via acme requests[security]==2.21.0 retrying==1.3.3 -s3transfer==0.1.13 # via boto3 +s3transfer==0.2.0 # via boto3 six==1.12.0 sqlalchemy-utils==0.33.11 sqlalchemy==1.2.17 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils From a43c6cf954bcaff127f9703be3c91bc594968ca2 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 7 Feb 2019 09:57:42 -0800 Subject: 
[PATCH 092/357] Update requirements-docs.txt --- requirements-docs.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/requirements-docs.txt b/requirements-docs.txt index e68bfc5e..194708ed 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -17,7 +17,6 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 - boto3==1.9.89 botocore==1.12.89 celery[redis]==4.2.1 From fd60b163423167aaf51b6a770f058d1002c006fb Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 7 Feb 2019 17:12:37 -0800 Subject: [PATCH 093/357] updating requirements, pinning pyyaml to patched version. --- requirements-docs.txt | 4 ++-- requirements.in | 2 ++ requirements.txt | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/requirements-docs.txt b/requirements-docs.txt index 194708ed..4ebea0a0 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,7 +4,7 @@ # # pip-compile --no-index --output-file requirements-docs.txt requirements-docs.in # -acme==0.30.2 +acme==0.31.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.7 @@ -74,7 +74,7 @@ pyrfc3339==1.1 python-dateutil==2.8.0 python-editor==1.0.4 pytz==2018.9 -pyyaml==3.13 +pyyaml==4.2b4 raven[flask]==6.10.0 redis==2.10.6 requests-toolbelt==0.9.1 diff --git a/requirements.in b/requirements.in index 0aea4591..b085f5c7 100644 --- a/requirements.in +++ b/requirements.in @@ -44,3 +44,5 @@ six SQLAlchemy-Utils tabulate xmltodict +pyyaml>=4.2b1 #high severity alert + diff --git a/requirements.txt b/requirements.txt index f391d016..fd164c3d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # # pip-compile --no-index --output-file requirements.txt requirements.in # -acme==0.30.2 +acme==0.31.0 alembic-autogenerate-enums==0.0.2 alembic==1.0.7 # via flask-migrate amqp==2.4.1 # via kombu @@ -70,7 +70,7 @@ python-dateutil==2.8.0 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic python-ldap==3.1.0 pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 -pyyaml==3.13 # via cloudflare +pyyaml==4.2b4 raven[flask]==6.10.0 redis==2.10.6 requests-toolbelt==0.9.1 # via acme From 73a474bd352b80a21751738506c30f3706ffc59c Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 8 Feb 2019 08:23:42 -0800 Subject: [PATCH 094/357] pinning pyyaml to ensure only using the patched version --- requirements-dev.in | 3 ++- requirements-dev.txt | 2 +- requirements-docs.txt | 5 +++-- requirements-tests.in | 1 + requirements-tests.txt | 6 +++--- requirements.txt | 6 +++--- 6 files changed, 13 insertions(+), 10 deletions(-) diff --git a/requirements-dev.in b/requirements-dev.in index 84104679..2ffc5488 100644 --- a/requirements-dev.in +++ b/requirements-dev.in @@ -4,4 +4,5 @@ flake8==3.5.0 # flake8 3.6.0 is giving erroneous "W605 invalid escape sequence" pre-commit invoke twine -nodeenv \ No newline at end of file +nodeenv +pyyaml>=4.2b1 \ No newline at end of file diff --git a/requirements-dev.txt b/requirements-dev.txt index 440f932b..fd491663 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -22,7 +22,7 @@ pre-commit==1.14.3 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer -pyyaml==3.13 # via aspy.yaml, pre-commit +pyyaml==4.2b4 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine diff --git a/requirements-docs.txt b/requirements-docs.txt index 4ebea0a0..a6c05582 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -17,10 +17,11 @@ 
babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.89 -botocore==1.12.89 +boto3==1.9.90 +botocore==1.12.90 celery[redis]==4.2.1 certifi==2018.11.29 +certsrv==2.1.1 cffi==1.11.5 chardet==3.0.4 click==7.0 diff --git a/requirements-tests.in b/requirements-tests.in index 02a2b0ae..dcd3d0c7 100644 --- a/requirements-tests.in +++ b/requirements-tests.in @@ -11,3 +11,4 @@ pytest pytest-flask pytest-mock requests-mock +pyyaml>=4.2b1 \ No newline at end of file diff --git a/requirements-tests.txt b/requirements-tests.txt index 174e60ff..e4a34412 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,9 +8,9 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.89 # via moto +boto3==1.9.90 # via moto boto==2.49.0 # via moto -botocore==1.12.89 # via boto3, moto, s3transfer +botocore==1.12.90 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.11.5 # via cryptography chardet==3.0.4 # via requests @@ -50,7 +50,7 @@ pytest==4.2.0 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto -pyyaml==3.13 # via pyaml +pyyaml==4.2b4 requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses responses==0.10.5 # via moto diff --git a/requirements.txt b/requirements.txt index fd164c3d..f24d274e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,11 +15,11 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.89 -botocore==1.12.89 +boto3==1.9.90 +botocore==1.12.90 celery[redis]==4.2.1 certifi==2018.11.29 -certsrv==2.1.0 +certsrv==2.1.1 cffi==1.11.5 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask From 42af082d3a5a4095300f961533665991947fbdae Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 11 Feb 2019 10:22:54 -0800 Subject: [PATCH 095/357] updating requirements --- requirements-dev.txt | 4 ++-- requirements-docs.txt | 6 +++--- requirements-tests.txt | 6 +++--- requirements.txt | 6 +++--- 4 files changed, 11 insertions(+), 11 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index fd491663..f5d6be3c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -28,9 +28,9 @@ requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit -tqdm==4.30.0 # via twine +tqdm==4.31.1 # via twine twine==1.12.1 urllib3==1.24.1 # via requests -virtualenv==16.3.0 # via pre-commit +virtualenv==16.4.0 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.3.3 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index a6c05582..80822929 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.90 -botocore==1.12.90 +boto3==1.9.91 +botocore==1.12.91 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 @@ -94,4 +94,4 @@ tabulate==0.8.3 urllib3==1.24.1 vine==1.2.0 werkzeug==0.14.1 -xmltodict==0.11.0 +xmltodict==0.12.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index e4a34412..60cda2d7 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,9 +8,9 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest 
attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.90 # via moto +boto3==1.9.91 # via moto boto==2.49.0 # via moto -botocore==1.12.90 # via boto3, moto, s3transfer +botocore==1.12.91 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.11.5 # via cryptography chardet==3.0.4 # via requests @@ -61,4 +61,4 @@ urllib3==1.24.1 # via botocore, requests websocket-client==0.54.0 # via docker werkzeug==0.14.1 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk -xmltodict==0.11.0 # via moto +xmltodict==0.12.0 # via moto diff --git a/requirements.txt b/requirements.txt index f24d274e..8bc96ac2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.90 -botocore==1.12.90 +boto3==1.9.91 +botocore==1.12.91 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 @@ -84,4 +84,4 @@ tabulate==0.8.3 urllib3==1.24.1 # via botocore, requests vine==1.2.0 # via amqp werkzeug==0.14.1 # via flask -xmltodict==0.11.0 +xmltodict==0.12.0 From 8abf95063cf53c0087b5c54c2e5a2e952bac4b5f Mon Sep 17 00:00:00 2001 From: Ronald Moesbergen Date: Thu, 14 Feb 2019 11:57:27 +0100 Subject: [PATCH 096/357] Implement a ALLOW_CERT_DELETION option (boolean, default False). When enabled, the certificate delete API call will work and the UI will no longer display deleted certificates. When disabled (the default), the delete API call will not work (405 method not allowed) and the UI will show all certificates, regardless of the 'deleted' flag. --- docs/administration.rst | 7 +++++++ lemur/certificates/models.py | 2 +- lemur/certificates/service.py | 3 +++ lemur/certificates/views.py | 16 +++++++-------- lemur/migrations/versions/318b66568358_.py | 23 ++++++++++++++++++++++ lemur/tests/conf.py | 2 ++ lemur/tests/test_certificates.py | 4 ++-- 7 files changed, 46 insertions(+), 11 deletions(-) create mode 100644 lemur/migrations/versions/318b66568358_.py diff --git a/docs/administration.rst b/docs/administration.rst index 9d6c8d12..352318f5 100644 --- a/docs/administration.rst +++ b/docs/administration.rst @@ -161,6 +161,13 @@ Specifying the `SQLALCHEMY_MAX_OVERFLOW` to 0 will enforce limit to not create c Dump all imported or generated CSR and certificate details to stdout using OpenSSL. (default: `False`) +.. data:: ALLOW_CERT_DELETION + :noindex: + + When set to True, certificates can be marked as deleted via the API and deleted certificates will not be displayed + in the UI. When set to False (the default), the certificate delete API will always return "405 method not allowed" + and deleted certificates will always be visible in the UI. 
(default: `False`) + Certificate Default Options --------------------------- diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index 34305cc2..ab43cd01 100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -101,7 +101,7 @@ class Certificate(db.Model): issuer = Column(String(128)) serial = Column(String(128)) cn = Column(String(128)) - deleted = Column(Boolean, index=True) + deleted = Column(Boolean, index=True, default=False) dns_provider_id = Column(Integer(), ForeignKey('dns_providers.id', ondelete='CASCADE'), nullable=True) not_before = Column(ArrowType) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index d5012012..22009043 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -381,6 +381,9 @@ def render(args): now = arrow.now().format('YYYY-MM-DD') query = query.filter(Certificate.not_after <= to).filter(Certificate.not_after >= now) + if current_app.config.get('ALLOW_CERT_DELETION', False): + query = query.filter(Certificate.deleted == False) # noqa + result = database.sort_and_page(query, Certificate, args) return result diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index 948c44d6..37ebf518 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -6,10 +6,9 @@ .. moduleauthor:: Kevin Glisson """ import base64 -import arrow from builtins import str -from flask import Blueprint, make_response, jsonify, g +from flask import Blueprint, make_response, jsonify, g, current_app from flask_restful import reqparse, Api, inputs from lemur.common.schema import validate_schema @@ -678,17 +677,21 @@ class Certificates(AuthenticatedResource): .. sourcecode:: http - HTTP/1.1 200 OK + HTTP/1.1 204 OK :reqheader Authorization: OAuth token to authenticate :statuscode 204: no error :statuscode 403: unauthenticated :statusoode 404: certificate not found + :statusoode 405: certificate deletion is disabled """ + if not current_app.config.get('ALLOW_CERT_DELETION', False): + return dict(message="Certificate deletion is disabled"), 405 + cert = service.get(certificate_id) - if not cert: + if not cert or cert.deleted: return dict(message="Cannot find specified certificate"), 404 # allow creators @@ -699,12 +702,9 @@ class Certificates(AuthenticatedResource): if not permission.can(): return dict(message='You are not authorized to delete this certificate'), 403 - if arrow.get(cert.not_after) > arrow.utcnow(): - return dict(message='Certificate is still valid, only expired certificates can be deleted'), 412 - service.update(certificate_id, deleted=True) log_service.create(g.current_user, 'delete_cert', certificate=cert) - return '', 204 + return 'Certificate deleted', 204 class NotificationCertificatesList(AuthenticatedResource): diff --git a/lemur/migrations/versions/318b66568358_.py b/lemur/migrations/versions/318b66568358_.py new file mode 100644 index 00000000..9d4aa48d --- /dev/null +++ b/lemur/migrations/versions/318b66568358_.py @@ -0,0 +1,23 @@ +""" Set 'deleted' flag from null to false on all certificates once + +Revision ID: 318b66568358 +Revises: 9f79024fe67b +Create Date: 2019-02-05 15:42:25.477587 + +""" + +# revision identifiers, used by Alembic. 
+revision = '318b66568358' +down_revision = '9f79024fe67b' + +from alembic import op + + +def upgrade(): + connection = op.get_bind() + # Delete duplicate entries + connection.execute('UPDATE certificates SET deleted = false WHERE deleted IS NULL') + + +def downgrade(): + pass diff --git a/lemur/tests/conf.py b/lemur/tests/conf.py index bbe155cd..525200cf 100644 --- a/lemur/tests/conf.py +++ b/lemur/tests/conf.py @@ -186,3 +186,5 @@ LDAP_BASE_DN = 'dc=example,dc=com' LDAP_EMAIL_DOMAIN = 'example.com' LDAP_REQUIRED_GROUP = 'Lemur Access' LDAP_DEFAULT_ROLE = 'role1' + +ALLOW_CERT_DELETION = True diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index 8247c36b..75a29e16 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -737,8 +737,8 @@ def test_certificate_put_with_data(client, certificate, issuer_plugin): @pytest.mark.parametrize("token,status", [ (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 412), - (VALID_ADMIN_API_TOKEN, 412), + (VALID_ADMIN_HEADER_TOKEN, 204), + (VALID_ADMIN_API_TOKEN, 404), ('', 401) ]) def test_certificate_delete(client, token, status): From 29bda6c00d2351cc4a08ed797c9940d8615a9c73 Mon Sep 17 00:00:00 2001 From: Ronald Moesbergen Date: Thu, 14 Feb 2019 11:58:29 +0100 Subject: [PATCH 097/357] Fix typo's --- lemur/certificates/views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index 37ebf518..b464b3ed 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -682,8 +682,8 @@ class Certificates(AuthenticatedResource): :reqheader Authorization: OAuth token to authenticate :statuscode 204: no error :statuscode 403: unauthenticated - :statusoode 404: certificate not found - :statusoode 405: certificate deletion is disabled + :statuscode 404: certificate not found + :statuscode 405: certificate deletion is disabled """ if not current_app.config.get('ALLOW_CERT_DELETION', False): From eaa73998a0b17858d83fe21080f78f2ec4d2c1f3 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 19 Feb 2019 15:03:15 -0500 Subject: [PATCH 098/357] adding lemur_vault destination plugin --- lemur/plugins/lemur_vault/__init__.py | 5 ++ lemur/plugins/lemur_vault/plugin.py | 85 +++++++++++++++++++++++++++ setup.py | 3 +- 3 files changed, 92 insertions(+), 1 deletion(-) create mode 100644 lemur/plugins/lemur_vault/__init__.py create mode 100644 lemur/plugins/lemur_vault/plugin.py diff --git a/lemur/plugins/lemur_vault/__init__.py b/lemur/plugins/lemur_vault/__init__.py new file mode 100644 index 00000000..8ce5a7f3 --- /dev/null +++ b/lemur/plugins/lemur_vault/__init__.py @@ -0,0 +1,5 @@ +try: + VERSION = __import__('pkg_resources') \ + .get_distribution(__name__).version +except Exception as e: + VERSION = 'unknown' diff --git a/lemur/plugins/lemur_vault/plugin.py b/lemur/plugins/lemur_vault/plugin.py new file mode 100644 index 00000000..505170ad --- /dev/null +++ b/lemur/plugins/lemur_vault/plugin.py @@ -0,0 +1,85 @@ +""" +.. module: lemur.plugins.lemur_vault.plugin + :platform: Unix + :copyright: (c) 2019 + :license: Apache, see LICENCE for more details. + + Plugin for uploading certificates and private key as secret to hashi vault + that can be pulled down by end point nodes. + +.. 
moduleauthor:: Christopher Jolley +""" +import hvac + +#import lemur_vault +from flask import current_app + +from lemur.common.defaults import common_name +from lemur.common.utils import parse_certificate +from lemur.plugins.bases import DestinationPlugin + +class VaultDestinationPlugin(DestinationPlugin): + """Hashicorp Vault Destination plugin for Lemur""" + title = 'Vault' + slug = 'hashi-vault-destination' + description = 'Allow the uploading of certificates to Hashi Vault as secret' + + author = 'Christopher Jolley' + author_url = 'https://github.com/alwaysjolley/lemur' + + options = [ + { + 'name': 'vaultMount', + 'type': 'str', + 'required': True, + 'validation': '^[a-zA-Z0-9]+$', + 'helpMessage': 'Must be a valid Vault secrets mount name!' + }, + { + 'name': 'vaultPath', + 'type': 'str', + 'required': True, + 'validation': '^([a-zA-Z0-9_-]+/?)+$', + 'helpMessage': 'Must be a valid Vault secrets path' + }, + { + 'name': 'vaultUrl', + 'type': 'str', + 'required': True, + 'validation': '^https?://[a-zA-Z0-9.-]+(?::[0-9]+)?$', + 'helpMessage': 'Must be a valid Vault server url' + } + ] + + def __init__(self, *args, **kwargs): + super(VaultDestinationPlugin, self).__init__(*args, **kwargs) + + def upload(self, name, body, private_key, cert_chain, options, **kwargs): + """ + Upload certificate and private key + + :param private_key: + :param cert_chain: + :return: + """ + cn = common_name(parse_certificate(body)) + data = {} + #current_app.logger.warning("Cert body content: {0}".format(body)) + + token = current_app.config.get('VAULT_TOKEN') + + mount = self.get_option('vaultMount', options) + path = '{0}/{1}'.format(self.get_option('vaultPath', options),cn) + url = self.get_option('vaultUrl', options) + + client = hvac.Client(url=url, token=token) + + data['cert'] = cert_chain + data['key'] = private_key + + ## upload certificate and key + try: + client.secrets.kv.v1.create_or_update_secret(path=path, mount_point=mount, secret=data) + except Exception as err: + current_app.logger.exception( + "Exception uploading secret to vault: {0}".format(err), exc_info=True) diff --git a/setup.py b/setup.py index 1511b013..b5dcdb3b 100644 --- a/setup.py +++ b/setup.py @@ -154,7 +154,8 @@ setup( 'digicert_cis_issuer = lemur.plugins.lemur_digicert.plugin:DigiCertCISIssuerPlugin', 'digicert_cis_source = lemur.plugins.lemur_digicert.plugin:DigiCertCISSourcePlugin', 'csr_export = lemur.plugins.lemur_csr.plugin:CSRExportPlugin', - 'sftp_destination = lemur.plugins.lemur_sftp.plugin:SFTPDestinationPlugin' + 'sftp_destination = lemur.plugins.lemur_sftp.plugin:SFTPDestinationPlugin', + 'vault_desination = lemur.plugins.lemur_vault.plugin:VaultDestinationPlugin' ], }, classifiers=[ From a0ca486f0f9975eeffe6dddb13ed3fe60eee9661 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 19 Feb 2019 15:22:11 -0500 Subject: [PATCH 099/357] adding hvac and updating requrements --- requirements-dev.txt | 12 ++++++------ requirements-docs.txt | 39 ++++++++++++++++++++------------------- requirements-tests.txt | 24 ++++++++++++------------ requirements.in | 3 ++- requirements.txt | 35 ++++++++++++++++++----------------- 5 files changed, 58 insertions(+), 55 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index ac35f3e9..6e2a3fb9 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -11,7 +11,7 @@ cfgv==1.4.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.2.1 # via pre-commit +identify==1.2.2 # via pre-commit idna==2.8 # via 
requests importlib-metadata==0.8 # via pre-commit importlib-resources==1.0.2 # via pre-commit @@ -19,19 +19,19 @@ invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.14.2 +pre-commit==1.14.4 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer pyyaml==3.13 # via aspy.yaml, pre-commit readme-renderer==24.0 # via twine -requests-toolbelt==0.9.0 # via twine +requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit -tqdm==4.30.0 # via twine -twine==1.12.1 +tqdm==4.31.1 # via twine +twine==1.13.0 urllib3==1.24.1 # via requests -virtualenv==16.3.0 # via pre-commit +virtualenv==16.4.0 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.3.3 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index 15085766..e9dd92cb 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,24 +4,24 @@ # # pip-compile --no-index --output-file requirements-docs.txt requirements-docs.in # -acme==0.30.2 +acme==0.31.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.7 -amqp==2.4.0 +amqp==2.4.1 aniso8601==4.1.0 -arrow==0.13.0 +arrow==0.13.1 asn1crypto==0.24.0 asyncpool==1.0 babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.86 -botocore==1.12.86 +boto3==1.9.98 +botocore==1.12.98 celery[redis]==4.2.1 certifi==2018.11.29 -cffi==1.11.5 +cffi==1.12.1 chardet==3.0.4 click==7.0 cloudflare==2.1.0 @@ -33,7 +33,7 @@ dyn==1.8.1 flask-bcrypt==0.7.1 flask-cors==3.0.7 flask-mail==0.9.1 -flask-migrate==2.3.1 +flask-migrate==2.4.0 flask-principal==0.4.0 flask-restful==0.3.7 flask-script==2.0.6 @@ -41,6 +41,7 @@ flask-sqlalchemy==2.3.2 flask==1.0.2 future==0.17.1 gunicorn==19.9.0 +hvac==0.7.2 idna==2.8 imagesize==1.1.0 # via sphinx inflection==0.3.1 @@ -49,17 +50,17 @@ jinja2==2.10 jmespath==0.9.3 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.2.2.post1 +kombu==4.3.0 lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.0 -marshmallow-sqlalchemy==0.15.0 -marshmallow==2.18.0 +marshmallow-sqlalchemy==0.16.0 +marshmallow==2.18.1 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx paramiko==2.4.2 -pbr==5.1.1 +pbr==5.1.2 pem==18.2.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 @@ -71,26 +72,26 @@ pynacl==1.3.0 pyopenssl==19.0.0 pyparsing==2.3.1 # via packaging pyrfc3339==1.1 -python-dateutil==2.7.5 -python-editor==1.0.3 +python-dateutil==2.8.0 +python-editor==1.0.4 pytz==2018.9 pyyaml==3.13 raven[flask]==6.10.0 redis==2.10.6 -requests-toolbelt==0.9.0 +requests-toolbelt==0.9.1 requests[security]==2.21.0 retrying==1.3.3 -s3transfer==0.1.13 +s3transfer==0.2.0 six==1.12.0 snowballstemmer==1.2.1 # via sphinx -sphinx-rtd-theme==0.4.2 -sphinx==1.8.3 +sphinx-rtd-theme==0.4.3 +sphinx==1.8.4 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.17 +sqlalchemy==1.2.18 tabulate==0.8.3 urllib3==1.24.1 vine==1.2.0 werkzeug==0.14.1 -xmltodict==0.11.0 +xmltodict==0.12.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index c326e951..1bb8ba03 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -5,14 +5,14 @@ # pip-compile --no-index --output-file requirements-tests.txt requirements-tests.in # asn1crypto==0.24.0 # via cryptography -atomicwrites==1.2.1 # via pytest +atomicwrites==1.3.0 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto 
-boto3==1.9.86 # via moto +boto3==1.9.98 # via moto boto==2.49.0 # via moto -botocore==1.12.86 # via boto3, moto, s3transfer +botocore==1.12.98 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests -cffi==1.11.5 # via cryptography +cffi==1.12.1 # via cryptography chardet==3.0.4 # via requests click==7.0 # via flask coverage==4.5.2 @@ -34,10 +34,10 @@ jsondiff==1.1.1 # via moto jsonpickle==1.1 # via aws-xray-sdk markupsafe==1.1.0 # via jinja2 mock==2.0.0 # via moto -more-itertools==5.0.0 # via pytest +more-itertools==6.0.0 # via pytest moto==1.3.7 nose==1.3.7 -pbr==5.1.1 # via mock +pbr==5.1.2 # via mock pluggy==0.8.1 # via pytest py==1.7.0 # via pytest pyaml==18.11.0 # via moto @@ -45,20 +45,20 @@ pycparser==2.19 # via cffi pycryptodome==3.7.3 # via python-jose pyflakes==2.1.0 pytest-flask==0.14.0 -pytest-mock==1.10.0 -pytest==4.1.1 -python-dateutil==2.7.5 # via botocore, faker, freezegun, moto +pytest-mock==1.10.1 +pytest==4.3.0 +python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto pyyaml==3.13 # via pyaml requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses responses==0.10.5 # via moto -s3transfer==0.1.13 # via boto3 -six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, more-itertools, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client +s3transfer==0.2.0 # via boto3 +six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker urllib3==1.24.1 # via botocore, requests websocket-client==0.54.0 # via docker werkzeug==0.14.1 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk -xmltodict==0.11.0 # via moto +xmltodict==0.12.0 # via moto diff --git a/requirements.in b/requirements.in index 9824650b..1147cc8d 100644 --- a/requirements.in +++ b/requirements.in @@ -23,6 +23,7 @@ Flask Flask-Cors future gunicorn +hvac # required for the vault destination plugin inflection jinja2 lockfile @@ -42,4 +43,4 @@ retrying six SQLAlchemy-Utils tabulate -xmltodict \ No newline at end of file +xmltodict diff --git a/requirements.txt b/requirements.txt index c595e509..edd56b09 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,22 +4,22 @@ # # pip-compile --no-index --output-file requirements.txt requirements.in # -acme==0.30.2 +acme==0.31.0 alembic-autogenerate-enums==0.0.2 alembic==1.0.7 # via flask-migrate -amqp==2.4.0 # via kombu +amqp==2.4.1 # via kombu aniso8601==4.1.0 # via flask-restful -arrow==0.13.0 +arrow==0.13.1 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.86 -botocore==1.12.86 +boto3==1.9.98 +botocore==1.12.98 celery[redis]==4.2.1 certifi==2018.11.29 -cffi==1.11.5 # via bcrypt, cryptography, pynacl +cffi==1.12.1 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask cloudflare==2.1.0 @@ -31,7 +31,7 @@ dyn==1.8.1 flask-bcrypt==0.7.1 flask-cors==3.0.7 flask-mail==0.9.1 -flask-migrate==2.3.1 +flask-migrate==2.4.0 flask-principal==0.4.0 flask-restful==0.3.7 flask-script==2.0.6 @@ -39,6 +39,7 @@ flask-sqlalchemy==2.3.2 flask==1.0.2 future==0.17.1 gunicorn==19.9.0 +hvac==0.7.2 idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask @@ -46,16 +47,16 @@ jinja2==2.10 
jmespath==0.9.3 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.2.2.post1 # via celery +kombu==4.3.0 # via celery lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.0 # via jinja2, mako -marshmallow-sqlalchemy==0.15.0 -marshmallow==2.18.0 +marshmallow-sqlalchemy==0.16.0 +marshmallow==2.18.1 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 -pbr==5.1.1 # via mock +pbr==5.1.2 # via mock pem==18.2.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 # via python-ldap @@ -65,22 +66,22 @@ pyjwt==1.7.1 pynacl==1.3.0 # via paramiko pyopenssl==19.0.0 pyrfc3339==1.1 # via acme -python-dateutil==2.7.5 # via alembic, arrow, botocore -python-editor==1.0.3 # via alembic +python-dateutil==2.8.0 # via alembic, arrow, botocore +python-editor==1.0.4 # via alembic python-ldap==3.1.0 pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 pyyaml==3.13 # via cloudflare raven[flask]==6.10.0 redis==2.10.6 -requests-toolbelt==0.9.0 # via acme +requests-toolbelt==0.9.1 # via acme requests[security]==2.21.0 retrying==1.3.3 -s3transfer==0.1.13 # via boto3 +s3transfer==0.2.0 # via boto3 six==1.12.0 sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.17 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy==1.2.18 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 urllib3==1.24.1 # via botocore, requests vine==1.2.0 # via amqp werkzeug==0.14.1 # via flask -xmltodict==0.11.0 +xmltodict==0.12.0 From ef0c08dfd9927e5d9db149a64685c9b8ba9fb350 Mon Sep 17 00:00:00 2001 From: Ronald Moesbergen Date: Thu, 21 Feb 2019 16:33:43 +0100 Subject: [PATCH 100/357] Fix: when no alias is entered when exporting a certificate, the alias is set to 'blah'. This fix sets it to the common name instead. 
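For reviewers who have not used these helpers before: the new default is simply the certificate's subject common name. A rough standalone equivalent is sketched below; it calls cryptography directly instead of Lemur's parse_certificate/common_name helpers, so treat it as an illustration of the behaviour rather than the exact implementation.

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.x509.oid import NameOID


def default_alias(pem_body):
    # Parse the PEM body and use the subject CN as the export alias when the
    # caller did not supply one (previously this fell back to the literal 'blah').
    cert = x509.load_pem_x509_certificate(pem_body.encode('utf-8'), default_backend())
    attrs = cert.subject.get_attributes_for_oid(NameOID.COMMON_NAME)
    return attrs[0].value if attrs else None
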
--- lemur/plugins/lemur_java/plugin.py | 4 +++- lemur/plugins/lemur_openssl/plugin.py | 5 +++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/lemur/plugins/lemur_java/plugin.py b/lemur/plugins/lemur_java/plugin.py index 5aab5342..7eb33b90 100644 --- a/lemur/plugins/lemur_java/plugin.py +++ b/lemur/plugins/lemur_java/plugin.py @@ -15,6 +15,8 @@ from cryptography.fernet import Fernet from lemur.utils import mktempfile, mktemppath from lemur.plugins.bases import ExportPlugin from lemur.plugins import lemur_java as java +from lemur.common.utils import parse_certificate +from lemur.common.defaults import common_name def run_process(command): @@ -233,7 +235,7 @@ class JavaKeystoreExportPlugin(ExportPlugin): if self.get_option('alias', options): alias = self.get_option('alias', options) else: - alias = "blah" + alias = common_name(parse_certificate(body)) with mktemppath() as jks_tmp: create_keystore(body, chain, jks_tmp, key, alias, passphrase) diff --git a/lemur/plugins/lemur_openssl/plugin.py b/lemur/plugins/lemur_openssl/plugin.py index 9ddce925..6d6f89aa 100644 --- a/lemur/plugins/lemur_openssl/plugin.py +++ b/lemur/plugins/lemur_openssl/plugin.py @@ -14,7 +14,8 @@ from flask import current_app from lemur.utils import mktempfile, mktemppath from lemur.plugins.bases import ExportPlugin from lemur.plugins import lemur_openssl as openssl -from lemur.common.utils import get_psuedo_random_string +from lemur.common.utils import get_psuedo_random_string, parse_certificate +from lemur.common.defaults import common_name def run_process(command): @@ -122,7 +123,7 @@ class OpenSSLExportPlugin(ExportPlugin): if self.get_option('alias', options): alias = self.get_option('alias', options) else: - alias = "blah" + alias = common_name(parse_certificate(body)) type = self.get_option('type', options) From 14d8596b8a175cca79d41eaba19fc53a3dd249fd Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 21 Feb 2019 20:19:14 -0800 Subject: [PATCH 101/357] updating requirements --- requirements-dev.txt | 8 ++++---- requirements-docs.txt | 18 +++++++++--------- requirements-tests.txt | 16 ++++++++-------- requirements.txt | 16 ++++++++-------- 4 files changed, 29 insertions(+), 29 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index f5d6be3c..c7d7986c 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements-dev.txt requirements-dev.in +# pip-compile --output-file requirements-dev.txt requirements-dev.in -U --no-index # aspy.yaml==1.1.2 # via pre-commit bleach==3.1.0 # via readme-renderer @@ -11,14 +11,14 @@ cfgv==1.4.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.2.1 # via pre-commit +identify==1.2.2 # via pre-commit idna==2.8 # via requests importlib-metadata==0.8 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.14.3 +pre-commit==1.14.4 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer @@ -29,7 +29,7 @@ requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit tqdm==4.31.1 # via twine -twine==1.12.1 +twine==1.13.0 urllib3==1.24.1 # via requests virtualenv==16.4.0 # via pre-commit webencodings==0.5.1 # via bleach diff --git a/requirements-docs.txt b/requirements-docs.txt index 
80822929..c3848b44 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements-docs.txt requirements-docs.in +# pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index # acme==0.31.0 alabaster==0.7.12 # via sphinx @@ -10,19 +10,19 @@ alembic-autogenerate-enums==0.0.2 alembic==1.0.7 amqp==2.4.1 aniso8601==4.1.0 -arrow==0.13.0 +arrow==0.13.1 asn1crypto==0.24.0 asyncpool==1.0 babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.91 -botocore==1.12.91 +boto3==1.9.100 +botocore==1.12.100 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 -cffi==1.11.5 +cffi==1.12.1 chardet==3.0.4 click==7.0 cloudflare==2.1.0 @@ -34,7 +34,7 @@ dyn==1.8.1 flask-bcrypt==0.7.1 flask-cors==3.0.7 flask-mail==0.9.1 -flask-migrate==2.3.1 +flask-migrate==2.4.0 flask-principal==0.4.0 flask-restful==0.3.7 flask-script==2.0.6 @@ -55,7 +55,7 @@ lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.0 marshmallow-sqlalchemy==0.16.0 -marshmallow==2.18.0 +marshmallow==2.18.1 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx @@ -84,12 +84,12 @@ retrying==1.3.3 s3transfer==0.2.0 six==1.12.0 snowballstemmer==1.2.1 # via sphinx -sphinx-rtd-theme==0.4.2 +sphinx-rtd-theme==0.4.3 sphinx==1.8.4 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.17 +sqlalchemy==1.2.18 tabulate==0.8.3 urllib3==1.24.1 vine==1.2.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index 60cda2d7..ad97675e 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -2,17 +2,17 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements-tests.txt requirements-tests.in +# pip-compile --output-file requirements-tests.txt requirements-tests.in -U --no-index # asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.91 # via moto +boto3==1.9.100 # via moto boto==2.49.0 # via moto -botocore==1.12.91 # via boto3, moto, s3transfer +botocore==1.12.100 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests -cffi==1.11.5 # via cryptography +cffi==1.12.1 # via cryptography chardet==3.0.4 # via requests click==7.0 # via flask coverage==4.5.2 @@ -34,19 +34,19 @@ jsondiff==1.1.1 # via moto jsonpickle==1.1 # via aws-xray-sdk markupsafe==1.1.0 # via jinja2 mock==2.0.0 # via moto -more-itertools==5.0.0 # via pytest +more-itertools==6.0.0 # via pytest moto==1.3.7 nose==1.3.7 pbr==5.1.2 # via mock pluggy==0.8.1 # via pytest -py==1.7.0 # via pytest +py==1.8.0 # via pytest pyaml==18.11.0 # via moto pycparser==2.19 # via cffi pycryptodome==3.7.3 # via python-jose pyflakes==2.1.0 pytest-flask==0.14.0 pytest-mock==1.10.1 -pytest==4.2.0 +pytest==4.3.0 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto @@ -55,7 +55,7 @@ requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses responses==0.10.5 # via moto s3transfer==0.2.0 # via boto3 -six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, more-itertools, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client +six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, 
requests-mock, responses, websocket-client text-unidecode==1.2 # via faker urllib3==1.24.1 # via botocore, requests websocket-client==0.54.0 # via docker diff --git a/requirements.txt b/requirements.txt index 8bc96ac2..0319fa3f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,25 +2,25 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements.txt requirements.in +# pip-compile --output-file requirements.txt requirements.in -U --no-index # acme==0.31.0 alembic-autogenerate-enums==0.0.2 alembic==1.0.7 # via flask-migrate amqp==2.4.1 # via kombu aniso8601==4.1.0 # via flask-restful -arrow==0.13.0 +arrow==0.13.1 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.91 -botocore==1.12.91 +boto3==1.9.100 +botocore==1.12.100 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 -cffi==1.11.5 # via bcrypt, cryptography, pynacl +cffi==1.12.1 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask cloudflare==2.1.0 @@ -32,7 +32,7 @@ dyn==1.8.1 flask-bcrypt==0.7.1 flask-cors==3.0.7 flask-mail==0.9.1 -flask-migrate==2.3.1 +flask-migrate==2.4.0 flask-principal==0.4.0 flask-restful==0.3.7 flask-script==2.0.6 @@ -52,7 +52,7 @@ lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.0 # via jinja2, mako marshmallow-sqlalchemy==0.16.0 -marshmallow==2.18.0 +marshmallow==2.18.1 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 @@ -79,7 +79,7 @@ retrying==1.3.3 s3transfer==0.2.0 # via boto3 six==1.12.0 sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.17 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy==1.2.18 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 urllib3==1.24.1 # via botocore, requests vine==1.2.0 # via amqp From cd65a36437e05d6cd4f5e29b76c6f5e76567beb1 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Mon, 25 Feb 2019 09:42:07 -0500 Subject: [PATCH 102/357] - support multiple bundle configuration, nginx, apache, cert only - update vault destination to support multi cert under one object - added san list as key value - read and update object with new keys, keeping other keys, allowing us to keep an iterable list of keys in an object for deploying multiple certs to a single node --- .gitignore | 5 ++ lemur/plugins/lemur_vault/plugin.py | 81 +++++++++++++++++++++++++---- requirements-dev.txt | 6 +-- requirements-docs.txt | 10 ++-- requirements-tests.txt | 16 +++--- requirements.txt | 10 ++-- 6 files changed, 98 insertions(+), 30 deletions(-) diff --git a/.gitignore b/.gitignore index 97af00ca..72e85f26 100644 --- a/.gitignore +++ b/.gitignore @@ -26,6 +26,11 @@ package-lock.json /lemur/static/dist/ /lemur/static/app/vendor/ /wheelhouse +/lemur/lib +/lemur/bin +/lemur/lib64 +/lemur/include + docs/_build .editorconfig .idea diff --git a/lemur/plugins/lemur_vault/plugin.py b/lemur/plugins/lemur_vault/plugin.py index 505170ad..58a9e601 100644 --- a/lemur/plugins/lemur_vault/plugin.py +++ b/lemur/plugins/lemur_vault/plugin.py @@ -18,6 +18,10 @@ from lemur.common.defaults import common_name from lemur.common.utils import parse_certificate from lemur.plugins.bases import DestinationPlugin +from cryptography import x509 +from cryptography.hazmat.backends import default_backend + + class VaultDestinationPlugin(DestinationPlugin): """Hashicorp Vault Destination plugin for Lemur""" 
title = 'Vault' @@ -48,6 +52,25 @@ class VaultDestinationPlugin(DestinationPlugin): 'required': True, 'validation': '^https?://[a-zA-Z0-9.-]+(?::[0-9]+)?$', 'helpMessage': 'Must be a valid Vault server url' + }, + { + 'name': 'bundleChain', + 'type': 'select', + 'value': 'cert only', + 'available': [ + 'Nginx', + 'Apache', + 'no chain' + ], + 'required': True, + 'helpMessage': 'Bundle the chain into the certificate' + }, + { + 'name': 'objectName', + 'type': 'str', + 'required': False, + 'validation': '[0-9a-zA-Z:_-]+', + 'helpMessage': 'Name to bundle certs under, if blank use cn' } ] @@ -62,24 +85,64 @@ class VaultDestinationPlugin(DestinationPlugin): :param cert_chain: :return: """ - cn = common_name(parse_certificate(body)) - data = {} - #current_app.logger.warning("Cert body content: {0}".format(body)) + cname = common_name(parse_certificate(body)) + secret = {'data':{}} + key_name = '{0}.key'.format(cname) + cert_name = '{0}.crt'.format(cname) + chain_name = '{0}.chain'.format(cname) + sans_name = '{0}.san'.format(cname) token = current_app.config.get('VAULT_TOKEN') mount = self.get_option('vaultMount', options) - path = '{0}/{1}'.format(self.get_option('vaultPath', options),cn) + path = self.get_option('vaultPath', options) url = self.get_option('vaultUrl', options) + bundle = self.get_option('bundleChain', options) + obj_name = self.get_option('objectName', options) client = hvac.Client(url=url, token=token) + if obj_name: + path = '{0}/{1}'.format(path, obj_name) + else: + path = '{0}/{1}'.format(path, cname) - data['cert'] = cert_chain - data['key'] = private_key + secret = get_secret(url, token, mount, path) + - ## upload certificate and key + if bundle == 'Nginx' and cert_chain: + secret['data'][cert_name] = '{0}\n{1}'.format(body, cert_chain) + elif bundle == 'Apache' and cert_chain: + secret['data'][cert_name] = body + secret['data'][chain_name] = cert_chain + else: + secret['data'][cert_name] = body + secret['data'][key_name] = private_key + san_list = get_san_list(body) + if isinstance(san_list, list): + secret['data'][sans_name] = san_list try: - client.secrets.kv.v1.create_or_update_secret(path=path, mount_point=mount, secret=data) - except Exception as err: + client.secrets.kv.v1.create_or_update_secret( + path=path, mount_point=mount, secret=secret['data']) + except ConnectionError as err: current_app.logger.exception( "Exception uploading secret to vault: {0}".format(err), exc_info=True) + +def get_san_list(body): + """ parse certificate for SAN names and return list, return empty list on error """ + try: + byte_body = body.encode('utf-8') + cert = x509.load_pem_x509_certificate(byte_body, default_backend()) + ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME) + return ext.value.get_values_for_type(x509.DNSName) + except: + pass + return [] + +def get_secret(url, token, mount, path): + result = {'data': {}} + try: + client = hvac.Client(url=url, token=token) + result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount) + except: + pass + return result diff --git a/requirements-dev.txt b/requirements-dev.txt index 6e2a3fb9..fd487bd7 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements-dev.txt requirements-dev.in +# pip-compile --output-file requirements-dev.txt requirements-dev.in -U --no-index # aspy.yaml==1.1.2 # via pre-commit bleach==3.1.0 # via readme-renderer @@ -11,7 +11,7 
@@ cfgv==1.4.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.2.2 # via pre-commit +identify==1.3.0 # via pre-commit idna==2.8 # via requests importlib-metadata==0.8 # via pre-commit importlib-resources==1.0.2 # via pre-commit @@ -32,6 +32,6 @@ toml==0.10.0 # via pre-commit tqdm==4.31.1 # via twine twine==1.13.0 urllib3==1.24.1 # via requests -virtualenv==16.4.0 # via pre-commit +virtualenv==16.4.1 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.3.3 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index e9dd92cb..8b9c3f2b 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements-docs.txt requirements-docs.in +# pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index # acme==0.31.0 alabaster==0.7.12 # via sphinx @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.98 -botocore==1.12.98 +boto3==1.9.101 +botocore==1.12.101 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.12.1 @@ -47,13 +47,13 @@ imagesize==1.1.0 # via sphinx inflection==0.3.1 itsdangerous==1.1.0 jinja2==2.10 -jmespath==0.9.3 +jmespath==0.9.4 josepy==1.1.0 jsonlines==1.2.0 kombu==4.3.0 lockfile==0.12.2 mako==1.0.7 -markupsafe==1.1.0 +markupsafe==1.1.1 marshmallow-sqlalchemy==0.16.0 marshmallow==2.18.1 mock==2.0.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index 1bb8ba03..1c3a4969 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements-tests.txt requirements-tests.in +# pip-compile --output-file requirements-tests.txt requirements-tests.in -U --no-index # asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.98 # via moto +boto3==1.9.101 # via moto boto==2.49.0 # via moto -botocore==1.12.98 # via boto3, moto, s3transfer +botocore==1.12.101 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.12.1 # via cryptography chardet==3.0.4 # via requests @@ -29,17 +29,17 @@ future==0.17.1 # via python-jose idna==2.8 # via requests itsdangerous==1.1.0 # via flask jinja2==2.10 # via flask, moto -jmespath==0.9.3 # via boto3, botocore +jmespath==0.9.4 # via boto3, botocore jsondiff==1.1.1 # via moto jsonpickle==1.1 # via aws-xray-sdk -markupsafe==1.1.0 # via jinja2 +markupsafe==1.1.1 # via jinja2 mock==2.0.0 # via moto more-itertools==6.0.0 # via pytest moto==1.3.7 nose==1.3.7 pbr==5.1.2 # via mock -pluggy==0.8.1 # via pytest -py==1.7.0 # via pytest +pluggy==0.9.0 # via pytest +py==1.8.0 # via pytest pyaml==18.11.0 # via moto pycparser==2.19 # via cffi pycryptodome==3.7.3 # via python-jose @@ -58,7 +58,7 @@ s3transfer==0.2.0 # via boto3 six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker urllib3==1.24.1 # via botocore, requests -websocket-client==0.54.0 # via docker +websocket-client==0.55.0 # via docker werkzeug==0.14.1 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk xmltodict==0.12.0 # via moto diff --git a/requirements.txt b/requirements.txt index edd56b09..a8615094 100644 --- a/requirements.txt +++ 
b/requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements.txt requirements.in +# pip-compile --output-file requirements.txt requirements.in -U --no-index # acme==0.31.0 alembic-autogenerate-enums==0.0.2 @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.98 -botocore==1.12.98 +boto3==1.9.101 +botocore==1.12.101 celery[redis]==4.2.1 certifi==2018.11.29 cffi==1.12.1 # via bcrypt, cryptography, pynacl @@ -44,13 +44,13 @@ idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask jinja2==2.10 -jmespath==0.9.3 # via boto3, botocore +jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare kombu==4.3.0 # via celery lockfile==0.12.2 mako==1.0.7 # via alembic -markupsafe==1.1.0 # via jinja2, mako +markupsafe==1.1.1 # via jinja2, mako marshmallow-sqlalchemy==0.16.0 marshmallow==2.18.1 mock==2.0.0 # via acme From 40fac02d8b0dc9d411331eb25a4f16fddb774ab0 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 25 Feb 2019 19:05:54 -0800 Subject: [PATCH 103/357] the check_cert_signature() method was attempting to compare RSA and ECC signatures. If a ec public-key certificate is signed with an RSA key, then it can't be a self-signed certificate, in which case we just raise InvalidSignature. --- lemur/common/utils.py | 9 +++++++-- lemur/tests/test_utils.py | 3 ++- lemur/tests/vectors.py | 28 ++++++++++++++++++++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) diff --git a/lemur/common/utils.py b/lemur/common/utils.py index f3ac5fe7..7c9269cf 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -147,6 +147,8 @@ def generate_private_key(key_type): def check_cert_signature(cert, issuer_public_key): """ Check a certificate's signature against an issuer public key. + Before EC validation, make sure public key and signature are of the same type, + otherwise verification not possible (raise InvalidSignature) On success, returns None; on failure, raises UnsupportedAlgorithm or InvalidSignature. 
""" if isinstance(issuer_public_key, rsa.RSAPublicKey): @@ -160,9 +162,10 @@ def check_cert_signature(cert, issuer_public_key): else: padder = padding.PKCS1v15() issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, padder, cert.signature_hash_algorithm) + elif isinstance(issuer_public_key, ec.EllipticCurvePublicKey) and isinstance(cert.signature_hash_algorithm, ec.ECDSA): + issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, cert.signature_hash_algorithm) else: - # EllipticCurvePublicKey or DSAPublicKey - issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, cert.signature_hash_algorithm) + raise InvalidSignature def is_selfsigned(cert): @@ -176,6 +179,8 @@ def is_selfsigned(cert): return True except InvalidSignature: return False + except UnsupportedAlgorithm as e: + raise Exception(e) def is_weekend(date): diff --git a/lemur/tests/test_utils.py b/lemur/tests/test_utils.py index 3e226f0f..c44f7b9c 100644 --- a/lemur/tests/test_utils.py +++ b/lemur/tests/test_utils.py @@ -1,6 +1,6 @@ import pytest -from lemur.tests.vectors import SAN_CERT, INTERMEDIATE_CERT, ROOTCA_CERT +from lemur.tests.vectors import SAN_CERT, INTERMEDIATE_CERT, ROOTCA_CERT, EC_CERT_EXAMPLE def test_generate_private_key(): @@ -83,3 +83,4 @@ def test_is_selfsigned(selfsigned_cert): assert is_selfsigned(INTERMEDIATE_CERT) is False # Root CA certificates are also technically self-signed assert is_selfsigned(ROOTCA_CERT) is True + assert is_selfsigned(EC_CERT_EXAMPLE) is False diff --git a/lemur/tests/vectors.py b/lemur/tests/vectors.py index 5da37c61..9af77bf6 100644 --- a/lemur/tests/vectors.py +++ b/lemur/tests/vectors.py @@ -394,3 +394,31 @@ zm3Cn4Ul8DO26w9QS4fmZjmnPOZFXYMWoOR6osHzb62PWQ8FBMqXcdToBV2Q9Iw4 PiFAxlc0tVjlLqQ= -----END CERTIFICATE REQUEST----- """ + + +EC_CERT_STR = """ +-----BEGIN CERTIFICATE----- +MIIDxzCCAq+gAwIBAgIIHsJeci1JWAkwDQYJKoZIhvcNAQELBQAwVDELMAkGA1UE +BhMCVVMxHjAcBgNVBAoTFUdvb2dsZSBUcnVzdCBTZXJ2aWNlczElMCMGA1UEAxMc +R29vZ2xlIEludGVybmV0IEF1dGhvcml0eSBHMzAeFw0xOTAyMTMxNTM1NTdaFw0x +OTA1MDgxNTM1MDBaMGgxCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlh +MRYwFAYDVQQHDA1Nb3VudGFpbiBWaWV3MRMwEQYDVQQKDApHb29nbGUgTExDMRcw +FQYDVQQDDA53d3cuZ29vZ2xlLmNvbTBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IA +BKwMlIbd4rAwf6eWoa6RrR2w0s5k1M40XOORPf96PByPmld+qhjRMLvA/xcAxdCR +XdcMfaX6EUr0Zw8CepitMB2jggFSMIIBTjATBgNVHSUEDDAKBggrBgEFBQcDATAO +BgNVHQ8BAf8EBAMCB4AwGQYDVR0RBBIwEIIOd3d3Lmdvb2dsZS5jb20waAYIKwYB +BQUHAQEEXDBaMC0GCCsGAQUFBzAChiFodHRwOi8vcGtpLmdvb2cvZ3NyMi9HVFNH +SUFHMy5jcnQwKQYIKwYBBQUHMAGGHWh0dHA6Ly9vY3NwLnBraS5nb29nL0dUU0dJ +QUczMB0GA1UdDgQWBBQLovm8GG0oG91gOGCL58YPNoAlejAMBgNVHRMBAf8EAjAA +MB8GA1UdIwQYMBaAFHfCuFCaZ3Z2sS3ChtCDoH6mfrpLMCEGA1UdIAQaMBgwDAYK +KwYBBAHWeQIFAzAIBgZngQwBAgIwMQYDVR0fBCowKDAmoCSgIoYgaHR0cDovL2Ny +bC5wa2kuZ29vZy9HVFNHSUFHMy5jcmwwDQYJKoZIhvcNAQELBQADggEBAKFbmNOA +e3pJ7UVI5EmkAMZgSDRdrsLHV6F7WluuyYCyE/HFpZjBd6y8xgGtYWcask6edwrq +zrcXNEN/GY34AYre0M+p0xAs+lKSwkrJd2sCgygmzsBFtGwjW6lhjm+rg83zPHhH +mQZ0ShUR1Kp4TvzXgxj44RXOsS5ZyDe3slGiG4aw/hl+igO8Y8JMvcv/Tpzo+V75 +BkDAFmLRi08NayfeyCqK/TcRpzxKMKhS7jEHK8Pzu5P+FyFHKqIsobi+BA+psOix +5nZLhrweLdKNz387mE2lSSKzr7qeLGHSOMt+ajQtZio4YVyZqJvg4Y++J0n5+Rjw +MXp8GrvTfn1DQ+o= +-----END CERTIFICATE----- +""" +EC_CERT_EXAMPLE = parse_certificate(EC_CERT_STR) From e64de7d312816aa5a16369d57ba403072d52b436 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 25 Feb 2019 19:12:20 -0800 Subject: [PATCH 104/357] updating requirements --- requirements-dev.txt | 12 ++++++------ requirements-docs.txt | 24 ++++++++++++------------ 
requirements-tests.txt | 26 +++++++++++++------------- requirements.txt | 22 +++++++++++----------- 4 files changed, 42 insertions(+), 42 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index f5d6be3c..1cfbe393 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements-dev.txt requirements-dev.in +# pip-compile --output-file requirements-dev.txt requirements-dev.in -U --no-index # aspy.yaml==1.1.2 # via pre-commit bleach==3.1.0 # via readme-renderer @@ -11,26 +11,26 @@ cfgv==1.4.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.2.1 # via pre-commit +identify==1.3.0 # via pre-commit idna==2.8 # via requests importlib-metadata==0.8 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.14.3 +pre-commit==1.14.4 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer -pyyaml==4.2b4 +pyyaml==5.1b1 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit tqdm==4.31.1 # via twine -twine==1.12.1 +twine==1.13.0 urllib3==1.24.1 # via requests -virtualenv==16.4.0 # via pre-commit +virtualenv==16.4.1 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.3.3 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index 80822929..db20a4b9 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements-docs.txt requirements-docs.in +# pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index # acme==0.31.0 alabaster==0.7.12 # via sphinx @@ -10,19 +10,19 @@ alembic-autogenerate-enums==0.0.2 alembic==1.0.7 amqp==2.4.1 aniso8601==4.1.0 -arrow==0.13.0 +arrow==0.13.1 asn1crypto==0.24.0 asyncpool==1.0 babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.91 -botocore==1.12.91 +boto3==1.9.102 +botocore==1.12.102 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 -cffi==1.11.5 +cffi==1.12.1 chardet==3.0.4 click==7.0 cloudflare==2.1.0 @@ -34,7 +34,7 @@ dyn==1.8.1 flask-bcrypt==0.7.1 flask-cors==3.0.7 flask-mail==0.9.1 -flask-migrate==2.3.1 +flask-migrate==2.4.0 flask-principal==0.4.0 flask-restful==0.3.7 flask-script==2.0.6 @@ -47,15 +47,15 @@ imagesize==1.1.0 # via sphinx inflection==0.3.1 itsdangerous==1.1.0 jinja2==2.10 -jmespath==0.9.3 +jmespath==0.9.4 josepy==1.1.0 jsonlines==1.2.0 kombu==4.3.0 lockfile==0.12.2 mako==1.0.7 -markupsafe==1.1.0 +markupsafe==1.1.1 marshmallow-sqlalchemy==0.16.0 -marshmallow==2.18.0 +marshmallow==2.18.1 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx @@ -75,7 +75,7 @@ pyrfc3339==1.1 python-dateutil==2.8.0 python-editor==1.0.4 pytz==2018.9 -pyyaml==4.2b4 +pyyaml==5.1b1 raven[flask]==6.10.0 redis==2.10.6 requests-toolbelt==0.9.1 @@ -84,12 +84,12 @@ retrying==1.3.3 s3transfer==0.2.0 six==1.12.0 snowballstemmer==1.2.1 # via sphinx -sphinx-rtd-theme==0.4.2 +sphinx-rtd-theme==0.4.3 sphinx==1.8.4 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.17 +sqlalchemy==1.2.18 tabulate==0.8.3 urllib3==1.24.1 vine==1.2.0 diff 
--git a/requirements-tests.txt b/requirements-tests.txt index 60cda2d7..d1d6ae7f 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -2,17 +2,17 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements-tests.txt requirements-tests.in +# pip-compile --output-file requirements-tests.txt requirements-tests.in -U --no-index # asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==18.2.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.91 # via moto +boto3==1.9.102 # via moto boto==2.49.0 # via moto -botocore==1.12.91 # via boto3, moto, s3transfer +botocore==1.12.102 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests -cffi==1.11.5 # via cryptography +cffi==1.12.1 # via cryptography chardet==3.0.4 # via requests click==7.0 # via flask coverage==4.5.2 @@ -29,36 +29,36 @@ future==0.17.1 # via python-jose idna==2.8 # via requests itsdangerous==1.1.0 # via flask jinja2==2.10 # via flask, moto -jmespath==0.9.3 # via boto3, botocore +jmespath==0.9.4 # via boto3, botocore jsondiff==1.1.1 # via moto jsonpickle==1.1 # via aws-xray-sdk -markupsafe==1.1.0 # via jinja2 +markupsafe==1.1.1 # via jinja2 mock==2.0.0 # via moto -more-itertools==5.0.0 # via pytest +more-itertools==6.0.0 # via pytest moto==1.3.7 nose==1.3.7 pbr==5.1.2 # via mock -pluggy==0.8.1 # via pytest -py==1.7.0 # via pytest +pluggy==0.9.0 # via pytest +py==1.8.0 # via pytest pyaml==18.11.0 # via moto pycparser==2.19 # via cffi pycryptodome==3.7.3 # via python-jose pyflakes==2.1.0 pytest-flask==0.14.0 pytest-mock==1.10.1 -pytest==4.2.0 +pytest==4.3.0 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto -pyyaml==4.2b4 +pyyaml==5.1b1 requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses responses==0.10.5 # via moto s3transfer==0.2.0 # via boto3 -six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, more-itertools, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client +six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker urllib3==1.24.1 # via botocore, requests -websocket-client==0.54.0 # via docker +websocket-client==0.55.0 # via docker werkzeug==0.14.1 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk xmltodict==0.12.0 # via moto diff --git a/requirements.txt b/requirements.txt index 8bc96ac2..72a37692 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,25 +2,25 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --no-index --output-file requirements.txt requirements.in +# pip-compile --output-file requirements.txt requirements.in -U --no-index # acme==0.31.0 alembic-autogenerate-enums==0.0.2 alembic==1.0.7 # via flask-migrate amqp==2.4.1 # via kombu aniso8601==4.1.0 # via flask-restful -arrow==0.13.0 +arrow==0.13.1 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.91 -botocore==1.12.91 +boto3==1.9.102 +botocore==1.12.102 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 -cffi==1.11.5 # via bcrypt, cryptography, pynacl +cffi==1.12.1 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask 
cloudflare==2.1.0 @@ -32,7 +32,7 @@ dyn==1.8.1 flask-bcrypt==0.7.1 flask-cors==3.0.7 flask-mail==0.9.1 -flask-migrate==2.3.1 +flask-migrate==2.4.0 flask-principal==0.4.0 flask-restful==0.3.7 flask-script==2.0.6 @@ -44,15 +44,15 @@ idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask jinja2==2.10 -jmespath==0.9.3 # via boto3, botocore +jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare kombu==4.3.0 # via celery lockfile==0.12.2 mako==1.0.7 # via alembic -markupsafe==1.1.0 # via jinja2, mako +markupsafe==1.1.1 # via jinja2, mako marshmallow-sqlalchemy==0.16.0 -marshmallow==2.18.0 +marshmallow==2.18.1 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 @@ -70,7 +70,7 @@ python-dateutil==2.8.0 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic python-ldap==3.1.0 pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 -pyyaml==4.2b4 +pyyaml==5.1b1 raven[flask]==6.10.0 redis==2.10.6 requests-toolbelt==0.9.1 # via acme @@ -79,7 +79,7 @@ retrying==1.3.3 s3transfer==0.2.0 # via boto3 six==1.12.0 sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.17 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy==1.2.18 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 urllib3==1.24.1 # via botocore, requests vine==1.2.0 # via amqp From 53301728fa9e052214f7c2f8211a693ff5313ac9 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 26 Feb 2019 09:15:12 -0500 Subject: [PATCH 105/357] Moved url to config file instead of plugin option. One one url can be supported unless both the token and url are moved to the plugin options. --- lemur/plugins/lemur_vault/plugin.py | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/lemur/plugins/lemur_vault/plugin.py b/lemur/plugins/lemur_vault/plugin.py index 58a9e601..2e46b155 100644 --- a/lemur/plugins/lemur_vault/plugin.py +++ b/lemur/plugins/lemur_vault/plugin.py @@ -47,11 +47,11 @@ class VaultDestinationPlugin(DestinationPlugin): 'helpMessage': 'Must be a valid Vault secrets path' }, { - 'name': 'vaultUrl', + 'name': 'objectName', 'type': 'str', - 'required': True, - 'validation': '^https?://[a-zA-Z0-9.-]+(?::[0-9]+)?$', - 'helpMessage': 'Must be a valid Vault server url' + 'required': False, + 'validation': '[0-9a-zA-Z:_-]+', + 'helpMessage': 'Name to bundle certs under, if blank use cn' }, { 'name': 'bundleChain', @@ -64,13 +64,6 @@ class VaultDestinationPlugin(DestinationPlugin): ], 'required': True, 'helpMessage': 'Bundle the chain into the certificate' - }, - { - 'name': 'objectName', - 'type': 'str', - 'required': False, - 'validation': '[0-9a-zA-Z:_-]+', - 'helpMessage': 'Name to bundle certs under, if blank use cn' } ] @@ -93,10 +86,10 @@ class VaultDestinationPlugin(DestinationPlugin): sans_name = '{0}.san'.format(cname) token = current_app.config.get('VAULT_TOKEN') + url = current_app.config.get('VAULT_URL') mount = self.get_option('vaultMount', options) path = self.get_option('vaultPath', options) - url = self.get_option('vaultUrl', options) bundle = self.get_option('bundleChain', options) obj_name = self.get_option('objectName', options) From 16a18cc4b71d780821e3480f19787e117aec96f9 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 26 Feb 2019 16:35:49 -0800 Subject: [PATCH 106/357] adding more edge test cases for EC-certs --- lemur/tests/test_utils.py | 9 +++++- lemur/tests/vectors.py | 67 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 75 insertions(+), 
1 deletion(-) diff --git a/lemur/tests/test_utils.py b/lemur/tests/test_utils.py index c44f7b9c..74c11643 100644 --- a/lemur/tests/test_utils.py +++ b/lemur/tests/test_utils.py @@ -1,6 +1,6 @@ import pytest -from lemur.tests.vectors import SAN_CERT, INTERMEDIATE_CERT, ROOTCA_CERT, EC_CERT_EXAMPLE +from lemur.tests.vectors import SAN_CERT, INTERMEDIATE_CERT, ROOTCA_CERT, EC_CERT_EXAMPLE, ECDSA_PRIME256V1_CERT, ECDSA_SECP384r1_CERT, DSA_CERT def test_generate_private_key(): @@ -84,3 +84,10 @@ def test_is_selfsigned(selfsigned_cert): # Root CA certificates are also technically self-signed assert is_selfsigned(ROOTCA_CERT) is True assert is_selfsigned(EC_CERT_EXAMPLE) is False + + # selfsigned certs + assert is_selfsigned(ECDSA_PRIME256V1_CERT) is True + assert is_selfsigned(ECDSA_SECP384r1_CERT) is True + # unsupported algorithm (DSA) + with pytest.raises(Exception): + is_selfsigned(DSA_CERT) diff --git a/lemur/tests/vectors.py b/lemur/tests/vectors.py index 9af77bf6..06e7445a 100644 --- a/lemur/tests/vectors.py +++ b/lemur/tests/vectors.py @@ -422,3 +422,70 @@ MXp8GrvTfn1DQ+o= -----END CERTIFICATE----- """ EC_CERT_EXAMPLE = parse_certificate(EC_CERT_STR) + + +ECDSA_PRIME256V1_CERT_STR = """ +-----BEGIN CERTIFICATE----- +MIICUTCCAfYCCQCvH7H/e2nuiDAKBggqhkjOPQQDAjCBrzELMAkGA1UEBhMCVVMx +EzARBgNVBAgMCkNhbGlmb3JuaWExEjAQBgNVBAcMCUxvcyBHYXRvczEjMCEGA1UE +CgwaTGVtdXJUcnVzdCBFbnRlcnByaXNlcyBMdGQxJjAkBgNVBAsMHVVuaXR0ZXN0 +aW5nIE9wZXJhdGlvbnMgQ2VudGVyMSowKAYDVQQDDCFMZW11clRydXN0IFVuaXR0 +ZXN0cyBSb290IENBIDIwMTkwHhcNMTkwMjI2MTgxMTUyWhcNMjkwMjIzMTgxMTUy +WjCBrzELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExEjAQBgNVBAcM +CUxvcyBHYXRvczEjMCEGA1UECgwaTGVtdXJUcnVzdCBFbnRlcnByaXNlcyBMdGQx +JjAkBgNVBAsMHVVuaXR0ZXN0aW5nIE9wZXJhdGlvbnMgQ2VudGVyMSowKAYDVQQD +DCFMZW11clRydXN0IFVuaXR0ZXN0cyBSb290IENBIDIwMTkwWTATBgcqhkjOPQIB +BggqhkjOPQMBBwNCAAQsnAVUtpDCFMK/k9Chynu8BWRVUBUYbGQ9Q9xeLR60J4fD +uBt48YpTqg5RMZEclVknMReXqTmqphOBo37/YVdlMAoGCCqGSM49BAMCA0kAMEYC +IQDQZ6xfBiCTHxY4GM4+zLeG1iPBUSfIJOjkFNViFZY/XAIhAJYmrkVQb/YjWCdd +Vl89McYhmV4IV7WDgUmUhkUSFXgy +-----END CERTIFICATE----- +""" +ECDSA_PRIME256V1_CERT = parse_certificate(ECDSA_PRIME256V1_CERT_STR) + + +ECDSA_SECP384r1_CERT_STR = """ +-----BEGIN CERTIFICATE----- +MIICjjCCAhMCCQD2UadeQ7ub1jAKBggqhkjOPQQDAjCBrzELMAkGA1UEBhMCVVMx +EzARBgNVBAgMCkNhbGlmb3JuaWExEjAQBgNVBAcMCUxvcyBHYXRvczEjMCEGA1UE +CgwaTGVtdXJUcnVzdCBFbnRlcnByaXNlcyBMdGQxJjAkBgNVBAsMHVVuaXR0ZXN0 +aW5nIE9wZXJhdGlvbnMgQ2VudGVyMSowKAYDVQQDDCFMZW11clRydXN0IFVuaXR0 +ZXN0cyBSb290IENBIDIwMTgwHhcNMTkwMjI2MTgxODU2WhcNMjkwMjIzMTgxODU2 +WjCBrzELMAkGA1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExEjAQBgNVBAcM +CUxvcyBHYXRvczEjMCEGA1UECgwaTGVtdXJUcnVzdCBFbnRlcnByaXNlcyBMdGQx +JjAkBgNVBAsMHVVuaXR0ZXN0aW5nIE9wZXJhdGlvbnMgQ2VudGVyMSowKAYDVQQD +DCFMZW11clRydXN0IFVuaXR0ZXN0cyBSb290IENBIDIwMTgwdjAQBgcqhkjOPQIB +BgUrgQQAIgNiAARuKyHIRp2e6PB5UcY8L/bUdavkL5Zf3IegNKvaAsvkDenhDGAI +zwWgsk3rOo7jmpMibn7yJQn404uZovwyeKcApn8uVv8ltheeYAx+ySzzn/APxNGy +cye/nv1D9cDW628wCgYIKoZIzj0EAwIDaQAwZgIxANl1ljDH4ykNK2OaRqKOkBOW +cKk1SvtiEZDS/wytiZGCeaxYteSYF+3GE8V2W1geWAIxAI8D7DY0HU5zw+oxAlTD +Uw/TeHA6q0QV4otPvrINW3V09iXDwFSPe265fTkHSfT6hQ== +-----END CERTIFICATE----- +""" +ECDSA_SECP384r1_CERT = parse_certificate(ECDSA_SECP384r1_CERT_STR) + +DSA_CERT_STR = """ +-----BEGIN CERTIFICATE----- +MIIDmTCCA1YCCQD5h/cM7xYO9jALBglghkgBZQMEAwIwga8xCzAJBgNVBAYTAlVT +MRMwEQYDVQQIDApDYWxpZm9ybmlhMRIwEAYDVQQHDAlMb3MgR2F0b3MxIzAhBgNV +BAoMGkxlbXVyVHJ1c3QgRW50ZXJwcmlzZXMgTHRkMSYwJAYDVQQLDB1Vbml0dGVz +dGluZyBPcGVyYXRpb25zIENlbnRlcjEqMCgGA1UEAwwhTGVtdXJUcnVzdCBVbml0 
+dGVzdHMgUm9vdCBDQSAyMDE4MB4XDTE5MDIyNjE4MjUyMloXDTI5MDIyMzE4MjUy +Mlowga8xCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRIwEAYDVQQH +DAlMb3MgR2F0b3MxIzAhBgNVBAoMGkxlbXVyVHJ1c3QgRW50ZXJwcmlzZXMgTHRk +MSYwJAYDVQQLDB1Vbml0dGVzdGluZyBPcGVyYXRpb25zIENlbnRlcjEqMCgGA1UE +AwwhTGVtdXJUcnVzdCBVbml0dGVzdHMgUm9vdCBDQSAyMDE4MIIBtjCCASsGByqG +SM44BAEwggEeAoGBAO2+6wO20rn9K7RtXJ7/kCSVFzYZsY1RKvmJ6BBkMFIepBkz +2pk62tRhJgNH07GKF7pyTPRRKqt38CaPK4ERUpavx3Ok6vZ3PKq8tMac/PMKBmT1 +Xfpch54KDlCdreEMJqYiCwbIyiSCR4+PCH+7xC5Uh0PIZo6otNWe3Wkk53CfAhUA +8d4YAtto6D30f7qkEa7DMAccUS8CgYAiv8r0k0aUEaeioblcCAjmhvE0v8/tD5u1 +anHO4jZIIv7uOrNFIGfqcNEOBs5AQkt5Bxn6x0b/VvtZ0FSrD0j4f36pTgro6noG +/0oRt0JngxsMSfo0LV4+bY62v21A0SneNgTgY+ugdfgGWvb0+9tpsIhiY69T+7c8 +Oa0S6OWSPAOBhAACgYB5wa+nJJNZPoTWFum27JlWGYLO2flg5EpWlOvcEE0o5RfB +FPnMM033kKQQEI0YpCAq9fIMKhhUMk1X4mKUBUTt+Nrn1pY2l/wt5G6AQdHI8QXz +P1ecBbHPNZtWe3iVnfOgz/Pd8tU9slcXP9z5XbZ7R/oGcF/TPRTtbLEkYZNaDDAL +BglghkgBZQMEAwIDMAAwLQIVANubSNMSLt8plN9ZV3cp4pe3lMYCAhQPLLE7rTgm +92X+hWfyz000QEpYEQ== +-----END CERTIFICATE----- +""" +DSA_CERT = parse_certificate(DSA_CERT_STR) From 9dbae39604a705544b541370b33e8b164bd48a28 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 26 Feb 2019 16:36:59 -0800 Subject: [PATCH 107/357] updating cryptography API call, to create right signing algorithm object. --- lemur/common/utils.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 7c9269cf..f5db3d75 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -162,10 +162,10 @@ def check_cert_signature(cert, issuer_public_key): else: padder = padding.PKCS1v15() issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, padder, cert.signature_hash_algorithm) - elif isinstance(issuer_public_key, ec.EllipticCurvePublicKey) and isinstance(cert.signature_hash_algorithm, ec.ECDSA): - issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, cert.signature_hash_algorithm) + elif isinstance(issuer_public_key, ec.EllipticCurvePublicKey) and isinstance(ec.ECDSA(cert.signature_hash_algorithm), ec.ECDSA): + issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, ec.ECDSA(cert.signature_hash_algorithm)) else: - raise InvalidSignature + raise UnsupportedAlgorithm("Unsupported Algorithm '{var}'.".format(var=cert.signature_algorithm_oid._name)) def is_selfsigned(cert): @@ -179,8 +179,6 @@ def is_selfsigned(cert): return True except InvalidSignature: return False - except UnsupportedAlgorithm as e: - raise Exception(e) def is_weekend(date): From 658c58e4b63ef50f5e4e4a040f4ce1bab21dab25 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 26 Feb 2019 17:04:43 -0800 Subject: [PATCH 108/357] clarifying comments --- lemur/common/utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lemur/common/utils.py b/lemur/common/utils.py index f5db3d75..13e6e067 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -147,8 +147,7 @@ def generate_private_key(key_type): def check_cert_signature(cert, issuer_public_key): """ Check a certificate's signature against an issuer public key. - Before EC validation, make sure public key and signature are of the same type, - otherwise verification not possible (raise InvalidSignature) + Before EC validation, make sure we support the algorithm, otherwise raise UnsupportedAlgorithm On success, returns None; on failure, raises UnsupportedAlgorithm or InvalidSignature. 
""" if isinstance(issuer_public_key, rsa.RSAPublicKey): From 63de8047ce51f2da0ff181035afb330097017355 Mon Sep 17 00:00:00 2001 From: Ronald Moesbergen Date: Wed, 27 Feb 2019 09:38:25 +0100 Subject: [PATCH 109/357] Return 'already deleted' instead of 'not found' when cert has already been deleted --- lemur/certificates/views.py | 5 ++++- lemur/tests/test_certificates.py | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index b464b3ed..e77160b2 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -691,9 +691,12 @@ class Certificates(AuthenticatedResource): cert = service.get(certificate_id) - if not cert or cert.deleted: + if not cert: return dict(message="Cannot find specified certificate"), 404 + if cert.deleted: + return dict(message="Certificate is already deleted"), 412 + # allow creators if g.current_user != cert.user: owner_role = role_service.get_by_name(cert.owner) diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index 75a29e16..a020ac6b 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -738,7 +738,7 @@ def test_certificate_put_with_data(client, certificate, issuer_plugin): @pytest.mark.parametrize("token,status", [ (VALID_USER_HEADER_TOKEN, 403), (VALID_ADMIN_HEADER_TOKEN, 204), - (VALID_ADMIN_API_TOKEN, 404), + (VALID_ADMIN_API_TOKEN, 412), ('', 401) ]) def test_certificate_delete(client, token, status): From 5d2f603c847771ef0b9bd1651e5c751bc55043f2 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Fri, 1 Mar 2019 09:49:52 -0500 Subject: [PATCH 110/357] renamed vault destination plugin to avoid conflict with vault pki plugin --- lemur/plugins/{lemur_vault => lemur_vault_dest}/__init__.py | 0 lemur/plugins/{lemur_vault => lemur_vault_dest}/plugin.py | 2 +- setup.py | 2 +- 3 files changed, 2 insertions(+), 2 deletions(-) rename lemur/plugins/{lemur_vault => lemur_vault_dest}/__init__.py (100%) rename lemur/plugins/{lemur_vault => lemur_vault_dest}/plugin.py (98%) diff --git a/lemur/plugins/lemur_vault/__init__.py b/lemur/plugins/lemur_vault_dest/__init__.py similarity index 100% rename from lemur/plugins/lemur_vault/__init__.py rename to lemur/plugins/lemur_vault_dest/__init__.py diff --git a/lemur/plugins/lemur_vault/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py similarity index 98% rename from lemur/plugins/lemur_vault/plugin.py rename to lemur/plugins/lemur_vault_dest/plugin.py index 2e46b155..a11c92ba 100644 --- a/lemur/plugins/lemur_vault/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -1,5 +1,5 @@ """ -.. module: lemur.plugins.lemur_vault.plugin +.. module: lemur.plugins.lemur_vault_dest.plugin :platform: Unix :copyright: (c) 2019 :license: Apache, see LICENCE for more details. 
diff --git a/setup.py b/setup.py index b5dcdb3b..d22d1f7b 100644 --- a/setup.py +++ b/setup.py @@ -155,7 +155,7 @@ setup( 'digicert_cis_source = lemur.plugins.lemur_digicert.plugin:DigiCertCISSourcePlugin', 'csr_export = lemur.plugins.lemur_csr.plugin:CSRExportPlugin', 'sftp_destination = lemur.plugins.lemur_sftp.plugin:SFTPDestinationPlugin', - 'vault_desination = lemur.plugins.lemur_vault.plugin:VaultDestinationPlugin' + 'vault_desination = lemur.plugins.lemur_vault_dest.plugin:VaultDestinationPlugin' ], }, classifiers=[ From 10cec063c2c8561ecda82b6b238c58ec82072fbe Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Wed, 20 Jun 2018 18:42:34 +0300 Subject: [PATCH 111/357] Check that stored certificate chain matches certificate Similar to how the private key is checked. --- lemur/certificates/models.py | 6 +++- lemur/certificates/schemas.py | 14 ++++++++-- lemur/common/utils.py | 21 ++++++++++++++ lemur/common/validators.py | 48 ++++++++++++++++++++++---------- lemur/tests/factories.py | 1 + lemur/tests/test_certificates.py | 26 ++++++++++++++++- 6 files changed, 96 insertions(+), 20 deletions(-) diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index 34305cc2..7cc4813c 100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -192,12 +192,16 @@ class Certificate(db.Model): def check_integrity(self): """ - Integrity checks: Does the cert have a matching private key? + Integrity checks: Does the cert have a valid chain and matching private key? """ if self.private_key: validators.verify_private_key_match(utils.parse_private_key(self.private_key), self.parsed_cert, error_class=AssertionError) + if self.chain: + chain = [self.parsed_cert] + utils.parse_cert_chain(self.chain) + validators.verify_cert_chain(chain, error_class=AssertionError) + @cached_property def parsed_cert(self): assert self.body, "Certificate body not set" diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index 946bd541..d20fd5a7 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -245,8 +245,7 @@ class CertificateUploadInputSchema(CertificateCreationSchema): external_id = fields.String(missing=None, allow_none=True) private_key = fields.String() body = fields.String(required=True) - chain = fields.String(validate=validators.public_certificate, missing=None, - allow_none=True) # TODO this could be multiple certificates + chain = fields.String(missing=None, allow_none=True) destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True) notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True) @@ -260,7 +259,7 @@ class CertificateUploadInputSchema(CertificateCreationSchema): raise ValidationError('Destinations require private key.') @validates_schema - def validate_cert_private_key(self, data): + def validate_cert_private_key_chain(self, data): cert = None key = None if data.get('body'): @@ -279,6 +278,15 @@ class CertificateUploadInputSchema(CertificateCreationSchema): # Throws ValidationError validators.verify_private_key_match(key, cert) + if data.get('chain'): + try: + chain = utils.parse_cert_chain(data['chain']) + except ValueError: + raise ValidationError("Invalid certificate in certificate chain.", field_names=['chain']) + + # Throws ValidationError + validators.verify_cert_chain([cert] + chain) + class CertificateExportInputSchema(LemurInputSchema): plugin = fields.Nested(PluginInputSchema) diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 13e6e067..62c3182b 
100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -7,6 +7,7 @@ .. moduleauthor:: Kevin Glisson """ import random +import re import string import sqlalchemy @@ -67,6 +68,26 @@ def parse_private_key(private_key): return load_pem_private_key(private_key.encode('utf8'), password=None, backend=default_backend()) +def split_pem(data): + """ + Split a string of several PEM payloads to a list of strings. + + :param data: String + :return: List of strings + """ + return re.split("\n(?=-----BEGIN )", data) + + +def parse_cert_chain(pem_chain): + """ + Helper function to split and parse a series of PEM certificates. + + :param pem_chain: string + :return: List of parsed certificates + """ + return [parse_certificate(cert) for cert in split_pem(pem_chain) if pem_chain] + + def parse_csr(csr): """ Helper function that parses a CSR. diff --git a/lemur/common/validators.py b/lemur/common/validators.py index 90169553..91b831ba 100644 --- a/lemur/common/validators.py +++ b/lemur/common/validators.py @@ -1,27 +1,14 @@ import re from cryptography import x509 +from cryptography.exceptions import UnsupportedAlgorithm, InvalidSignature from cryptography.hazmat.backends import default_backend from cryptography.x509 import NameOID from flask import current_app from marshmallow.exceptions import ValidationError from lemur.auth.permissions import SensitiveDomainPermission -from lemur.common.utils import parse_certificate, is_weekend - - -def public_certificate(body): - """ - Determines if specified string is valid public certificate. - - :param body: - :return: - """ - try: - parse_certificate(body) - except Exception as e: - current_app.logger.exception(e) - raise ValidationError('Public certificate presented is not valid.') +from lemur.common.utils import check_cert_signature, is_weekend def common_name(value): @@ -138,3 +125,34 @@ def verify_private_key_match(key, cert, error_class=ValidationError): """ if key.public_key().public_numbers() != cert.public_key().public_numbers(): raise error_class("Private key does not match certificate.") + + +def verify_cert_chain(certs, error_class=ValidationError): + """ + Verifies that the certificates in the chain are correct. + + We don't bother with full cert validation but just check that certs in the chain are signed by the next, to avoid + basic human errors -- such as pasting the wrong certificate. + + :param certs: List of parsed certificates, use parse_cert_chain() + :param error_class: Exception class to raise on error + """ + cert = certs[0] + for issuer in certs[1:]: + # Use the current cert's public key to verify the previous signature. + # "certificate validation is a complex problem that involves much more than just signature checks" + try: + check_cert_signature(cert, issuer.public_key()) + + except InvalidSignature: + # Avoid circular import. + from lemur.common import defaults + + raise error_class("Incorrect chain certificate(s) provided: '%s' is not signed by '%s'" + % (defaults.common_name(cert) or 'Unknown', defaults.common_name(issuer))) + + except UnsupportedAlgorithm as err: + current_app.logger.warning("Skipping chain validation: %s", err) + + # Next loop will validate that *this issuer* cert is signed by the next chain cert. 
+ cert = issuer diff --git a/lemur/tests/factories.py b/lemur/tests/factories.py index a4af3d43..de78f8a3 100644 --- a/lemur/tests/factories.py +++ b/lemur/tests/factories.py @@ -140,6 +140,7 @@ class CACertificateFactory(CertificateFactory): class InvalidCertificateFactory(CertificateFactory): body = INVALID_CERT_STR private_key = '' + chain = '' class AuthorityFactory(BaseFactory): diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index 8247c36b..f94dd713 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -512,7 +512,7 @@ def test_certificate_upload_schema_invalid_chain(client): 'owner': 'pwner@example.com', } data, errors = CertificateUploadInputSchema().load(data) - assert errors == {'chain': ['Public certificate presented is not valid.']} + assert errors == {'chain': ['Invalid certificate in certificate chain.']} def test_certificate_upload_schema_wrong_pkey(client): @@ -527,6 +527,30 @@ def test_certificate_upload_schema_wrong_pkey(client): assert errors == {'_schema': ['Private key does not match certificate.']} +def test_certificate_upload_schema_wrong_chain(client): + from lemur.certificates.schemas import CertificateUploadInputSchema + data = { + 'owner': 'pwner@example.com', + 'body': SAN_CERT_STR, + 'chain': ROOTCA_CERT_STR, + } + data, errors = CertificateUploadInputSchema().load(data) + assert errors == {'_schema': ["Incorrect chain certificate(s) provided: 'san.example.org' is not signed by " + "'LemurTrust Unittests Root CA 2018'"]} + + +def test_certificate_upload_schema_wrong_chain_2nd(client): + from lemur.certificates.schemas import CertificateUploadInputSchema + data = { + 'owner': 'pwner@example.com', + 'body': SAN_CERT_STR, + 'chain': INTERMEDIATE_CERT_STR + '\n' + SAN_CERT_STR, + } + data, errors = CertificateUploadInputSchema().load(data) + assert errors == {'_schema': ["Incorrect chain certificate(s) provided: 'LemurTrust Unittests Class 1 CA 2018' is " + "not signed by 'san.example.org'"]} + + def test_create_basic_csr(client): csr_config = dict( common_name='example.com', From dd2900bdbc8ff315c937ca9860eeaf01959b22b2 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Mon, 4 Mar 2019 10:04:06 -0800 Subject: [PATCH 112/357] Relax search;update requirements --- lemur/certificates/service.py | 2 +- requirements-dev.txt | 11 ++++++----- requirements-docs.txt | 19 ++++++++++--------- requirements-tests.txt | 16 ++++++++-------- requirements.txt | 19 ++++++++++--------- 5 files changed, 35 insertions(+), 32 deletions(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index d5012012..2488115b 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -317,7 +317,7 @@ def render(args): if filt: terms = filt.split(';') - term = '{0}%'.format(terms[1]) + term = '%{0}%'.format(terms[1]) # Exact matches for quotes. 
Only applies to name, issuer, and cn if terms[1].startswith('"') and terms[1].endswith('"'): term = terms[1][1:-1] diff --git a/requirements-dev.txt b/requirements-dev.txt index 1cfbe393..e67aea64 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,16 +4,17 @@ # # pip-compile --output-file requirements-dev.txt requirements-dev.in -U --no-index # -aspy.yaml==1.1.2 # via pre-commit +aspy.yaml==1.2.0 # via pre-commit bleach==3.1.0 # via readme-renderer certifi==2018.11.29 # via requests -cfgv==1.4.0 # via pre-commit +cfgv==1.5.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.3.0 # via pre-commit +identify==1.4.0 # via pre-commit idna==2.8 # via requests importlib-metadata==0.8 # via pre-commit +importlib-resources==1.0.2 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 @@ -22,7 +23,7 @@ pre-commit==1.14.4 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer -pyyaml==5.1b1 +pyyaml==5.1b3 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine @@ -31,6 +32,6 @@ toml==0.10.0 # via pre-commit tqdm==4.31.1 # via twine twine==1.13.0 urllib3==1.24.1 # via requests -virtualenv==16.4.1 # via pre-commit +virtualenv==16.4.3 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.3.3 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index db20a4b9..50a2a077 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -8,8 +8,8 @@ acme==0.31.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.7 -amqp==2.4.1 -aniso8601==4.1.0 +amqp==2.4.2 +aniso8601==5.1.0 arrow==0.13.1 asn1crypto==0.24.0 asyncpool==1.0 @@ -17,16 +17,16 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.102 -botocore==1.12.102 +boto3==1.9.106 +botocore==1.12.106 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 -cffi==1.12.1 +cffi==1.12.2 chardet==3.0.4 click==7.0 cloudflare==2.1.0 -cryptography==2.5 +cryptography==2.6.1 dnspython3==1.15.0 dnspython==1.15.0 docutils==0.14 @@ -50,7 +50,7 @@ jinja2==2.10 jmespath==0.9.4 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.3.0 +kombu==4.4.0 lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.1 @@ -60,7 +60,7 @@ mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx paramiko==2.4.2 -pbr==5.1.2 +pbr==5.1.3 pem==18.2.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 @@ -75,9 +75,10 @@ pyrfc3339==1.1 python-dateutil==2.8.0 python-editor==1.0.4 pytz==2018.9 -pyyaml==5.1b1 +pyyaml==5.1b3 raven[flask]==6.10.0 redis==2.10.6 +relativetimebuilder==0.2.0 requests-toolbelt==0.9.1 requests[security]==2.21.0 retrying==1.3.3 diff --git a/requirements-tests.txt b/requirements-tests.txt index d1d6ae7f..84c59d0d 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -6,17 +6,17 @@ # asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest -attrs==18.2.0 # via pytest +attrs==19.1.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.102 # via moto +boto3==1.9.106 # via moto boto==2.49.0 # via moto -botocore==1.12.102 # via boto3, moto, s3transfer +botocore==1.12.106 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests -cffi==1.12.1 # via cryptography +cffi==1.12.2 # via cryptography chardet==3.0.4 # via requests click==7.0 # via flask coverage==4.5.2 -cryptography==2.5 # via moto +cryptography==2.6.1 # via moto docker-pycreds==0.4.0 # via docker docker==3.7.0 # via moto 
docutils==0.14 # via botocore @@ -37,20 +37,20 @@ mock==2.0.0 # via moto more-itertools==6.0.0 # via pytest moto==1.3.7 nose==1.3.7 -pbr==5.1.2 # via mock +pbr==5.1.3 # via mock pluggy==0.9.0 # via pytest py==1.8.0 # via pytest pyaml==18.11.0 # via moto pycparser==2.19 # via cffi pycryptodome==3.7.3 # via python-jose -pyflakes==2.1.0 +pyflakes==2.1.1 pytest-flask==0.14.0 pytest-mock==1.10.1 pytest==4.3.0 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto -pyyaml==5.1b1 +pyyaml==5.1b3 requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses responses==0.10.5 # via moto diff --git a/requirements.txt b/requirements.txt index 72a37692..dd442b5f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,24 +7,24 @@ acme==0.31.0 alembic-autogenerate-enums==0.0.2 alembic==1.0.7 # via flask-migrate -amqp==2.4.1 # via kombu -aniso8601==4.1.0 # via flask-restful +amqp==2.4.2 # via kombu +aniso8601==5.1.0 # via flask-restful, relativetimebuilder arrow==0.13.1 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.102 -botocore==1.12.102 +boto3==1.9.106 +botocore==1.12.106 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 -cffi==1.12.1 # via bcrypt, cryptography, pynacl +cffi==1.12.2 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask cloudflare==2.1.0 -cryptography==2.5 +cryptography==2.6.1 dnspython3==1.15.0 dnspython==1.15.0 # via dnspython3 docutils==0.14 # via botocore @@ -47,7 +47,7 @@ jinja2==2.10 jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.3.0 # via celery +kombu==4.4.0 # via celery lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.1 # via jinja2, mako @@ -56,7 +56,7 @@ marshmallow==2.18.1 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 -pbr==5.1.2 # via mock +pbr==5.1.3 # via mock pem==18.2.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 # via python-ldap @@ -70,9 +70,10 @@ python-dateutil==2.8.0 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic python-ldap==3.1.0 pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 -pyyaml==5.1b1 +pyyaml==5.1b3 raven[flask]==6.10.0 redis==2.10.6 +relativetimebuilder==0.2.0 # via aniso8601 requests-toolbelt==0.9.1 # via acme requests[security]==2.21.0 retrying==1.3.3 From 4a027797e057d28900049c16190c4659d5bb48a5 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 5 Mar 2019 07:19:22 -0500 Subject: [PATCH 113/357] fixing linting issues --- lemur/plugins/lemur_vault_dest/plugin.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index a11c92ba..92089b02 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -10,8 +10,6 @@ .. 
moduleauthor:: Christopher Jolley """ import hvac - -#import lemur_vault from flask import current_app from lemur.common.defaults import common_name @@ -21,7 +19,6 @@ from lemur.plugins.bases import DestinationPlugin from cryptography import x509 from cryptography.hazmat.backends import default_backend - class VaultDestinationPlugin(DestinationPlugin): """Hashicorp Vault Destination plugin for Lemur""" title = 'Vault' @@ -79,7 +76,7 @@ class VaultDestinationPlugin(DestinationPlugin): :return: """ cname = common_name(parse_certificate(body)) - secret = {'data':{}} + secret = {'data': {}} key_name = '{0}.key'.format(cname) cert_name = '{0}.crt'.format(cname) chain_name = '{0}.chain'.format(cname) @@ -100,7 +97,6 @@ class VaultDestinationPlugin(DestinationPlugin): path = '{0}/{1}'.format(path, cname) secret = get_secret(url, token, mount, path) - if bundle == 'Nginx' and cert_chain: secret['data'][cert_name] = '{0}\n{1}'.format(body, cert_chain) @@ -120,6 +116,7 @@ class VaultDestinationPlugin(DestinationPlugin): current_app.logger.exception( "Exception uploading secret to vault: {0}".format(err), exc_info=True) + def get_san_list(body): """ parse certificate for SAN names and return list, return empty list on error """ try: @@ -127,15 +124,16 @@ def get_san_list(body): cert = x509.load_pem_x509_certificate(byte_body, default_backend()) ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME) return ext.value.get_values_for_type(x509.DNSName) - except: + except ValueError: pass return [] + def get_secret(url, token, mount, path): result = {'data': {}} try: client = hvac.Client(url=url, token=token) result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount) - except: + except ConnectionError: pass return result From a1cb8ee266af23aa0ba7171f5cf9d40750b0220b Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 5 Mar 2019 07:37:04 -0500 Subject: [PATCH 114/357] fixing lint --- lemur/plugins/lemur_vault_dest/plugin.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 92089b02..774b6bb1 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -19,6 +19,7 @@ from lemur.plugins.bases import DestinationPlugin from cryptography import x509 from cryptography.hazmat.backends import default_backend + class VaultDestinationPlugin(DestinationPlugin): """Hashicorp Vault Destination plugin for Lemur""" title = 'Vault' From 20ac4bd3dd380b311c3b6f570c55a939e1d1399a Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 5 Mar 2019 07:34:30 -0800 Subject: [PATCH 115/357] downgrade kombu --- requirements-docs.txt | 8 ++++---- requirements-tests.txt | 4 ++-- requirements.txt | 8 ++++---- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/requirements-docs.txt b/requirements-docs.txt index 50a2a077..894defcb 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -7,7 +7,7 @@ acme==0.31.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 -alembic==1.0.7 +alembic==1.0.8 amqp==2.4.2 aniso8601==5.1.0 arrow==0.13.1 @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.106 -botocore==1.12.106 +boto3==1.9.107 +botocore==1.12.107 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 @@ -90,7 +90,7 @@ sphinx==1.8.4 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.18 
+sqlalchemy==1.3.0 tabulate==0.8.3 urllib3==1.24.1 vine==1.2.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index 84c59d0d..55e38cbf 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,9 +8,9 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.106 # via moto +boto3==1.9.107 # via moto boto==2.49.0 # via moto -botocore==1.12.106 # via boto3, moto, s3transfer +botocore==1.12.107 # via boto3, moto, s3transfer certifi==2018.11.29 # via requests cffi==1.12.2 # via cryptography chardet==3.0.4 # via requests diff --git a/requirements.txt b/requirements.txt index dd442b5f..cf2be225 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,7 @@ # acme==0.31.0 alembic-autogenerate-enums==0.0.2 -alembic==1.0.7 # via flask-migrate +alembic==1.0.8 # via flask-migrate amqp==2.4.2 # via kombu aniso8601==5.1.0 # via flask-restful, relativetimebuilder arrow==0.13.1 @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.106 -botocore==1.12.106 +boto3==1.9.107 +botocore==1.12.107 celery[redis]==4.2.1 certifi==2018.11.29 certsrv==2.1.1 @@ -80,7 +80,7 @@ retrying==1.3.3 s3transfer==0.2.0 # via boto3 six==1.12.0 sqlalchemy-utils==0.33.11 -sqlalchemy==1.2.18 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy==1.3.0 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 urllib3==1.24.1 # via botocore, requests vine==1.2.0 # via amqp From 077ae1eedd7b827b809bb2908a20491342a331a0 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 5 Mar 2019 09:45:59 -0800 Subject: [PATCH 116/357] Downgrade Kombu for real this time --- requirements-docs.txt | 2 +- requirements.in | 1 + requirements.txt | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/requirements-docs.txt b/requirements-docs.txt index 894defcb..e936c197 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -50,7 +50,7 @@ jinja2==2.10 jmespath==0.9.4 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.4.0 +kombu==4.3.0 lockfile==0.12.2 mako==1.0.7 markupsafe==1.1.1 diff --git a/requirements.in b/requirements.in index b085f5c7..e3d0c66b 100644 --- a/requirements.in +++ b/requirements.in @@ -26,6 +26,7 @@ future gunicorn inflection jinja2 +kombu==4.3.0 # kombu 4.4.0 requires redis 3 lockfile marshmallow-sqlalchemy marshmallow diff --git a/requirements.txt b/requirements.txt index cf2be225..2aa5f157 100644 --- a/requirements.txt +++ b/requirements.txt @@ -47,7 +47,7 @@ jinja2==2.10 jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.4.0 # via celery +kombu==4.3.0 lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.1 # via jinja2, mako From cc6d53fdeb4ff3cd24dd96ac2caa0b292f101208 Mon Sep 17 00:00:00 2001 From: Kevin Glisson Date: Tue, 5 Mar 2019 15:39:37 -0800 Subject: [PATCH 117/357] Ensuring that configs passed via the command line are respected. 
--- lemur/__init__.py | 4 ++-- lemur/manage.py | 4 ++-- lemur/tests/conftest.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lemur/__init__.py b/lemur/__init__.py index 1cdb3468..769e0cec 100644 --- a/lemur/__init__.py +++ b/lemur/__init__.py @@ -62,8 +62,8 @@ LEMUR_BLUEPRINTS = ( ) -def create_app(config=None): - app = factory.create_app(app_name=__name__, blueprints=LEMUR_BLUEPRINTS, config=config) +def create_app(config_path=None): + app = factory.create_app(app_name=__name__, blueprints=LEMUR_BLUEPRINTS, config=config_path) configure_hook(app) return app diff --git a/lemur/manage.py b/lemur/manage.py index 184b9aa6..9161109b 100755 --- a/lemur/manage.py +++ b/lemur/manage.py @@ -50,7 +50,7 @@ from lemur.pending_certificates.models import PendingCertificate # noqa from lemur.dns_providers.models import DnsProvider # noqa manager = Manager(create_app) -manager.add_option('-c', '--config', dest='config') +manager.add_option('-c', '--config', dest='config_path', required=False) migrate = Migrate(create_app) @@ -391,7 +391,7 @@ class LemurServer(Command): # run startup tasks on an app like object validate_conf(current_app, REQUIRED_VARIABLES) - app.app_uri = 'lemur:create_app(config="{0}")'.format(current_app.config.get('CONFIG_PATH')) + app.app_uri = 'lemur:create_app(config_path="{0}")'.format(current_app.config.get('CONFIG_PATH')) return app.run() diff --git a/lemur/tests/conftest.py b/lemur/tests/conftest.py index b3dad8b2..43fa7163 100644 --- a/lemur/tests/conftest.py +++ b/lemur/tests/conftest.py @@ -43,7 +43,7 @@ def app(request): Creates a new Flask application for a test duration. Uses application factory `create_app`. """ - _app = create_app(os.path.dirname(os.path.realpath(__file__)) + '/conf.py') + _app = create_app(config_path=os.path.dirname(os.path.realpath(__file__)) + '/conf.py') ctx = _app.app_context() ctx.push() From b8d3a4f9aac35ee82ed958433742701f965ca190 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 6 Mar 2019 11:13:34 -0800 Subject: [PATCH 118/357] Update requirements.in --- requirements.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.in b/requirements.in index bd408f1c..9b27f604 100644 --- a/requirements.in +++ b/requirements.in @@ -46,4 +46,4 @@ six SQLAlchemy-Utils tabulate xmltodict -pyyaml>=4.2b1 #high severity alert \ No newline at end of file +pyyaml>=4.2b1 #high severity alert From 752c9a086bd1aff975df3622c0b06422368170e4 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Thu, 7 Mar 2019 15:41:29 -0500 Subject: [PATCH 119/357] fixing error handling and better data formating --- lemur/plugins/lemur_vault_dest/plugin.py | 36 ++++++++++++------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 774b6bb1..5924f387 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -34,7 +34,7 @@ class VaultDestinationPlugin(DestinationPlugin): 'name': 'vaultMount', 'type': 'str', 'required': True, - 'validation': '^[a-zA-Z0-9]+$', + 'validation': '^\S+$', 'helpMessage': 'Must be a valid Vault secrets mount name!' 
}, { @@ -77,11 +77,6 @@ class VaultDestinationPlugin(DestinationPlugin): :return: """ cname = common_name(parse_certificate(body)) - secret = {'data': {}} - key_name = '{0}.key'.format(cname) - cert_name = '{0}.crt'.format(cname) - chain_name = '{0}.chain'.format(cname) - sans_name = '{0}.san'.format(cname) token = current_app.config.get('VAULT_TOKEN') url = current_app.config.get('VAULT_URL') @@ -98,18 +93,19 @@ class VaultDestinationPlugin(DestinationPlugin): path = '{0}/{1}'.format(path, cname) secret = get_secret(url, token, mount, path) + secret['data'][cname] = {} if bundle == 'Nginx' and cert_chain: - secret['data'][cert_name] = '{0}\n{1}'.format(body, cert_chain) + secret['data'][cname]['crt'] = '{0}\n{1}'.format(body, cert_chain) elif bundle == 'Apache' and cert_chain: - secret['data'][cert_name] = body - secret['data'][chain_name] = cert_chain + secret['data'][cname]['crt'] = body + secret['data'][cname]['chain'] = cert_chain else: - secret['data'][cert_name] = body - secret['data'][key_name] = private_key + secret['data'][cname]['crt'] = body + secret['data'][cname]['key'] = private_key san_list = get_san_list(body) if isinstance(san_list, list): - secret['data'][sans_name] = san_list + secret['data'][cname]['san'] = san_list try: client.secrets.kv.v1.create_or_update_secret( path=path, mount_point=mount, secret=secret['data']) @@ -120,21 +116,25 @@ class VaultDestinationPlugin(DestinationPlugin): def get_san_list(body): """ parse certificate for SAN names and return list, return empty list on error """ + san_list = [] try: byte_body = body.encode('utf-8') cert = x509.load_pem_x509_certificate(byte_body, default_backend()) ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME) - return ext.value.get_values_for_type(x509.DNSName) - except ValueError: + san_list = ext.value.get_values_for_type(x509.DNSName) + except x509.extensions.ExtensionNotFound: pass - return [] + finally: + return san_list def get_secret(url, token, mount, path): + """ retreiive existing data from mount path and return dictionary """ result = {'data': {}} try: client = hvac.Client(url=url, token=token) result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount) - except ConnectionError: - pass - return result + #except ConnectionError: + # pass + finally: + return result From f1c09a6f8f8f7c66d66b0d2c85cfa4420b200d00 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Thu, 7 Mar 2019 15:58:34 -0500 Subject: [PATCH 120/357] fixed comments --- lemur/plugins/lemur_vault_dest/plugin.py | 4 ++-- lemur/plugins/lemur_vault_dest/tests/conftest.py | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 lemur/plugins/lemur_vault_dest/tests/conftest.py diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 5924f387..2f2a2e82 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -134,7 +134,7 @@ def get_secret(url, token, mount, path): try: client = hvac.Client(url=url, token=token) result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount) - #except ConnectionError: - # pass + except ConnectionError: + pass finally: return result diff --git a/lemur/plugins/lemur_vault_dest/tests/conftest.py b/lemur/plugins/lemur_vault_dest/tests/conftest.py new file mode 100644 index 00000000..0e1cd89f --- /dev/null +++ b/lemur/plugins/lemur_vault_dest/tests/conftest.py @@ -0,0 +1 @@ +from lemur.tests.conftest import * # noqa From 
d220e9326c0c2a66106a9f3ec74832091c3c7ff2 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 12 Mar 2019 14:45:43 -0700 Subject: [PATCH 121/357] Skip a task if similar task already active --- lemur/common/celery.py | 31 ++++++++++++++++++++++++++++++- 1 file changed, 30 insertions(+), 1 deletion(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index f2a2f826..56837cba 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -47,6 +47,19 @@ def make_celery(app): celery = make_celery(flask_app) +def is_task_active(fun: str, task_id: str, args: str) -> bool: + from celery.task.control import inspect + i = inspect() + active_tasks: dict = i.active() + for _, tasks in active_tasks.items(): + for task in tasks: + if task.get("id") == task_id: + continue + if task.get("name") == fun and task.get("args") == str(args): + return True + return False + + @celery.task() def fetch_acme_cert(id): """ @@ -224,5 +237,21 @@ def sync_source(source): :param source: :return: """ - current_app.logger.debug("Syncing source {}".format(source)) + + function = f"{__name__}.{sys._getframe().f_code.co_name}" + task_id = celery.current_task.request.id + log_data = { + "function": function, + "message": "Syncing source", + "source": source, + "task_id": task_id, + } + current_app.logger.debug(log_data) + + if is_task_active(function, task_id, (source,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return sync([source]) + log_data["message"] = "Done syncing source" + current_app.logger.debug(log_data) From 1a5a91ccc72ec869ed1ea5f940f33ea92d77c50e Mon Sep 17 00:00:00 2001 From: Curtis Date: Tue, 12 Mar 2019 15:11:13 -0700 Subject: [PATCH 122/357] Update celery.py --- lemur/common/celery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 56837cba..b7f23c32 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -50,7 +50,7 @@ celery = make_celery(flask_app) def is_task_active(fun: str, task_id: str, args: str) -> bool: from celery.task.control import inspect i = inspect() - active_tasks: dict = i.active() + active_tasks = i.active() for _, tasks in active_tasks.items(): for task in tasks: if task.get("id") == task_id: From f38e5b0879225255fb9ba514cb1d81f86fdb8d3c Mon Sep 17 00:00:00 2001 From: Curtis Date: Tue, 12 Mar 2019 15:29:04 -0700 Subject: [PATCH 123/357] Update celery.py --- lemur/common/celery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index b7f23c32..90b6f9a2 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -47,7 +47,7 @@ def make_celery(app): celery = make_celery(flask_app) -def is_task_active(fun: str, task_id: str, args: str) -> bool: +def is_task_active(fun, task_id, args): from celery.task.control import inspect i = inspect() active_tasks = i.active() From c445297357e3ab47116beed3d9fe0a4b50582595 Mon Sep 17 00:00:00 2001 From: Curtis Date: Tue, 12 Mar 2019 15:41:24 -0700 Subject: [PATCH 124/357] Update celery.py --- lemur/common/celery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 90b6f9a2..991dac2c 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -238,7 +238,7 @@ def sync_source(source): :return: """ - function = f"{__name__}.{sys._getframe().f_code.co_name}" + function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) task_id = 
celery.current_task.request.id log_data = { "function": function, From f7452e837974dd39bf7452a8ee059ec7738f18ef Mon Sep 17 00:00:00 2001 From: Javier Ramos Date: Fri, 15 Mar 2019 09:18:33 +0100 Subject: [PATCH 125/357] Parse DNSNames from CSR into Lemur Certificate --- lemur/certificates/schemas.py | 6 +++++ lemur/certificates/utils.py | 42 +++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) create mode 100644 lemur/certificates/utils.py diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index d20fd5a7..e9b61539 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -10,6 +10,7 @@ from marshmallow import fields, validate, validates_schema, post_load, pre_load from marshmallow.exceptions import ValidationError from lemur.authorities.schemas import AuthorityNestedOutputSchema +from lemur.certificates import utils as cert_utils from lemur.common import missing, utils, validators from lemur.common.fields import ArrowDateTime, Hex from lemur.common.schema import LemurInputSchema, LemurOutputSchema @@ -107,6 +108,11 @@ class CertificateInputSchema(CertificateCreationSchema): def load_data(self, data): if data.get('replacements'): data['replaces'] = data['replacements'] # TODO remove when field is deprecated + if data['csr']: + dns_names = cert_utils.get_dns_names_from_csr(data['csr']) + if not data['extensions']['subAltNames']['names']: + data['extensions']['subAltNames']['names'] = [] + data['extensions']['subAltNames']['names'] += dns_names return missing.convert_validity_years(data) diff --git a/lemur/certificates/utils.py b/lemur/certificates/utils.py new file mode 100644 index 00000000..933fe45e --- /dev/null +++ b/lemur/certificates/utils.py @@ -0,0 +1,42 @@ +""" +Utils to parse certificate data. + +.. module: lemur.certificates.hooks + :platform: Unix + :copyright: (c) 2019 by Javier Ramos, see AUTHORS for more + :license: Apache, see LICENSE for more details. + +.. moduleauthor:: Javier Ramos +""" + +from cryptography import x509 +from cryptography.hazmat.backends import default_backend +from marshmallow.exceptions import ValidationError + + +def get_dns_names_from_csr(data): + """ + Fetches DNSNames from CSR. 
+ Potentially extendable to any kind of SubjectAlternativeName + :param data: PEM-encoded string with CSR + :return: + """ + dns_names = [] + try: + request = x509.load_pem_x509_csr(data.encode('utf-8'), default_backend()) + except Exception: + raise ValidationError('CSR presented is not valid.') + + try: + alt_names = request.extensions.get_extension_for_class(x509.SubjectAlternativeName) + + for name in alt_names.value.get_values_for_type(x509.DNSName): + dns_name = { + 'nameType': 'DNSName', + 'value': name + } + dns_names.append(dns_name) + except x509.ExtensionNotFound: + pass + + return dns_names From 9e5496b484fd5dcc87120938beabaf7b28031dcf Mon Sep 17 00:00:00 2001 From: Javier Ramos Date: Fri, 15 Mar 2019 10:19:25 +0100 Subject: [PATCH 126/357] Update schemas.py --- lemur/certificates/schemas.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index e9b61539..25fc2c46 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -108,7 +108,7 @@ class CertificateInputSchema(CertificateCreationSchema): def load_data(self, data): if data.get('replacements'): data['replaces'] = data['replacements'] # TODO remove when field is deprecated - if data['csr']: + if data.get('csr'): dns_names = cert_utils.get_dns_names_from_csr(data['csr']) if not data['extensions']['subAltNames']['names']: data['extensions']['subAltNames']['names'] = [] From dbd948be6eda9ce70731dee908f71de5a15910dc Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 18 Mar 2019 12:50:18 -0700 Subject: [PATCH 127/357] updating requirements --- requirements-dev.txt | 5 ++--- requirements-docs.txt | 21 ++++++++++----------- requirements-tests.txt | 16 ++++++++-------- requirements.txt | 21 ++++++++++----------- 4 files changed, 30 insertions(+), 33 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index e67aea64..36e2c9a4 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ # aspy.yaml==1.2.0 # via pre-commit bleach==3.1.0 # via readme-renderer -certifi==2018.11.29 # via requests +certifi==2019.3.9 # via requests cfgv==1.5.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer @@ -14,7 +14,6 @@ flake8==3.5.0 identify==1.4.0 # via pre-commit idna==2.8 # via requests importlib-metadata==0.8 # via pre-commit -importlib-resources==1.0.2 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 @@ -23,7 +22,7 @@ pre-commit==1.14.4 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer -pyyaml==5.1b3 +pyyaml==5.1 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine diff --git a/requirements-docs.txt b/requirements-docs.txt index e936c197..7879c667 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,12 +4,12 @@ # # pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index # -acme==0.31.0 +acme==0.32.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.8 amqp==2.4.2 -aniso8601==5.1.0 +aniso8601==6.0.0 arrow==0.13.1 asn1crypto==0.24.0 asyncpool==1.0 @@ -17,10 +17,10 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.107 -botocore==1.12.107 +boto3==1.9.116 +botocore==1.12.116 celery[redis]==4.2.1 -certifi==2018.11.29 +certifi==2019.3.9 certsrv==2.1.1 cffi==1.12.2 chardet==3.0.4 @@ -54,8 +54,8 @@ kombu==4.3.0 lockfile==0.12.2 
mako==1.0.7 markupsafe==1.1.1 -marshmallow-sqlalchemy==0.16.0 -marshmallow==2.18.1 +marshmallow-sqlalchemy==0.16.1 +marshmallow==2.19.1 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx @@ -75,10 +75,9 @@ pyrfc3339==1.1 python-dateutil==2.8.0 python-editor==1.0.4 pytz==2018.9 -pyyaml==5.1b3 +pyyaml==5.1 raven[flask]==6.10.0 redis==2.10.6 -relativetimebuilder==0.2.0 requests-toolbelt==0.9.1 requests[security]==2.21.0 retrying==1.3.3 @@ -86,11 +85,11 @@ s3transfer==0.2.0 six==1.12.0 snowballstemmer==1.2.1 # via sphinx sphinx-rtd-theme==0.4.3 -sphinx==1.8.4 +sphinx==1.8.5 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.0 +sqlalchemy==1.3.1 tabulate==0.8.3 urllib3==1.24.1 vine==1.2.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index 55e38cbf..da3b4482 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,21 +8,21 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.107 # via moto +boto3==1.9.116 # via moto boto==2.49.0 # via moto -botocore==1.12.107 # via boto3, moto, s3transfer -certifi==2018.11.29 # via requests +botocore==1.12.116 # via boto3, moto, s3transfer +certifi==2019.3.9 # via requests cffi==1.12.2 # via cryptography chardet==3.0.4 # via requests click==7.0 # via flask -coverage==4.5.2 +coverage==4.5.3 cryptography==2.6.1 # via moto docker-pycreds==0.4.0 # via docker docker==3.7.0 # via moto docutils==0.14 # via botocore ecdsa==0.13 # via python-jose factory-boy==2.11.1 -faker==1.0.2 +faker==1.0.4 flask==1.0.2 # via pytest-flask freezegun==0.3.11 future==0.17.1 # via python-jose @@ -46,14 +46,14 @@ pycryptodome==3.7.3 # via python-jose pyflakes==2.1.1 pytest-flask==0.14.0 pytest-mock==1.10.1 -pytest==4.3.0 +pytest==4.3.1 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto -pyyaml==5.1b3 +pyyaml==5.1 requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses -responses==0.10.5 # via moto +responses==0.10.6 # via moto s3transfer==0.2.0 # via boto3 six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker diff --git a/requirements.txt b/requirements.txt index 2aa5f157..5bd37693 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,21 +4,21 @@ # # pip-compile --output-file requirements.txt requirements.in -U --no-index # -acme==0.31.0 +acme==0.32.0 alembic-autogenerate-enums==0.0.2 alembic==1.0.8 # via flask-migrate amqp==2.4.2 # via kombu -aniso8601==5.1.0 # via flask-restful, relativetimebuilder +aniso8601==6.0.0 # via flask-restful arrow==0.13.1 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.107 -botocore==1.12.107 +boto3==1.9.116 +botocore==1.12.116 celery[redis]==4.2.1 -certifi==2018.11.29 +certifi==2019.3.9 certsrv==2.1.1 cffi==1.12.2 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests @@ -51,8 +51,8 @@ kombu==4.3.0 lockfile==0.12.2 mako==1.0.7 # via alembic markupsafe==1.1.1 # via jinja2, mako -marshmallow-sqlalchemy==0.16.0 -marshmallow==2.18.1 +marshmallow-sqlalchemy==0.16.1 +marshmallow==2.19.1 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 @@ -68,19 +68,18 
@@ pyopenssl==19.0.0 pyrfc3339==1.1 # via acme python-dateutil==2.8.0 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic -python-ldap==3.1.0 +python-ldap==3.2.0 pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 -pyyaml==5.1b3 +pyyaml==5.1 raven[flask]==6.10.0 redis==2.10.6 -relativetimebuilder==0.2.0 # via aniso8601 requests-toolbelt==0.9.1 # via acme requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.2.0 # via boto3 six==1.12.0 sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.0 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy==1.3.1 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 urllib3==1.24.1 # via botocore, requests vine==1.2.0 # via amqp From f99b11d50ec91b5e344eeb1497fd60b96c1af107 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Wed, 20 Mar 2019 13:51:06 -0400 Subject: [PATCH 128/357] refactor url and token to support muiltiple instances of vault --- lemur/plugins/lemur_vault_dest/plugin.py | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 2f2a2e82..c47b49a3 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -30,6 +30,22 @@ class VaultDestinationPlugin(DestinationPlugin): author_url = 'https://github.com/alwaysjolley/lemur' options = [ + { + 'name': 'vaultUrl', + 'type': 'str', + 'required': True, + 'validation': '^https?://[a-zA-Z0-9.:-]+$', + 'helpMessage': 'Valid URL to Hashi Vault instance' + 'default': 'http://127.0.0.1:8200' + }, + { + 'name': 'vaultAuthTokenFile', + 'type': 'str', + 'required': True, + 'validation': '(/[^/]+)+', + 'helpMessage': 'Must be a valid file path!', + 'default': '/etc/pki/secrets/vault/token' + }, { 'name': 'vaultMount', 'type': 'str', @@ -79,13 +95,17 @@ class VaultDestinationPlugin(DestinationPlugin): cname = common_name(parse_certificate(body)) token = current_app.config.get('VAULT_TOKEN') - url = current_app.config.get('VAULT_URL') - + #url = current_app.config.get('VAULT_URL') + url = self.get_option('vaultUrl', options) + token_file = self.get_option('vaultFile', options) mount = self.get_option('vaultMount', options) path = self.get_option('vaultPath', options) bundle = self.get_option('bundleChain', options) obj_name = self.get_option('objectName', options) + with open(token_file, 'r') as file: + token = file.readline() + client = hvac.Client(url=url, token=token) if obj_name: path = '{0}/{1}'.format(path, obj_name) From fa4a5122bc7c723e5b8e9a8396f05a63577399db Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Wed, 20 Mar 2019 14:59:04 -0400 Subject: [PATCH 129/357] fixing file read to trim line endings and cleanup --- lemur/plugins/lemur_vault_dest/plugin.py | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index c47b49a3..91f6a07a 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -36,15 +36,13 @@ class VaultDestinationPlugin(DestinationPlugin): 'required': True, 'validation': '^https?://[a-zA-Z0-9.:-]+$', 'helpMessage': 'Valid URL to Hashi Vault instance' - 'default': 'http://127.0.0.1:8200' }, { 'name': 'vaultAuthTokenFile', 'type': 'str', 'required': True, 'validation': '(/[^/]+)+', - 'helpMessage': 'Must be a valid file path!', - 'default': '/etc/pki/secrets/vault/token' + 'helpMessage': 
'Must be a valid file path!' }, { 'name': 'vaultMount', @@ -94,17 +92,15 @@ class VaultDestinationPlugin(DestinationPlugin): """ cname = common_name(parse_certificate(body)) - token = current_app.config.get('VAULT_TOKEN') - #url = current_app.config.get('VAULT_URL') url = self.get_option('vaultUrl', options) - token_file = self.get_option('vaultFile', options) + token_file = self.get_option('vaultAuthTokenFile', options) mount = self.get_option('vaultMount', options) path = self.get_option('vaultPath', options) bundle = self.get_option('bundleChain', options) obj_name = self.get_option('objectName', options) with open(token_file, 'r') as file: - token = file.readline() + token = file.readline().rstrip('\n') client = hvac.Client(url=url, token=token) if obj_name: From c2158ff8fb284062afb70a2fef40fbbbc94092d9 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Mon, 25 Mar 2019 08:28:23 -0700 Subject: [PATCH 130/357] Add order URI during LE cert creation failure; Fail properly when invalid CA passed; Update reqs --- lemur/certificates/schemas.py | 3 +++ lemur/plugins/lemur_acme/plugin.py | 5 ++++- requirements-dev.txt | 4 ++-- requirements-docs.txt | 31 +++++++++++++++--------------- requirements-tests.txt | 26 ++++++++++++------------- requirements.txt | 31 +++++++++++++++--------------- 6 files changed, 52 insertions(+), 48 deletions(-) diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index d20fd5a7..f790d92f 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -96,6 +96,9 @@ class CertificateInputSchema(CertificateCreationSchema): @validates_schema def validate_authority(self, data): + if isinstance(data['authority'], str): + raise ValidationError("Authority not found.") + if not data['authority'].active: raise ValidationError("The authority is inactive.", ['authority']) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index 66295ed2..59cde380 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -459,7 +459,10 @@ class ACMEIssuerPlugin(IssuerPlugin): "pending_cert": entry["pending_cert"], }) except (PollError, AcmeError, Exception) as e: - current_app.logger.error("Unable to resolve pending cert: {}".format(pending_cert), exc_info=True) + order_url = order.uri + current_app.logger.error( + "Unable to resolve pending cert: {}. 
" + "Check out {} for more information.".format(pending_cert, order_url), exc_info=True) certs.append({ "cert": False, "pending_cert": entry["pending_cert"], diff --git a/requirements-dev.txt b/requirements-dev.txt index e67aea64..37202d97 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,7 +6,7 @@ # aspy.yaml==1.2.0 # via pre-commit bleach==3.1.0 # via readme-renderer -certifi==2018.11.29 # via requests +certifi==2019.3.9 # via requests cfgv==1.5.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer @@ -23,7 +23,7 @@ pre-commit==1.14.4 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer -pyyaml==5.1b3 +pyyaml==5.1 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine diff --git a/requirements-docs.txt b/requirements-docs.txt index e936c197..40cd73de 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,12 +4,12 @@ # # pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index # -acme==0.31.0 +acme==0.32.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.8 amqp==2.4.2 -aniso8601==5.1.0 +aniso8601==6.0.0 arrow==0.13.1 asn1crypto==0.24.0 asyncpool==1.0 @@ -17,10 +17,10 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.5.0.5 blinker==1.4 -boto3==1.9.107 -botocore==1.12.107 -celery[redis]==4.2.1 -certifi==2018.11.29 +boto3==1.9.120 +botocore==1.12.120 +celery[redis]==4.2.2 +certifi==2019.3.9 certsrv==2.1.1 cffi==1.12.2 chardet==3.0.4 @@ -52,16 +52,16 @@ josepy==1.1.0 jsonlines==1.2.0 kombu==4.3.0 lockfile==0.12.2 -mako==1.0.7 +mako==1.0.8 markupsafe==1.1.1 -marshmallow-sqlalchemy==0.16.0 -marshmallow==2.18.1 +marshmallow-sqlalchemy==0.16.1 +marshmallow==2.19.1 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx paramiko==2.4.2 pbr==5.1.3 -pem==18.2.0 +pem==19.1.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 pyasn1==0.4.5 @@ -75,10 +75,9 @@ pyrfc3339==1.1 python-dateutil==2.8.0 python-editor==1.0.4 pytz==2018.9 -pyyaml==5.1b3 +pyyaml==5.1 raven[flask]==6.10.0 redis==2.10.6 -relativetimebuilder==0.2.0 requests-toolbelt==0.9.1 requests[security]==2.21.0 retrying==1.3.3 @@ -86,13 +85,13 @@ s3transfer==0.2.0 six==1.12.0 snowballstemmer==1.2.1 # via sphinx sphinx-rtd-theme==0.4.3 -sphinx==1.8.4 +sphinx==1.8.5 sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-websupport==1.1.0 # via sphinx sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.0 +sqlalchemy==1.3.1 tabulate==0.8.3 urllib3==1.24.1 -vine==1.2.0 -werkzeug==0.14.1 +vine==1.3.0 +werkzeug==0.15.1 xmltodict==0.12.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index 55e38cbf..ed48cfdd 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,21 +8,21 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.107 # via moto +boto3==1.9.120 # via moto boto==2.49.0 # via moto -botocore==1.12.107 # via boto3, moto, s3transfer -certifi==2018.11.29 # via requests +botocore==1.12.120 # via boto3, moto, s3transfer +certifi==2019.3.9 # via requests cffi==1.12.2 # via cryptography chardet==3.0.4 # via requests click==7.0 # via flask -coverage==4.5.2 +coverage==4.5.3 cryptography==2.6.1 # via moto docker-pycreds==0.4.0 # via docker -docker==3.7.0 # via moto +docker==3.7.1 # via moto docutils==0.14 # via botocore ecdsa==0.13 # via python-jose factory-boy==2.11.1 -faker==1.0.2 +faker==1.0.4 flask==1.0.2 # via pytest-flask 
freezegun==0.3.11 future==0.17.1 # via python-jose @@ -42,23 +42,23 @@ pluggy==0.9.0 # via pytest py==1.8.0 # via pytest pyaml==18.11.0 # via moto pycparser==2.19 # via cffi -pycryptodome==3.7.3 # via python-jose +pycryptodome==3.8.0 # via python-jose pyflakes==2.1.1 pytest-flask==0.14.0 -pytest-mock==1.10.1 -pytest==4.3.0 +pytest-mock==1.10.2 +pytest==4.3.1 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto pytz==2018.9 # via moto -pyyaml==5.1b3 +pyyaml==5.1 requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses -responses==0.10.5 # via moto +responses==0.10.6 # via moto s3transfer==0.2.0 # via boto3 six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker urllib3==1.24.1 # via botocore, requests -websocket-client==0.55.0 # via docker -werkzeug==0.14.1 # via flask, moto, pytest-flask +websocket-client==0.56.0 # via docker +werkzeug==0.15.1 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk xmltodict==0.12.0 # via moto diff --git a/requirements.txt b/requirements.txt index 2aa5f157..9adbdf37 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,21 +4,21 @@ # # pip-compile --output-file requirements.txt requirements.in -U --no-index # -acme==0.31.0 +acme==0.32.0 alembic-autogenerate-enums==0.0.2 alembic==1.0.8 # via flask-migrate amqp==2.4.2 # via kombu -aniso8601==5.1.0 # via flask-restful, relativetimebuilder +aniso8601==6.0.0 # via flask-restful arrow==0.13.1 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.5.0.5 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.107 -botocore==1.12.107 -celery[redis]==4.2.1 -certifi==2018.11.29 +boto3==1.9.120 +botocore==1.12.120 +celery[redis]==4.2.2 +certifi==2019.3.9 certsrv==2.1.1 cffi==1.12.2 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests @@ -49,15 +49,15 @@ josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare kombu==4.3.0 lockfile==0.12.2 -mako==1.0.7 # via alembic +mako==1.0.8 # via alembic markupsafe==1.1.1 # via jinja2, mako -marshmallow-sqlalchemy==0.16.0 -marshmallow==2.18.1 +marshmallow-sqlalchemy==0.16.1 +marshmallow==2.19.1 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 pbr==5.1.3 # via mock -pem==18.2.0 +pem==19.1.0 psycopg2==2.7.7 pyasn1-modules==0.2.4 # via python-ldap pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, python-ldap @@ -68,21 +68,20 @@ pyopenssl==19.0.0 pyrfc3339==1.1 # via acme python-dateutil==2.8.0 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic -python-ldap==3.1.0 +python-ldap==3.2.0 pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 -pyyaml==5.1b3 +pyyaml==5.1 raven[flask]==6.10.0 redis==2.10.6 -relativetimebuilder==0.2.0 # via aniso8601 requests-toolbelt==0.9.1 # via acme requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.2.0 # via boto3 six==1.12.0 sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.0 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy==1.3.1 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 urllib3==1.24.1 # via botocore, requests -vine==1.2.0 # via amqp -werkzeug==0.14.1 # via flask +vine==1.3.0 # via amqp +werkzeug==0.15.1 # via flask xmltodict==0.12.0 From d2e969b83648fc8d8317f0822522ab192dcd8983 Mon Sep 17 00:00:00 2001 
From: Hossein Shafagh Date: Thu, 21 Feb 2019 19:38:50 -0800 Subject: [PATCH 131/357] better synching of source and destinations --- lemur/plugins/bases/destination.py | 1 + lemur/plugins/lemur_aws/plugin.py | 1 + 2 files changed, 2 insertions(+) diff --git a/lemur/plugins/bases/destination.py b/lemur/plugins/bases/destination.py index 1e7e4ed2..04b01235 100644 --- a/lemur/plugins/bases/destination.py +++ b/lemur/plugins/bases/destination.py @@ -12,6 +12,7 @@ from lemur.plugins.base import Plugin, plugins class DestinationPlugin(Plugin): type = 'destination' requires_key = True + sync_as_source = False def upload(self, name, body, private_key, cert_chain, options, **kwargs): raise NotImplementedError diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 1c2607a5..d3c58464 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -195,6 +195,7 @@ class AWSSourcePlugin(SourcePlugin): slug = 'aws-source' description = 'Discovers all SSL certificates and ELB endpoints in an AWS account' version = aws.VERSION + sync_as_source = True author = 'Kevin Glisson' author_url = 'https://github.com/netflix/lemur' From b86e381e20b3fc84839a5c9952717564219efa00 Mon Sep 17 00:00:00 2001 From: Javier Ramos Date: Tue, 26 Mar 2019 15:09:08 +0100 Subject: [PATCH 132/357] Parse SubjectAlternativeNames from CSR into Lemur Certificate --- lemur/certificates/schemas.py | 18 +++++++++++++++--- lemur/certificates/utils.py | 24 +++++++++++------------- 2 files changed, 26 insertions(+), 16 deletions(-) diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index 78217de0..5528e168 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -112,10 +112,22 @@ class CertificateInputSchema(CertificateCreationSchema): if data.get('replacements'): data['replaces'] = data['replacements'] # TODO remove when field is deprecated if data.get('csr'): - dns_names = cert_utils.get_dns_names_from_csr(data['csr']) - if not data['extensions']['subAltNames']['names']: + csr_sans = cert_utils.get_sans_from_csr(data['csr']) + if not data.get('extensions'): + data['extensions'] = { + 'subAltNames': { + 'names': [] + } + } + elif not data['extensions'].get('subAltNames'): + data['extensions']['subAltNames'] = { + 'subAltNames': { + 'names': [] + } + } + elif not data['extensions']['subAltNames'].get('names'): data['extensions']['subAltNames']['names'] = [] - data['extensions']['subAltNames']['names'] += dns_names + data['extensions']['subAltNames']['names'] += csr_sans return missing.convert_validity_years(data) diff --git a/lemur/certificates/utils.py b/lemur/certificates/utils.py index 933fe45e..800e1201 100644 --- a/lemur/certificates/utils.py +++ b/lemur/certificates/utils.py @@ -14,14 +14,14 @@ from cryptography.hazmat.backends import default_backend from marshmallow.exceptions import ValidationError -def get_dns_names_from_csr(data): +def get_sans_from_csr(data): """ - Fetches DNSNames from CSR. - Potentially extendable to any kind of SubjectAlternativeName + Fetches SubjectAlternativeNames from CSR. 
+ Works with any kind of SubjectAlternativeName :param data: PEM-encoded string with CSR - :return: + :return: List of LemurAPI-compatible subAltNames """ - dns_names = [] + sub_alt_names = [] try: request = x509.load_pem_x509_csr(data.encode('utf-8'), default_backend()) except Exception: @@ -29,14 +29,12 @@ def get_dns_names_from_csr(data): try: alt_names = request.extensions.get_extension_for_class(x509.SubjectAlternativeName) - - for name in alt_names.value.get_values_for_type(x509.DNSName): - dns_name = { - 'nameType': 'DNSName', - 'value': name - } - dns_names.append(dns_name) + for alt_name in alt_names.value: + sub_alt_names.append({ + 'nameType': type(alt_name).__name__, + 'value': alt_name.value + }) except x509.ExtensionNotFound: pass - return dns_names + return sub_alt_names From e10007ef7b135eff08dea58b6b542dbb36ec72e3 Mon Sep 17 00:00:00 2001 From: Ryan DeShone Date: Fri, 29 Mar 2019 10:32:49 -0400 Subject: [PATCH 133/357] Add support for Vault KV API v2 This adds the ability to target KV API v1 or v2. --- lemur/plugins/lemur_vault_dest/plugin.py | 29 +++++++++++++++++++----- 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 91f6a07a..6868b7b0 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -37,6 +37,17 @@ class VaultDestinationPlugin(DestinationPlugin): 'validation': '^https?://[a-zA-Z0-9.:-]+$', 'helpMessage': 'Valid URL to Hashi Vault instance' }, + { + 'name': 'vaultKvApiVersion', + 'type': 'select', + 'value': '2', + 'available': [ + '1', + '2' + ], + 'required': True, + 'helpMessage': 'Version of the Vault KV API to use' + }, { 'name': 'vaultAuthTokenFile', 'type': 'str', @@ -98,17 +109,20 @@ class VaultDestinationPlugin(DestinationPlugin): path = self.get_option('vaultPath', options) bundle = self.get_option('bundleChain', options) obj_name = self.get_option('objectName', options) + api_version = self.get_option('vaultKvApiVersion', options) with open(token_file, 'r') as file: token = file.readline().rstrip('\n') client = hvac.Client(url=url, token=token) + client.secrets.kv.default_kv_version = api_version + if obj_name: path = '{0}/{1}'.format(path, obj_name) else: path = '{0}/{1}'.format(path, cname) - secret = get_secret(url, token, mount, path) + secret = get_secret(client, mount, path) secret['data'][cname] = {} if bundle == 'Nginx' and cert_chain: @@ -123,8 +137,9 @@ class VaultDestinationPlugin(DestinationPlugin): if isinstance(san_list, list): secret['data'][cname]['san'] = san_list try: - client.secrets.kv.v1.create_or_update_secret( - path=path, mount_point=mount, secret=secret['data']) + client.secrets.kv.create_or_update_secret( + path=path, mount_point=mount, secret=secret['data'] + ) except ConnectionError as err: current_app.logger.exception( "Exception uploading secret to vault: {0}".format(err), exc_info=True) @@ -144,12 +159,14 @@ def get_san_list(body): return san_list -def get_secret(url, token, mount, path): +def get_secret(client, mount, path): """ retreiive existing data from mount path and return dictionary """ result = {'data': {}} try: - client = hvac.Client(url=url, token=token) - result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount) + if client.secrets.kv.default_kv_version == '1': + result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount) + else: + result = client.secrets.kv.v2.read_secret_version(path=path, mount_point=mount) except ConnectionError: 
pass finally: From d80a6bb405be98b6cfffbbd04077d4fe82b597ef Mon Sep 17 00:00:00 2001 From: Javier Ramos Date: Wed, 27 Mar 2019 13:47:05 +0100 Subject: [PATCH 134/357] Added tests for CSR parsing into CertificateInputSchema --- lemur/certificates/schemas.py | 6 ++---- lemur/tests/test_certificates.py | 25 +++++++++++++++++++++++++ 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index 5528e168..1352f796 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -121,11 +121,9 @@ class CertificateInputSchema(CertificateCreationSchema): } elif not data['extensions'].get('subAltNames'): data['extensions']['subAltNames'] = { - 'subAltNames': { - 'names': [] - } + 'names': [] } - elif not data['extensions']['subAltNames'].get('names'): + elif not data['extensions']['subAltNames']['names']: data['extensions']['subAltNames']['names'] = [] data['extensions']['subAltNames']['names'] += csr_sans return missing.convert_validity_years(data) diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index 4013d367..1d7bf65d 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -284,6 +284,31 @@ def test_certificate_input_with_extensions(client, authority): assert not errors +def test_certificate_input_schema_parse_csr(authority): + from lemur.certificates.schemas import CertificateInputSchema + + test_san_dns = 'foobar.com' + extensions = {'sub_alt_names': {'names': x509.SubjectAlternativeName([x509.DNSName(test_san_dns)])}} + csr, private_key = create_csr(owner='joe@example.com', common_name='ACommonName', organization='test', + organizational_unit='Meters', country='NL', state='Noord-Holland', location='Amsterdam', + key_type='RSA2048', extensions=extensions) + + input_data = { + 'commonName': 'test.example.com', + 'owner': 'jim@example.com', + 'authority': {'id': authority.id}, + 'description': 'testtestest', + 'csr': csr, + 'dnsProvider': None, + } + + data, errors = CertificateInputSchema().load(input_data) + + for san in data['extensions']['sub_alt_names']['names']: + assert san.value == test_san_dns + assert not errors + + def test_certificate_out_of_range_date(client, authority): from lemur.certificates.schemas import CertificateInputSchema input_data = { From 2c82708391435018352af0cdac4b025413d8f21c Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 12 Mar 2019 14:17:53 -0700 Subject: [PATCH 135/357] simple hardcoded announcement --- lemur/static/app/index.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index 466cfe9e..fcc54069 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,6 +89,11 @@
+
+ ×
+ Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
+
+
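A note on the Vault KV change in PATCH 133/357 above: the plugin now keys every read and write off client.secrets.kv.default_kv_version. A minimal, standalone sketch of that branching with hvac (the URL, token, mount and path below are placeholders, not values from this patch series):

import hvac


def read_kv_secret(client, mount, path, api_version='2'):
    # Mirror the version switch used by the Vault destination plugin above.
    client.secrets.kv.default_kv_version = api_version
    if api_version == '1':
        return client.secrets.kv.v1.read_secret(path=path, mount_point=mount)
    # KV v2 nests the payload one level deeper, under response['data']['data'].
    return client.secrets.kv.v2.read_secret_version(path=path, mount_point=mount)


# Example usage (placeholder values):
# client = hvac.Client(url='https://vault.example.com', token='s.xxxxx')
# secret = read_kv_secret(client, mount='secret', path='teams/web/cert', api_version='2')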
From dbf34a4d48152db540bcaacb92453ef33e878ee4 Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Fri, 5 Apr 2019 17:52:55 +0300 Subject: [PATCH 136/357] Rewrite Java Keystore/Truststore support based on pyjks library --- lemur/common/utils.py | 4 +- lemur/plugins/lemur_java/plugin.py | 246 ------------------ lemur/plugins/lemur_java/tests/test_java.py | 63 ----- .../{lemur_java => lemur_jks}/__init__.py | 0 lemur/plugins/lemur_jks/plugin.py | 140 ++++++++++ .../tests/conftest.py | 0 lemur/plugins/lemur_jks/tests/test_jks.py | 96 +++++++ requirements.in | 1 + requirements.txt | 1 + setup.py | 4 +- 10 files changed, 243 insertions(+), 312 deletions(-) delete mode 100644 lemur/plugins/lemur_java/plugin.py delete mode 100644 lemur/plugins/lemur_java/tests/test_java.py rename lemur/plugins/{lemur_java => lemur_jks}/__init__.py (100%) create mode 100644 lemur/plugins/lemur_jks/plugin.py rename lemur/plugins/{lemur_java => lemur_jks}/tests/conftest.py (100%) create mode 100644 lemur/plugins/lemur_jks/tests/test_jks.py diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 62c3182b..24ff5784 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -85,7 +85,9 @@ def parse_cert_chain(pem_chain): :param pem_chain: string :return: List of parsed certificates """ - return [parse_certificate(cert) for cert in split_pem(pem_chain) if pem_chain] + if pem_chain is None: + return [] + return [parse_certificate(cert) for cert in split_pem(pem_chain) if cert] def parse_csr(csr): diff --git a/lemur/plugins/lemur_java/plugin.py b/lemur/plugins/lemur_java/plugin.py deleted file mode 100644 index 7eb33b90..00000000 --- a/lemur/plugins/lemur_java/plugin.py +++ /dev/null @@ -1,246 +0,0 @@ -""" -.. module: lemur.plugins.lemur_java.plugin - :platform: Unix - :copyright: (c) 2018 by Netflix Inc., see AUTHORS for more - :license: Apache, see LICENSE for more details. - -.. moduleauthor:: Kevin Glisson -""" -import subprocess - -from flask import current_app - -from cryptography.fernet import Fernet - -from lemur.utils import mktempfile, mktemppath -from lemur.plugins.bases import ExportPlugin -from lemur.plugins import lemur_java as java -from lemur.common.utils import parse_certificate -from lemur.common.defaults import common_name - - -def run_process(command): - """ - Runs a given command with pOpen and wraps some - error handling around it. 
- :param command: - :return: - """ - p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout, stderr = p.communicate() - - if p.returncode != 0: - current_app.logger.debug(" ".join(command)) - current_app.logger.error(stderr) - current_app.logger.error(stdout) - raise Exception(stderr) - - -def split_chain(chain): - """ - Split the chain into individual certificates for import into keystore - - :param chain: - :return: - """ - certs = [] - - if not chain: - return certs - - lines = chain.split('\n') - - cert = [] - for line in lines: - cert.append(line + '\n') - if line == '-----END CERTIFICATE-----': - certs.append("".join(cert)) - cert = [] - - return certs - - -def create_truststore(cert, chain, jks_tmp, alias, passphrase): - assert isinstance(cert, str) - assert isinstance(chain, str) - - with mktempfile() as cert_tmp: - with open(cert_tmp, 'w') as f: - f.write(cert) - - run_process([ - "keytool", - "-importcert", - "-file", cert_tmp, - "-keystore", jks_tmp, - "-alias", "{0}_cert".format(alias), - "-storepass", passphrase, - "-noprompt" - ]) - - # Import the entire chain - for idx, cert in enumerate(split_chain(chain)): - with mktempfile() as c_tmp: - with open(c_tmp, 'w') as f: - f.write(cert) - - # Import signed cert in to JKS keystore - run_process([ - "keytool", - "-importcert", - "-file", c_tmp, - "-keystore", jks_tmp, - "-alias", "{0}_cert_{1}".format(alias, idx), - "-storepass", passphrase, - "-noprompt" - ]) - - -def create_keystore(cert, chain, jks_tmp, key, alias, passphrase): - assert isinstance(cert, str) - assert isinstance(chain, str) - assert isinstance(key, str) - - # Create PKCS12 keystore from private key and public certificate - with mktempfile() as cert_tmp: - with open(cert_tmp, 'w') as f: - if chain: - f.writelines([key.strip() + "\n", cert.strip() + "\n", chain.strip() + "\n"]) - else: - f.writelines([key.strip() + "\n", cert.strip() + "\n"]) - - with mktempfile() as p12_tmp: - run_process([ - "openssl", - "pkcs12", - "-export", - "-nodes", - "-name", alias, - "-in", cert_tmp, - "-out", p12_tmp, - "-password", "pass:{}".format(passphrase) - ]) - - # Convert PKCS12 keystore into a JKS keystore - run_process([ - "keytool", - "-importkeystore", - "-destkeystore", jks_tmp, - "-srckeystore", p12_tmp, - "-srcstoretype", "pkcs12", - "-deststoretype", "JKS", - "-alias", alias, - "-srcstorepass", passphrase, - "-deststorepass", passphrase - ]) - - -class JavaTruststoreExportPlugin(ExportPlugin): - title = 'Java Truststore (JKS)' - slug = 'java-truststore-jks' - description = 'Attempts to generate a JKS truststore' - requires_key = False - version = java.VERSION - - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur' - - options = [ - { - 'name': 'alias', - 'type': 'str', - 'required': False, - 'helpMessage': 'Enter the alias you wish to use for the truststore.', - }, - { - 'name': 'passphrase', - 'type': 'str', - 'required': False, - 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this. 
Minimum length is 8.', - 'validation': '' - }, - ] - - def export(self, body, chain, key, options, **kwargs): - """ - Generates a Java Truststore - - :param key: - :param chain: - :param body: - :param options: - :param kwargs: - """ - - if self.get_option('alias', options): - alias = self.get_option('alias', options) - else: - alias = "blah" - - if self.get_option('passphrase', options): - passphrase = self.get_option('passphrase', options) - else: - passphrase = Fernet.generate_key().decode('utf-8') - - with mktemppath() as jks_tmp: - create_truststore(body, chain, jks_tmp, alias, passphrase) - - with open(jks_tmp, 'rb') as f: - raw = f.read() - - return "jks", passphrase, raw - - -class JavaKeystoreExportPlugin(ExportPlugin): - title = 'Java Keystore (JKS)' - slug = 'java-keystore-jks' - description = 'Attempts to generate a JKS keystore' - version = java.VERSION - - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur' - - options = [ - { - 'name': 'passphrase', - 'type': 'str', - 'required': False, - 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this. Minimum length is 8.', - 'validation': '' - }, - { - 'name': 'alias', - 'type': 'str', - 'required': False, - 'helpMessage': 'Enter the alias you wish to use for the keystore.', - } - ] - - def export(self, body, chain, key, options, **kwargs): - """ - Generates a Java Keystore - - :param key: - :param chain: - :param body: - :param options: - :param kwargs: - """ - - if self.get_option('passphrase', options): - passphrase = self.get_option('passphrase', options) - else: - passphrase = Fernet.generate_key().decode('utf-8') - - if self.get_option('alias', options): - alias = self.get_option('alias', options) - else: - alias = common_name(parse_certificate(body)) - - with mktemppath() as jks_tmp: - create_keystore(body, chain, jks_tmp, key, alias, passphrase) - - with open(jks_tmp, 'rb') as f: - raw = f.read() - - return "jks", passphrase, raw diff --git a/lemur/plugins/lemur_java/tests/test_java.py b/lemur/plugins/lemur_java/tests/test_java.py deleted file mode 100644 index 2b8598b8..00000000 --- a/lemur/plugins/lemur_java/tests/test_java.py +++ /dev/null @@ -1,63 +0,0 @@ -import pytest - -from lemur.tests.vectors import INTERNAL_CERTIFICATE_A_STR, INTERNAL_PRIVATE_KEY_A_STR - - -@pytest.mark.skip(reason="no way of currently testing this") -def test_export_truststore(app): - from lemur.plugins.base import plugins - - p = plugins.get('java-truststore-jks') - options = [{'name': 'passphrase', 'value': 'test1234'}] - actual = p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options) - - assert actual[0] == 'jks' - assert actual[1] == 'test1234' - assert isinstance(actual[2], bytes) - - -@pytest.mark.skip(reason="no way of currently testing this") -def test_export_truststore_default_password(app): - from lemur.plugins.base import plugins - - p = plugins.get('java-truststore-jks') - options = [] - actual = p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options) - - assert actual[0] == 'jks' - assert isinstance(actual[1], str) - assert isinstance(actual[2], bytes) - - -@pytest.mark.skip(reason="no way of currently testing this") -def test_export_keystore(app): - from lemur.plugins.base import plugins - - p = plugins.get('java-keystore-jks') - options = [{'name': 'passphrase', 'value': 'test1234'}] - - with pytest.raises(Exception): - p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options) - - actual = p.export(INTERNAL_CERTIFICATE_A_STR, "", INTERNAL_PRIVATE_KEY_A_STR, options) - - 
assert actual[0] == 'jks' - assert actual[1] == 'test1234' - assert isinstance(actual[2], bytes) - - -@pytest.mark.skip(reason="no way of currently testing this") -def test_export_keystore_default_password(app): - from lemur.plugins.base import plugins - - p = plugins.get('java-keystore-jks') - options = [] - - with pytest.raises(Exception): - p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options) - - actual = p.export(INTERNAL_CERTIFICATE_A_STR, "", INTERNAL_PRIVATE_KEY_A_STR, options) - - assert actual[0] == 'jks' - assert isinstance(actual[1], str) - assert isinstance(actual[2], bytes) diff --git a/lemur/plugins/lemur_java/__init__.py b/lemur/plugins/lemur_jks/__init__.py similarity index 100% rename from lemur/plugins/lemur_java/__init__.py rename to lemur/plugins/lemur_jks/__init__.py diff --git a/lemur/plugins/lemur_jks/plugin.py b/lemur/plugins/lemur_jks/plugin.py new file mode 100644 index 00000000..3d456f1c --- /dev/null +++ b/lemur/plugins/lemur_jks/plugin.py @@ -0,0 +1,140 @@ +""" +.. module: lemur.plugins.lemur_jks.plugin + :platform: Unix + :copyright: (c) 2018 by Netflix Inc., see AUTHORS for more + :license: Apache, see LICENSE for more details. + +.. moduleauthor:: Marti Raudsepp +""" + +from cryptography.fernet import Fernet +from cryptography.hazmat.primitives import serialization +from jks import PrivateKeyEntry, KeyStore, TrustedCertEntry + +from lemur.common.defaults import common_name +from lemur.common.utils import parse_certificate, parse_cert_chain, parse_private_key +from lemur.plugins import lemur_jks as jks +from lemur.plugins.bases import ExportPlugin + + +def cert_chain_as_der(cert, chain): + """Return a certificate and its chain in a list format, as expected by pyjks.""" + + certs = [parse_certificate(cert)] + certs.extend(parse_cert_chain(chain)) + # certs (list) – A list of certificates, as byte strings. The first one should be the one belonging to the private + # key, the others the chain (in correct order). + return [cert.public_bytes(encoding=serialization.Encoding.DER) for cert in certs] + + +def create_truststore(cert, chain, alias, passphrase): + entries = [] + for idx, cert_bytes in enumerate(cert_chain_as_der(cert, chain)): + # The original cert gets name _cert, first chain element is _cert_1, etc. 
+ cert_alias = alias + '_cert' + ('_{}'.format(idx) if idx else '') + entries.append(TrustedCertEntry.new(cert_alias, cert_bytes)) + + return KeyStore.new('jks', entries).saves(passphrase) + + +def create_keystore(cert, chain, key, alias, passphrase): + certs_bytes = cert_chain_as_der(cert, chain) + key_bytes = parse_private_key(key).private_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption() + ) + entry = PrivateKeyEntry.new(alias, certs_bytes, key_bytes) + + return KeyStore.new('jks', [entry]).saves(passphrase) + + +class JavaTruststoreExportPlugin(ExportPlugin): + title = 'Java Truststore (JKS)' + slug = 'java-truststore-jks' + description = 'Generates a JKS truststore' + requires_key = False + version = jks.VERSION + + author = 'Marti Raudsepp' + author_url = 'https://github.com/intgr' + + options = [ + { + 'name': 'alias', + 'type': 'str', + 'required': False, + 'helpMessage': 'Enter the alias you wish to use for the truststore.', + }, + { + 'name': 'passphrase', + 'type': 'str', + 'required': False, + 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.', + 'validation': '' + }, + ] + + def export(self, body, chain, key, options, **kwargs): + """ + Generates a Java Truststore + """ + + if self.get_option('alias', options): + alias = self.get_option('alias', options) + else: + alias = common_name(parse_certificate(body)) + + if self.get_option('passphrase', options): + passphrase = self.get_option('passphrase', options) + else: + passphrase = Fernet.generate_key().decode('utf-8') + + raw = create_truststore(body, chain, alias, passphrase) + + return 'jks', passphrase, raw + + +class JavaKeystoreExportPlugin(ExportPlugin): + title = 'Java Keystore (JKS)' + slug = 'java-keystore-jks' + description = 'Generates a JKS keystore' + version = jks.VERSION + + author = 'Marti Raudsepp' + author_url = 'https://github.com/intgr' + + options = [ + { + 'name': 'passphrase', + 'type': 'str', + 'required': False, + 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.', + 'validation': '' + }, + { + 'name': 'alias', + 'type': 'str', + 'required': False, + 'helpMessage': 'Enter the alias you wish to use for the keystore.', + } + ] + + def export(self, body, chain, key, options, **kwargs): + """ + Generates a Java Keystore + """ + + if self.get_option('passphrase', options): + passphrase = self.get_option('passphrase', options) + else: + passphrase = Fernet.generate_key().decode('utf-8') + + if self.get_option('alias', options): + alias = self.get_option('alias', options) + else: + alias = common_name(parse_certificate(body)) + + raw = create_keystore(body, chain, key, alias, passphrase) + + return 'jks', passphrase, raw diff --git a/lemur/plugins/lemur_java/tests/conftest.py b/lemur/plugins/lemur_jks/tests/conftest.py similarity index 100% rename from lemur/plugins/lemur_java/tests/conftest.py rename to lemur/plugins/lemur_jks/tests/conftest.py diff --git a/lemur/plugins/lemur_jks/tests/test_jks.py b/lemur/plugins/lemur_jks/tests/test_jks.py new file mode 100644 index 00000000..e4a5e64a --- /dev/null +++ b/lemur/plugins/lemur_jks/tests/test_jks.py @@ -0,0 +1,96 @@ +import pytest +from jks import KeyStore, TrustedCertEntry, PrivateKeyEntry + +from lemur.tests.vectors import INTERNAL_CERTIFICATE_A_STR, SAN_CERT_STR, INTERMEDIATE_CERT_STR, ROOTCA_CERT_STR, \ + SAN_CERT_KEY + + +def test_export_truststore(app): + from 
lemur.plugins.base import plugins + + p = plugins.get('java-truststore-jks') + options = [ + {'name': 'passphrase', 'value': 'hunter2'}, + {'name': 'alias', 'value': 'AzureDiamond'}, + ] + chain = INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR + ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options) + + assert ext == 'jks' + assert password == 'hunter2' + assert isinstance(raw, bytes) + + ks = KeyStore.loads(raw, 'hunter2') + assert ks.store_type == 'jks' + # JKS lower-cases alias strings + assert ks.entries.keys() == {'azurediamond_cert', 'azurediamond_cert_1', 'azurediamond_cert_2'} + assert isinstance(ks.entries['azurediamond_cert'], TrustedCertEntry) + + +def test_export_truststore_defaults(app): + from lemur.plugins.base import plugins + + p = plugins.get('java-truststore-jks') + options = [] + ext, password, raw = p.export(INTERNAL_CERTIFICATE_A_STR, '', '', options) + + assert ext == 'jks' + assert isinstance(password, str) + assert isinstance(raw, bytes) + + ks = KeyStore.loads(raw, password) + assert ks.store_type == 'jks' + # JKS lower-cases alias strings + assert ks.entries.keys() == {'acommonname_cert'} + assert isinstance(ks.entries['acommonname_cert'], TrustedCertEntry) + + +def test_export_keystore(app): + from lemur.plugins.base import plugins + + p = plugins.get('java-keystore-jks') + options = [ + {'name': 'passphrase', 'value': 'hunter2'}, + {'name': 'alias', 'value': 'AzureDiamond'}, + ] + + chain = INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR + with pytest.raises(Exception): + p.export(INTERNAL_CERTIFICATE_A_STR, chain, '', options) + + ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options) + + assert ext == 'jks' + assert password == 'hunter2' + assert isinstance(raw, bytes) + + ks = KeyStore.loads(raw, password) + assert ks.store_type == 'jks' + # JKS lower-cases alias strings + assert ks.entries.keys() == {'azurediamond'} + entry = ks.entries['azurediamond'] + assert isinstance(entry, PrivateKeyEntry) + assert len(entry.cert_chain) == 3 # Cert and chain were provided + + +def test_export_keystore_defaults(app): + from lemur.plugins.base import plugins + + p = plugins.get('java-keystore-jks') + options = [] + + with pytest.raises(Exception): + p.export(INTERNAL_CERTIFICATE_A_STR, '', '', options) + + ext, password, raw = p.export(SAN_CERT_STR, '', SAN_CERT_KEY, options) + + assert ext == 'jks' + assert isinstance(password, str) + assert isinstance(raw, bytes) + + ks = KeyStore.loads(raw, password) + assert ks.store_type == 'jks' + assert ks.entries.keys() == {'san.example.org'} + entry = ks.entries['san.example.org'] + assert isinstance(entry, PrivateKeyEntry) + assert len(entry.cert_chain) == 1 # Only cert itself, no chain was provided diff --git a/requirements.in b/requirements.in index 9b27f604..ecd95284 100644 --- a/requirements.in +++ b/requirements.in @@ -47,3 +47,4 @@ SQLAlchemy-Utils tabulate xmltodict pyyaml>=4.2b1 #high severity alert +pyjks diff --git a/requirements.txt b/requirements.txt index c0e69fb4..b458fb00 100644 --- a/requirements.txt +++ b/requirements.txt @@ -86,3 +86,4 @@ urllib3==1.24.1 # via botocore, requests vine==1.3.0 # via amqp werkzeug==0.15.1 # via flask xmltodict==0.12.0 +pyjks==18.0.0 diff --git a/setup.py b/setup.py index 148f51b1..6fc55420 100644 --- a/setup.py +++ b/setup.py @@ -143,8 +143,8 @@ setup( 'aws_s3 = lemur.plugins.lemur_aws.plugin:S3DestinationPlugin', 'email_notification = lemur.plugins.lemur_email.plugin:EmailNotificationPlugin', 'slack_notification = 
lemur.plugins.lemur_slack.plugin:SlackNotificationPlugin', - 'java_truststore_export = lemur.plugins.lemur_java.plugin:JavaTruststoreExportPlugin', - 'java_keystore_export = lemur.plugins.lemur_java.plugin:JavaKeystoreExportPlugin', + 'java_truststore_export = lemur.plugins.lemur_jks.plugin:JavaTruststoreExportPlugin', + 'java_keystore_export = lemur.plugins.lemur_jks.plugin:JavaKeystoreExportPlugin', 'openssl_export = lemur.plugins.lemur_openssl.plugin:OpenSSLExportPlugin', 'atlas_metric = lemur.plugins.lemur_atlas.plugin:AtlasMetricPlugin', 'kubernetes_destination = lemur.plugins.lemur_kubernetes.plugin:KubernetesDestinationPlugin', From 6fd7752b29391a500a34469c9a9b6cca0e637a04 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 9 Apr 2019 10:08:25 -0700 Subject: [PATCH 137/357] removing the announcement --- lemur/static/app/index.html | 5 ----- 1 file changed, 5 deletions(-) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index fcc54069..466cfe9e 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,11 +89,6 @@
-
- ×
- Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
-
-
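For context on the pyjks rewrite in PATCH 136/357 above: keystores are now built in memory with pyjks and cryptography instead of shelling out to keytool/openssl. A small, self-contained sketch of the truststore path, assuming a PEM certificate string (the alias and passphrase are illustrative):

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from jks import KeyStore, TrustedCertEntry


def pem_truststore_bytes(cert_pem, alias, passphrase):
    # Convert the PEM certificate to DER, wrap it in a trusted-cert entry, serialize the store.
    cert = x509.load_pem_x509_certificate(cert_pem.encode('utf-8'), default_backend())
    der = cert.public_bytes(serialization.Encoding.DER)
    entry = TrustedCertEntry.new(alias, der)
    return KeyStore.new('jks', [entry]).saves(passphrase)


# Round-trip check in the spirit of the new tests (pyjks lower-cases aliases):
# ks = KeyStore.loads(pem_truststore_bytes(some_pem, 'example_cert', 'hunter2'), 'hunter2')
# assert ks.store_type == 'jks' and 'example_cert' in ks.entries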
From 0cc98c378ff15abaae6b2b7e1b332249b2cb4b9e Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 12 Mar 2019 14:17:53 -0700 Subject: [PATCH 138/357] simple hardcoded announcement --- lemur/static/app/index.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index 466cfe9e..fcc54069 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,6 +89,11 @@
+
+ ×
+ Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
+
+
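Back-referencing the schema fix in PATCH 134/357: the pre-existing code nested subAltNames inside itself, so CSR-derived SANs never ended up where the rest of the pipeline looks for them. A tiny illustration of the corrected shape (the hostname is an example value, standing in for csr_sans):

# Before the fix, an empty extensions block became doubly nested and unusable:
#   {'subAltNames': {'subAltNames': {'names': []}}}
# After the fix, CSR-derived SANs are appended one level down:
extensions = {'subAltNames': {'names': []}}
extensions['subAltNames']['names'] += ['foobar.com']  # stand-in for csr_sans
assert extensions == {'subAltNames': {'names': ['foobar.com']}}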
From 1c09712df0c5b43be97d4c1a8e923f2e7e80d118 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 9 Apr 2019 10:08:25 -0700 Subject: [PATCH 139/357] removing the announcement --- lemur/static/app/index.html | 5 ----- 1 file changed, 5 deletions(-) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index fcc54069..466cfe9e 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,11 +89,6 @@
-
- ×
- Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
-
-
From f3d0536800d7db899c38e3bc32ee86fee931c0d3 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 9 Apr 2019 20:49:07 -0700 Subject: [PATCH 140/357] removing hardcoded rules, to give more flexibility into defining new source-destinations --- lemur/common/celery.py | 7 ++++--- lemur/plugins/bases/destination.py | 1 + lemur/plugins/lemur_aws/plugin.py | 3 ++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 308adced..c926b390 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -234,14 +234,15 @@ def sync_source(source): @celery.task() def sync_source_destination(): """ - This celery task will sync destination and source, to make sure all new destinations are also present in source. + This celery task will sync destination and source, to make sure all new destinations are also present as source. Some destinations do not qualify as sources, and hence should be excluded from being added as sources """ current_app.logger.debug("Syncing source and destination") for dst in destinations_service.get_all(): - if dst.plugin_name == 'aws-destination' and not sources_service.get_by_label(dst.label): + destination_plugin = plugins.get(dst.plugin_name) + if destination_plugin.sync_as_source and not sources_service.get_by_label(dst.label): sources_service.create(label=dst.label, - plugin_name='aws-source', + plugin_name=destination_plugin.sync_as_source_name, options=dst.options, description=dst.description) current_app.logger.info("Source: %s added", dst.label) diff --git a/lemur/plugins/bases/destination.py b/lemur/plugins/bases/destination.py index 04b01235..fc73ebcb 100644 --- a/lemur/plugins/bases/destination.py +++ b/lemur/plugins/bases/destination.py @@ -13,6 +13,7 @@ class DestinationPlugin(Plugin): type = 'destination' requires_key = True sync_as_source = False + sync_as_source_name = '' def upload(self, name, body, private_key, cert_chain, options, **kwargs): raise NotImplementedError diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index d3c58464..2f271296 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -154,6 +154,8 @@ class AWSDestinationPlugin(DestinationPlugin): slug = 'aws-destination' description = 'Allow the uploading of certificates to AWS IAM' version = aws.VERSION + sync_as_source = True + sync_as_source_name = 'aws-source' author = 'Kevin Glisson' author_url = 'https://github.com/netflix/lemur' @@ -195,7 +197,6 @@ class AWSSourcePlugin(SourcePlugin): slug = 'aws-source' description = 'Discovers all SSL certificates and ELB endpoints in an AWS account' version = aws.VERSION - sync_as_source = True author = 'Kevin Glisson' author_url = 'https://github.com/netflix/lemur' From 2ff57e932c0686c769327abc15bd0382dbc21429 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 10 Apr 2019 15:40:48 -0700 Subject: [PATCH 141/357] Update requirements - upgrade to py37 --- docker-compose.yml | 5 ++++- lemur/manage.py | 3 +++ lemur/tests/conftest.py | 2 ++ requirements-dev.txt | 8 ++++---- requirements-docs.txt | 39 ++++++++++++++++++++++----------------- requirements-tests.txt | 20 ++++++++++---------- requirements.in | 4 ++-- requirements.txt | 30 +++++++++++++++--------------- tox.ini | 2 +- 9 files changed, 63 insertions(+), 50 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index 66f2f0b1..ee0d8396 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,10 +13,13 @@ services: VIRTUAL_ENV: 'true' 
postgres: - image: postgres:9.4 + image: postgres + restart: always environment: POSTGRES_USER: lemur POSTGRES_PASSWORD: lemur + ports: + - "5432:5432" redis: image: "redis:alpine" diff --git a/lemur/manage.py b/lemur/manage.py index 9161109b..c9ce4240 100755 --- a/lemur/manage.py +++ b/lemur/manage.py @@ -49,6 +49,8 @@ from lemur.policies.models import RotationPolicy # noqa from lemur.pending_certificates.models import PendingCertificate # noqa from lemur.dns_providers.models import DnsProvider # noqa +from sqlalchemy.sql import text + manager = Manager(create_app) manager.add_option('-c', '--config', dest='config_path', required=False) @@ -142,6 +144,7 @@ SQLALCHEMY_DATABASE_URI = 'postgresql://lemur:lemur@localhost:5432/lemur' @MigrateCommand.command def create(): + database.db.engine.execute(text('CREATE EXTENSION IF NOT EXISTS pg_trgm')) database.db.create_all() stamp(revision='head') diff --git a/lemur/tests/conftest.py b/lemur/tests/conftest.py index 43fa7163..e65b9440 100644 --- a/lemur/tests/conftest.py +++ b/lemur/tests/conftest.py @@ -7,6 +7,7 @@ from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes from flask import current_app from flask_principal import identity_changed, Identity +from sqlalchemy.sql import text from lemur import create_app from lemur.common.utils import parse_private_key @@ -55,6 +56,7 @@ def app(request): @pytest.yield_fixture(scope="session") def db(app, request): _db.drop_all() + _db.engine.execute(text('CREATE EXTENSION IF NOT EXISTS pg_trgm')) _db.create_all() _db.app = app diff --git a/requirements-dev.txt b/requirements-dev.txt index 36e2c9a4..e62d1ee6 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -7,18 +7,18 @@ aspy.yaml==1.2.0 # via pre-commit bleach==3.1.0 # via readme-renderer certifi==2019.3.9 # via requests -cfgv==1.5.0 # via pre-commit +cfgv==1.6.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.4.0 # via pre-commit +identify==1.4.1 # via pre-commit idna==2.8 # via requests -importlib-metadata==0.8 # via pre-commit +importlib-metadata==0.9 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.14.4 +pre-commit==1.15.1 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer diff --git a/requirements-docs.txt b/requirements-docs.txt index e99c9cdc..e4233960 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index # -acme==0.32.0 +acme==0.33.1 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.8 @@ -15,11 +15,11 @@ asn1crypto==0.24.0 asyncpool==1.0 babel==2.6.0 # via sphinx bcrypt==3.1.6 -billiard==3.5.0.5 +billiard==3.6.0.0 blinker==1.4 -boto3==1.9.120 -botocore==1.12.120 -celery[redis]==4.2.2 +boto3==1.9.130 +botocore==1.12.130 +celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 cffi==1.12.2 @@ -42,28 +42,28 @@ flask-sqlalchemy==2.3.2 flask==1.0.2 future==0.17.1 gunicorn==19.9.0 -hvac==0.7.2 +hvac==0.8.2 idna==2.8 imagesize==1.1.0 # via sphinx inflection==0.3.1 itsdangerous==1.1.0 -jinja2==2.10 +jinja2==2.10.1 jmespath==0.9.4 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.3.0 +kombu==4.5.0 lockfile==0.12.2 mako==1.0.8 markupsafe==1.1.1 marshmallow-sqlalchemy==0.16.1 -marshmallow==2.19.1 +marshmallow==2.19.2 mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx 
paramiko==2.4.2 pbr==5.1.3 pem==19.1.0 -psycopg2==2.7.7 +psycopg2==2.8.1 pyasn1-modules==0.2.4 pyasn1==0.4.5 pycparser==2.19 @@ -71,14 +71,14 @@ pygments==2.3.1 # via sphinx pyjwt==1.7.1 pynacl==1.3.0 pyopenssl==19.0.0 -pyparsing==2.3.1 # via packaging +pyparsing==2.4.0 # via packaging pyrfc3339==1.1 python-dateutil==2.8.0 python-editor==1.0.4 -pytz==2018.9 +pytz==2019.1 pyyaml==5.1 raven[flask]==6.10.0 -redis==2.10.6 +redis==3.2.1 requests-toolbelt==0.9.1 requests[security]==2.21.0 retrying==1.3.3 @@ -86,13 +86,18 @@ s3transfer==0.2.0 six==1.12.0 snowballstemmer==1.2.1 # via sphinx sphinx-rtd-theme==0.4.3 -sphinx==1.8.5 +sphinx==2.0.1 +sphinxcontrib-applehelp==1.0.1 # via sphinx +sphinxcontrib-devhelp==1.0.1 # via sphinx +sphinxcontrib-htmlhelp==1.0.1 # via sphinx sphinxcontrib-httpdomain==1.7.0 -sphinxcontrib-websupport==1.1.0 # via sphinx +sphinxcontrib-jsmath==1.0.1 # via sphinx +sphinxcontrib-qthelp==1.0.2 # via sphinx +sphinxcontrib-serializinghtml==1.1.3 # via sphinx sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.1 +sqlalchemy==1.3.2 tabulate==0.8.3 urllib3==1.24.1 vine==1.3.0 -werkzeug==0.15.1 +werkzeug==0.15.2 xmltodict==0.12.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index ed48cfdd..87fc5b66 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,9 +8,9 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.120 # via moto +boto3==1.9.130 # via moto boto==2.49.0 # via moto -botocore==1.12.120 # via boto3, moto, s3transfer +botocore==1.12.130 # via boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.2 # via cryptography chardet==3.0.4 # via requests @@ -18,7 +18,7 @@ click==7.0 # via flask coverage==4.5.3 cryptography==2.6.1 # via moto docker-pycreds==0.4.0 # via docker -docker==3.7.1 # via moto +docker==3.7.2 # via moto docutils==0.14 # via botocore ecdsa==0.13 # via python-jose factory-boy==2.11.1 @@ -28,13 +28,13 @@ freezegun==0.3.11 future==0.17.1 # via python-jose idna==2.8 # via requests itsdangerous==1.1.0 # via flask -jinja2==2.10 # via flask, moto +jinja2==2.10.1 # via flask, moto jmespath==0.9.4 # via boto3, botocore jsondiff==1.1.1 # via moto jsonpickle==1.1 # via aws-xray-sdk markupsafe==1.1.1 # via jinja2 mock==2.0.0 # via moto -more-itertools==6.0.0 # via pytest +more-itertools==7.0.0 # via pytest moto==1.3.7 nose==1.3.7 pbr==5.1.3 # via mock @@ -42,14 +42,14 @@ pluggy==0.9.0 # via pytest py==1.8.0 # via pytest pyaml==18.11.0 # via moto pycparser==2.19 # via cffi -pycryptodome==3.8.0 # via python-jose +pycryptodome==3.8.1 # via python-jose pyflakes==2.1.1 pytest-flask==0.14.0 -pytest-mock==1.10.2 -pytest==4.3.1 +pytest-mock==1.10.3 +pytest==4.4.0 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto -pytz==2018.9 # via moto +pytz==2019.1 # via moto pyyaml==5.1 requests-mock==1.5.2 requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses @@ -59,6 +59,6 @@ six==1.12.0 # via cryptography, docker, docker-pycreds, faker, fre text-unidecode==1.2 # via faker urllib3==1.24.1 # via botocore, requests websocket-client==0.56.0 # via docker -werkzeug==0.15.1 # via flask, moto, pytest-flask +werkzeug==0.15.2 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk xmltodict==0.12.0 # via moto diff --git a/requirements.in b/requirements.in index 9b27f604..526f1b88 100644 --- a/requirements.in +++ b/requirements.in @@ -27,7 +27,7 @@ gunicorn hvac # required for the vault destination 
plugin inflection jinja2 -kombu==4.3.0 # kombu 4.4.0 requires redis 3 +kombu lockfile marshmallow-sqlalchemy marshmallow @@ -39,7 +39,7 @@ pyjwt pyOpenSSL python_ldap raven[flask] -redis<3 # redis>=3 is not compatible with celery +redis requests retrying six diff --git a/requirements.txt b/requirements.txt index c0e69fb4..b7cda309 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file requirements.txt requirements.in -U --no-index # -acme==0.32.0 +acme==0.33.1 alembic-autogenerate-enums==0.0.2 alembic==1.0.8 # via flask-migrate amqp==2.4.2 # via kombu @@ -13,11 +13,11 @@ arrow==0.13.1 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko -billiard==3.5.0.5 # via celery +billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.120 -botocore==1.12.120 -celery[redis]==4.2.2 +boto3==1.9.130 +botocore==1.12.130 +celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 cffi==1.12.2 # via bcrypt, cryptography, pynacl @@ -40,26 +40,26 @@ flask-sqlalchemy==2.3.2 flask==1.0.2 future==0.17.1 gunicorn==19.9.0 -hvac==0.7.2 +hvac==0.8.2 idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask -jinja2==2.10 +jinja2==2.10.1 jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.3.0 +kombu==4.5.0 lockfile==0.12.2 mako==1.0.8 # via alembic markupsafe==1.1.1 # via jinja2, mako marshmallow-sqlalchemy==0.16.1 -marshmallow==2.19.1 +marshmallow==2.19.2 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 pbr==5.1.3 # via mock pem==19.1.0 -psycopg2==2.7.7 +psycopg2==2.8.1 pyasn1-modules==0.2.4 # via python-ldap pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, python-ldap pycparser==2.19 # via cffi @@ -70,19 +70,19 @@ pyrfc3339==1.1 # via acme python-dateutil==2.8.0 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic python-ldap==3.2.0 -pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339 +pytz==2019.1 # via acme, celery, flask-restful, pyrfc3339 pyyaml==5.1 raven[flask]==6.10.0 -redis==2.10.6 +redis==3.2.1 requests-toolbelt==0.9.1 # via acme requests[security]==2.21.0 retrying==1.3.3 s3transfer==0.2.0 # via boto3 six==1.12.0 sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.1 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy==1.3.2 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 urllib3==1.24.1 # via botocore, requests -vine==1.3.0 # via amqp -werkzeug==0.15.1 # via flask +vine==1.3.0 # via amqp, celery +werkzeug==0.15.2 # via flask xmltodict==0.12.0 diff --git a/tox.ini b/tox.ini index fdd2585b..d3ad8944 100644 --- a/tox.ini +++ b/tox.ini @@ -1,2 +1,2 @@ [tox] -envlist = py35 +envlist = py37 From d3fbf46f7a07aa70e6062cb7a922c97b9c0965df Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 10 Apr 2019 16:09:55 -0700 Subject: [PATCH 142/357] Upgrade travis deps --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index b540937d..8765fed3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,8 +10,8 @@ addons: matrix: include: - - python: "3.5" - env: TOXENV=py35 + - python: "3.7" + env: TOXENV=py37 cache: directories: From 142aadffef0f1318e94ab62aba078215cac39340 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 10 Apr 2019 16:18:49 -0700 Subject: [PATCH 143/357] Upgrade travis to xenial --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/.travis.yml b/.travis.yml index 8765fed3..cf693a8b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,6 +1,6 @@ language: python sudo: required -dist: trusty +dist: xenial node_js: - "6.2.0" From f185df4f1e38e6e7b682bd11b7d1184382ea6c45 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 13:28:58 -0700 Subject: [PATCH 144/357] bringing class AWSDestinationPlugin(DestinationPlugin) after AWSSourcePlugin.slug, such that we can do: sync_as_source_name = AWSSourcePlugin.slug --- lemur/plugins/lemur_aws/plugin.py | 86 +++++++++++++++---------------- 1 file changed, 43 insertions(+), 43 deletions(-) diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 2f271296..57cc831c 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -149,49 +149,6 @@ def get_elb_endpoints_v2(account_number, region, elb_dict): return endpoints -class AWSDestinationPlugin(DestinationPlugin): - title = 'AWS' - slug = 'aws-destination' - description = 'Allow the uploading of certificates to AWS IAM' - version = aws.VERSION - sync_as_source = True - sync_as_source_name = 'aws-source' - - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur' - - options = [ - { - 'name': 'accountNumber', - 'type': 'str', - 'required': True, - 'validation': '[0-9]{12}', - 'helpMessage': 'Must be a valid AWS account number!', - }, - { - 'name': 'path', - 'type': 'str', - 'default': '/', - 'helpMessage': 'Path to upload certificate.' - } - ] - - # 'elb': { - # 'name': {'type': 'name'}, - # 'region': {'type': 'str'}, - # 'port': {'type': 'int'} - # } - - def upload(self, name, body, private_key, cert_chain, options, **kwargs): - iam.upload_cert(name, body, private_key, - self.get_option('path', options), - cert_chain=cert_chain, - account_number=self.get_option('accountNumber', options)) - - def deploy(self, elb_name, account, region, certificate): - pass - - class AWSSourcePlugin(SourcePlugin): title = 'AWS' slug = 'aws-source' @@ -268,6 +225,49 @@ class AWSSourcePlugin(SourcePlugin): iam.delete_cert(certificate.name, account_number=account_number) +class AWSDestinationPlugin(DestinationPlugin): + title = 'AWS' + slug = 'aws-destination' + description = 'Allow the uploading of certificates to AWS IAM' + version = aws.VERSION + sync_as_source = True + sync_as_source_name = AWSSourcePlugin.slug + + author = 'Kevin Glisson' + author_url = 'https://github.com/netflix/lemur' + + options = [ + { + 'name': 'accountNumber', + 'type': 'str', + 'required': True, + 'validation': '[0-9]{12}', + 'helpMessage': 'Must be a valid AWS account number!', + }, + { + 'name': 'path', + 'type': 'str', + 'default': '/', + 'helpMessage': 'Path to upload certificate.' 
+ } + ] + + # 'elb': { + # 'name': {'type': 'name'}, + # 'region': {'type': 'str'}, + # 'port': {'type': 'int'} + # } + + def upload(self, name, body, private_key, cert_chain, options, **kwargs): + iam.upload_cert(name, body, private_key, + self.get_option('path', options), + cert_chain=cert_chain, + account_number=self.get_option('accountNumber', options)) + + def deploy(self, elb_name, account, region, certificate): + pass + + class S3DestinationPlugin(ExportDestinationPlugin): title = 'AWS-S3' slug = 'aws-s3' From 266c83367d81e563fa4984206ca1b5bd472527c4 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 13:29:37 -0700 Subject: [PATCH 145/357] avoiding hard-coded plugin names --- lemur/common/celery.py | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index ed751d9b..65114e01 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -18,7 +18,7 @@ from lemur.authorities.service import get as get_authority from lemur.factory import create_app from lemur.notifications.messaging import send_pending_failure_notification from lemur.pending_certificates import service as pending_certificate_service -from lemur.plugins.base import plugins +from lemur.plugins.base import plugins, IPlugin from lemur.sources.cli import clean, sync, validate_sources from lemur.destinations import service as destinations_service from lemur.sources import service as sources_service @@ -265,13 +265,31 @@ def sync_source_destination(): """ This celery task will sync destination and source, to make sure all new destinations are also present as source. Some destinations do not qualify as sources, and hence should be excluded from being added as sources + We identify qualified destinations based on the sync_as_source attributed of the plugin. + The destination sync_as_source_name reviels the name of the suitable source-plugin. + We rely on account numbers to avoid duplicates. 
""" current_app.logger.debug("Syncing source and destination") + + # a set of all accounts numbers available as sources + src_accounts = set() + sources = validate_sources("all") + for src in sources: + src_accounts.add(IPlugin.get_option('accountNumber' ,src.options)) + for dst in destinations_service.get_all(): destination_plugin = plugins.get(dst.plugin_name) - if destination_plugin.sync_as_source and not sources_service.get_by_label(dst.label): + account_number = IPlugin.get_option('accountNumber', src.options) + if destination_plugin.sync_as_source and (account_number not in src_accounts): + src_options = copy.deepcopy(plugins.get(destination_plugin.sync_as_source_name).options) + for o in src_options: + if o.get('name') == 'accountNumber': + o.update({'value': account_number}) + sources_service.create(label=dst.label, plugin_name=destination_plugin.sync_as_source_name, - options=dst.options, + options=src_options, description=dst.description) current_app.logger.info("Source: %s added", dst.label) + + From ec3d2d73162b8b84de0de5b6fb1d72a85c210904 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 13:51:21 -0700 Subject: [PATCH 146/357] fixing typo --- lemur/common/celery.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 65114e01..fdac27eb 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -275,17 +275,16 @@ def sync_source_destination(): src_accounts = set() sources = validate_sources("all") for src in sources: - src_accounts.add(IPlugin.get_option('accountNumber' ,src.options)) + src_accounts.add(IPlugin.get_option('accountNumber', src.options)) for dst in destinations_service.get_all(): destination_plugin = plugins.get(dst.plugin_name) - account_number = IPlugin.get_option('accountNumber', src.options) + account_number = IPlugin.get_option('accountNumber', dst.options) if destination_plugin.sync_as_source and (account_number not in src_accounts): src_options = copy.deepcopy(plugins.get(destination_plugin.sync_as_source_name).options) for o in src_options: if o.get('name') == 'accountNumber': o.update({'value': account_number}) - sources_service.create(label=dst.label, plugin_name=destination_plugin.sync_as_source_name, options=src_options, From 60edab9f6db11861c059afd6fc2535b758c87cf5 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 14:12:31 -0700 Subject: [PATCH 147/357] cleaning up --- lemur/plugins/lemur_aws/plugin.py | 6 ------ 1 file changed, 6 deletions(-) diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 57cc831c..41bec31c 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -252,12 +252,6 @@ class AWSDestinationPlugin(DestinationPlugin): } ] - # 'elb': { - # 'name': {'type': 'name'}, - # 'region': {'type': 'str'}, - # 'port': {'type': 'int'} - # } - def upload(self, name, body, private_key, cert_chain, options, **kwargs): iam.upload_cert(name, body, private_key, self.get_option('path', options), From 245923414741772d16d2ae1c79ef26fa401f75c3 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 14:34:26 -0700 Subject: [PATCH 148/357] removing lines --- lemur/common/celery.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index fdac27eb..61dde28e 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -290,5 +290,3 @@ def sync_source_destination(): options=src_options, 
description=dst.description) current_app.logger.info("Source: %s added", dst.label) - - From 1bda246df2ae678cd04ba504a89c7a84e534ac65 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 12 Mar 2019 14:17:53 -0700 Subject: [PATCH 149/357] simple hardcoded announcement --- lemur/static/app/index.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index 466cfe9e..fcc54069 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,6 +89,11 @@
+
+ ×
+ Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
+
+
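Pulling together the destination-to-source sync from PATCHES 140/357 and 144-148/357 above: a destination plugin opts in with sync_as_source and names its source counterpart via sync_as_source_name, and the sync skips account numbers that already exist as sources. A simplified, self-contained sketch of that decision (these classes are illustrative stand-ins, not Lemur's plugin registry):

class DestinationPlugin(object):
    sync_as_source = False
    sync_as_source_name = ''


class AWSDestinationPlugin(DestinationPlugin):
    slug = 'aws-destination'
    sync_as_source = True
    sync_as_source_name = 'aws-source'


def should_add_as_source(destination_plugin, account_number, existing_source_accounts):
    # Same shape as the account-number check in the celery sync task above.
    return (
        account_number is not None
        and destination_plugin.sync_as_source
        and account_number not in existing_source_accounts
    )


assert should_add_as_source(AWSDestinationPlugin, '123456789012', {'999999999999'})
assert not should_add_as_source(DestinationPlugin, '123456789012', set())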
From b66fac049445d5dfab67953343e20149ed5911fb Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 9 Apr 2019 10:08:25 -0700 Subject: [PATCH 150/357] removing the announcement --- lemur/static/app/index.html | 5 ----- 1 file changed, 5 deletions(-) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index fcc54069..466cfe9e 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,11 +89,6 @@
-
- ×
- Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
-
-
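One operational note: sync_source_destination is an ordinary Celery task, so scheduling it is just a beat entry. A hedged sketch, assuming a standard celery beat configuration (the entry name, interval and config variable are illustrative; this patch series does not prescribe them):

from celery.schedules import crontab

CELERYBEAT_SCHEDULE = {
    'sync-source-destination': {
        'task': 'lemur.common.celery.sync_source_destination',
        'schedule': crontab(minute=0, hour='*'),  # hourly; adjust to your deployment
    },
}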
From ba691a26d48e503029e058a8bee69b80f06391fd Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 12 Mar 2019 14:17:53 -0700 Subject: [PATCH 151/357] simple hardcoded announcement --- lemur/static/app/index.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index 466cfe9e..fcc54069 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,6 +89,11 @@
+
+ ×
+ Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
+
+
From 84dfdd0600368b81e740d7e2a55afa32bf2bafd7 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 9 Apr 2019 10:08:25 -0700 Subject: [PATCH 152/357] removing the announcement --- lemur/static/app/index.html | 5 ----- 1 file changed, 5 deletions(-) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index fcc54069..466cfe9e 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,11 +89,6 @@
-
- ×
- Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
-
-
From e1a67e9b4eb90bc8e26b29f20709a5c6d88525da Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 12 Mar 2019 14:17:53 -0700 Subject: [PATCH 153/357] simple hardcoded announcement --- lemur/static/app/index.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index 466cfe9e..fcc54069 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,6 +89,11 @@
+
+ ×
+ Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
+
+
From 818da6653d5a0105bb8528ca08c882672ffd0501 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 9 Apr 2019 10:08:25 -0700 Subject: [PATCH 154/357] removing the announcement --- lemur/static/app/index.html | 5 ----- 1 file changed, 5 deletions(-) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index fcc54069..466cfe9e 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,11 +89,6 @@
-
- ×
- Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
-
-
From 5900828051298656fc8cd2de419de89788deec18 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 12 Mar 2019 14:17:53 -0700 Subject: [PATCH 155/357] simple hardcoded announcement --- lemur/static/app/index.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index 466cfe9e..fcc54069 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,6 +89,11 @@
+
+ ×
+ Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
+
+
From d1ead4b79c62df629970bdb9f0d107e7e79c8097 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 9 Apr 2019 10:08:25 -0700 Subject: [PATCH 156/357] removing the announcement --- lemur/static/app/index.html | 5 ----- 1 file changed, 5 deletions(-) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index fcc54069..466cfe9e 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,11 +89,6 @@
-
- ×
- Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
-
-
From 557fac39b58e565e6485aaa68fb1e66962a63b7e Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 16:37:31 -0700 Subject: [PATCH 157/357] refactoring the sync job into a service method that we can also call when adding a new destination --- lemur/common/celery.py | 27 ++++++--------------------- lemur/sources/service.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 21 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 61dde28e..aa160b15 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -21,8 +21,7 @@ from lemur.pending_certificates import service as pending_certificate_service from lemur.plugins.base import plugins, IPlugin from lemur.sources.cli import clean, sync, validate_sources from lemur.destinations import service as destinations_service -from lemur.sources import service as sources_service - +from lemur.sources.service import add_aws_destination_to_sources if current_app: flask_app = current_app @@ -269,24 +268,10 @@ def sync_source_destination(): The destination sync_as_source_name reviels the name of the suitable source-plugin. We rely on account numbers to avoid duplicates. """ - current_app.logger.debug("Syncing source and destination") - - # a set of all accounts numbers available as sources - src_accounts = set() - sources = validate_sources("all") - for src in sources: - src_accounts.add(IPlugin.get_option('accountNumber', src.options)) + current_app.logger.debug("Syncing AWWS destinations and sources") for dst in destinations_service.get_all(): - destination_plugin = plugins.get(dst.plugin_name) - account_number = IPlugin.get_option('accountNumber', dst.options) - if destination_plugin.sync_as_source and (account_number not in src_accounts): - src_options = copy.deepcopy(plugins.get(destination_plugin.sync_as_source_name).options) - for o in src_options: - if o.get('name') == 'accountNumber': - o.update({'value': account_number}) - sources_service.create(label=dst.label, - plugin_name=destination_plugin.sync_as_source_name, - options=src_options, - description=dst.description) - current_app.logger.info("Source: %s added", dst.label) + if add_aws_destination_to_sources(dst): + current_app.logger.debug("Source: %s added", dst.label) + + current_app.logger.debug("Completed Syncing AWS destinations and sources") diff --git a/lemur/sources/service.py b/lemur/sources/service.py index 47b7f02c..31886b5f 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -6,6 +6,7 @@ .. moduleauthor:: Kevin Glisson """ import arrow +import copy from flask import current_app @@ -21,6 +22,7 @@ from lemur.common.utils import find_matching_certificates_by_hash, parse_certifi from lemur.common.defaults import serial from lemur.plugins.base import plugins +from lemur.plugins.utils import get_plugin_option, set_plugin_option def certificate_create(certificate, source): @@ -256,3 +258,32 @@ def render(args): query = database.filter(query, Source, terms) return database.sort_and_page(query, Source, args) + + +def add_aws_destination_to_sources(dst): + """ + Given a destination check, if it can be added as sources, and included it if not already a source + We identify qualified destinations based on the sync_as_source attributed of the plugin. + The destination sync_as_source_name reveals the name of the suitable source-plugin. + We rely on account numbers to avoid duplicates. 
+ :return: true for success and false for not adding the destination as source + """ + # a set of all accounts numbers available as sources + src_accounts = set() + sources = get_all() + for src in sources: + src_accounts.add(get_plugin_option('accountNumber', src.options)) + + # check + destination_plugin = plugins.get(dst.plugin_name) + account_number = get_plugin_option('accountNumber', dst.options) + if destination_plugin.sync_as_source and (account_number not in src_accounts): + src_options = copy.deepcopy(plugins.get(destination_plugin.sync_as_source_name).options) + set_plugin_option('accountNumber', account_number, src_options) + create(label=dst.label, + plugin_name=destination_plugin.sync_as_source_name, + options=src_options, + description=dst.description) + return True + + return False From d7abf2ec18093d566d9c18edef57995309272da2 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 16:38:00 -0700 Subject: [PATCH 158/357] adding a new util method for setting options --- lemur/plugins/utils.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/lemur/plugins/utils.py b/lemur/plugins/utils.py index a1914dd7..e057d071 100644 --- a/lemur/plugins/utils.py +++ b/lemur/plugins/utils.py @@ -18,4 +18,14 @@ def get_plugin_option(name, options): """ for o in options: if o.get('name') == name: - return o['value'] + return o.get('value', o.get('default')) + + +def set_plugin_option(name, value, options): + """ + Set value for option name for options dict. + :param options: + """ + for o in options: + if o.get('name') == name: + o.update({'value': value}) From 69c00c4db57efa54d14ba182dd00309d132cac1e Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 16:39:47 -0700 Subject: [PATCH 159/357] upon creating a new destination, we also add it as source, if the plugin defines this as an option --- lemur/destinations/service.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lemur/destinations/service.py b/lemur/destinations/service.py index 94ca3977..8e505fce 100644 --- a/lemur/destinations/service.py +++ b/lemur/destinations/service.py @@ -12,7 +12,7 @@ from lemur import database from lemur.models import certificate_destination_associations from lemur.destinations.models import Destination from lemur.certificates.models import Certificate -from lemur.sources import service as sources_service +from lemur.sources.service import add_aws_destination_to_sources def create(label, plugin_name, options, description=None): @@ -33,8 +33,7 @@ def create(label, plugin_name, options, description=None): current_app.logger.info("Destination: %s created", label) # add the destination as source, to avoid new destinations that are not in source, as long as an AWS destination - if plugin_name == 'aws-destination': - sources_service.create(label=label, plugin_name='aws-source', options=options, description=description) + if add_aws_destination_to_sources(destination): current_app.logger.info("Source: %s created", label) return database.create(destination) From 6ec84a398c16787771d2c5547a30269e28755900 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 17:13:37 -0700 Subject: [PATCH 160/357] checking for None --- lemur/sources/service.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index 31886b5f..a4d373ab 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -277,7 +277,10 @@ def add_aws_destination_to_sources(dst): # 
check destination_plugin = plugins.get(dst.plugin_name) account_number = get_plugin_option('accountNumber', dst.options) - if destination_plugin.sync_as_source and (account_number not in src_accounts): + if account_number is not None and \ + destination_plugin.sync_as_source is not None and \ + destination_plugin.sync_as_source and \ + (account_number not in src_accounts): src_options = copy.deepcopy(plugins.get(destination_plugin.sync_as_source_name).options) set_plugin_option('accountNumber', account_number, src_options) create(label=dst.label, From 512e1a0bdda0b60974f8826f930979a4e00856f7 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 17:17:28 -0700 Subject: [PATCH 161/357] fixing typos --- lemur/common/celery.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index aa160b15..4192eb10 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -265,10 +265,10 @@ def sync_source_destination(): This celery task will sync destination and source, to make sure all new destinations are also present as source. Some destinations do not qualify as sources, and hence should be excluded from being added as sources We identify qualified destinations based on the sync_as_source attributed of the plugin. - The destination sync_as_source_name reviels the name of the suitable source-plugin. + The destination sync_as_source_name reveals the name of the suitable source-plugin. We rely on account numbers to avoid duplicates. """ - current_app.logger.debug("Syncing AWWS destinations and sources") + current_app.logger.debug("Syncing AWS destinations and sources") for dst in destinations_service.get_all(): if add_aws_destination_to_sources(dst): From 6d67ec7e3462b415af1d1ad1b99c94b27ac0a7c8 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Apr 2019 17:34:02 -0700 Subject: [PATCH 162/357] removing unused import --- lemur/common/celery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 4192eb10..10747d31 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -18,7 +18,7 @@ from lemur.authorities.service import get as get_authority from lemur.factory import create_app from lemur.notifications.messaging import send_pending_failure_notification from lemur.pending_certificates import service as pending_certificate_service -from lemur.plugins.base import plugins, IPlugin +from lemur.plugins.base import plugins from lemur.sources.cli import clean, sync, validate_sources from lemur.destinations import service as destinations_service from lemur.sources.service import add_aws_destination_to_sources From 9ecc19c481384b0405e92845a01ea4144c1d95f2 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Fri, 12 Apr 2019 09:53:06 -0400 Subject: [PATCH 163/357] adding san filter --- lemur/plugins/lemur_vault_dest/plugin.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 91f6a07a..94647c03 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -9,6 +9,7 @@ .. 
moduleauthor:: Christopher Jolley """ +import re import hvac from flask import current_app @@ -19,7 +20,6 @@ from lemur.plugins.bases import DestinationPlugin from cryptography import x509 from cryptography.hazmat.backends import default_backend - class VaultDestinationPlugin(DestinationPlugin): """Hashicorp Vault Destination plugin for Lemur""" title = 'Vault' @@ -76,6 +76,13 @@ class VaultDestinationPlugin(DestinationPlugin): ], 'required': True, 'helpMessage': 'Bundle the chain into the certificate' + }, + { + 'name': 'sanFilter', + 'type': 'str', + 'required': False, + 'validation': '^[0-9a-zA-Z\\\?\[\](){}^$+._-]+$', + 'helpMessage': 'Valid regex filter' } ] @@ -98,6 +105,14 @@ class VaultDestinationPlugin(DestinationPlugin): path = self.get_option('vaultPath', options) bundle = self.get_option('bundleChain', options) obj_name = self.get_option('objectName', options) + san_filter = self.get_option('sanFilter', options) + + san_list = get_san_list(body) + for san in san_list: + if not re.match(san_filter, san): + current_app.logger.exception( + "Exception uploading secret to vault: invalid SAN in certificate", + exc_info=True) with open(token_file, 'r') as file: token = file.readline().rstrip('\n') @@ -119,7 +134,6 @@ class VaultDestinationPlugin(DestinationPlugin): else: secret['data'][cname]['crt'] = body secret['data'][cname]['key'] = private_key - san_list = get_san_list(body) if isinstance(san_list, list): secret['data'][cname]['san'] = san_list try: From f8a9ec6e3edd3ff091c2c8738496291ab4c13e64 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 12 Mar 2019 14:17:53 -0700 Subject: [PATCH 164/357] simple hardcoded announcement --- lemur/static/app/index.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index 466cfe9e..fcc54069 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,6 +89,11 @@
+
+          × Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
+
From da51e7f31daceefaa047bc655c251d178634a972 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 9 Apr 2019 10:08:25 -0700 Subject: [PATCH 165/357] removing the announcement --- lemur/static/app/index.html | 5 ----- 1 file changed, 5 deletions(-) diff --git a/lemur/static/app/index.html b/lemur/static/app/index.html index fcc54069..466cfe9e 100644 --- a/lemur/static/app/index.html +++ b/lemur/static/app/index.html @@ -89,11 +89,6 @@
-
-          × Info: Digicert maintenance and downtime scheduled for 6 April 2019 from 8:30 AM to 8:30 PM Pacific Time!
-
From 9bdf48c1b960f87424f049d7aa43eacd19cf8a29 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 12 Apr 2019 14:29:08 -0700 Subject: [PATCH 166/357] updating requirements --- requirements-docs.txt | 4 ++-- requirements-tests.txt | 2 +- requirements.txt | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements-docs.txt b/requirements-docs.txt index e4233960..2ca15cd0 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -55,7 +55,7 @@ kombu==4.5.0 lockfile==0.12.2 mako==1.0.8 markupsafe==1.1.1 -marshmallow-sqlalchemy==0.16.1 +marshmallow-sqlalchemy==0.16.2 marshmallow==2.19.2 mock==2.0.0 ndg-httpsclient==0.5.1 @@ -89,7 +89,7 @@ sphinx-rtd-theme==0.4.3 sphinx==2.0.1 sphinxcontrib-applehelp==1.0.1 # via sphinx sphinxcontrib-devhelp==1.0.1 # via sphinx -sphinxcontrib-htmlhelp==1.0.1 # via sphinx +sphinxcontrib-htmlhelp==1.0.2 # via sphinx sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.2 # via sphinx diff --git a/requirements-tests.txt b/requirements-tests.txt index 87fc5b66..60dab022 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -22,7 +22,7 @@ docker==3.7.2 # via moto docutils==0.14 # via botocore ecdsa==0.13 # via python-jose factory-boy==2.11.1 -faker==1.0.4 +faker==1.0.5 flask==1.0.2 # via pytest-flask freezegun==0.3.11 future==0.17.1 # via python-jose diff --git a/requirements.txt b/requirements.txt index b7cda309..912b0868 100644 --- a/requirements.txt +++ b/requirements.txt @@ -52,7 +52,7 @@ kombu==4.5.0 lockfile==0.12.2 mako==1.0.8 # via alembic markupsafe==1.1.1 # via jinja2, mako -marshmallow-sqlalchemy==0.16.1 +marshmallow-sqlalchemy==0.16.2 marshmallow==2.19.2 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 From c1b02cc8a5a5536e63131b3045eb79b391021e17 Mon Sep 17 00:00:00 2001 From: Jose Plana Date: Fri, 15 Feb 2019 14:31:16 +0100 Subject: [PATCH 167/357] Allow uploading csr along with certificates --- lemur/certificates/schemas.py | 1 + .../certificates/certificate/upload.tpl.html | 13 +++++++++++++ lemur/tests/test_certificates.py | 1 + 3 files changed, 15 insertions(+) diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index 78217de0..5795e71a 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -255,6 +255,7 @@ class CertificateUploadInputSchema(CertificateCreationSchema): private_key = fields.String() body = fields.String(required=True) chain = fields.String(missing=None, allow_none=True) + csr = fields.String(required=False,valudate=validators.csr) destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True) notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True) diff --git a/lemur/static/app/angular/certificates/certificate/upload.tpl.html b/lemur/static/app/angular/certificates/certificate/upload.tpl.html index c3339051..bf897a60 100644 --- a/lemur/static/app/angular/certificates/certificate/upload.tpl.html +++ b/lemur/static/app/angular/certificates/certificate/upload.tpl.html @@ -62,6 +62,19 @@ a valid certificate.

+
+
+
+
+              Enter a valid certificate signing request.
+
+
+
diff --git a/lemur/static/app/angular/pending_certificates/services.js b/lemur/static/app/angular/pending_certificates/services.js index 32b335ac..4e1b23e4 100644 --- a/lemur/static/app/angular/pending_certificates/services.js +++ b/lemur/static/app/angular/pending_certificates/services.js @@ -245,5 +245,9 @@ angular.module('lemur') return pending_certificate.customOperation('remove', null, {}, {'Content-Type': 'application/json'}, options); }; + PendingCertificateService.upload = function (pending_certificate) { + return pending_certificate.customPOST({'body': pending_certificate.body, 'chain': pending_certificate.chain}, 'upload'); + }; + return PendingCertificateService; }); diff --git a/lemur/static/app/angular/pending_certificates/view/view.js b/lemur/static/app/angular/pending_certificates/view/view.js index 9ada8845..c46d6c74 100644 --- a/lemur/static/app/angular/pending_certificates/view/view.js +++ b/lemur/static/app/angular/pending_certificates/view/view.js @@ -99,4 +99,23 @@ angular.module('lemur') $scope.pendingCertificateTable.reload(); }); }; + + $scope.upload = function (pendingCertificateId) { + var uibModalInstance = $uibModal.open({ + animation: true, + controller: 'PendingCertificateUploadController', + templateUrl: '/angular/pending_certificates/pending_certificate/upload.tpl.html', + size: 'lg', + backdrop: 'static', + resolve: { + uploadId: function () { + return pendingCertificateId; + } + } + }); + uibModalInstance.result.then(function () { + $scope.pendingCertificateTable.reload(); + }); + }; + }); diff --git a/lemur/static/app/angular/pending_certificates/view/view.tpl.html b/lemur/static/app/angular/pending_certificates/view/view.tpl.html index 1f028793..d9c1b461 100644 --- a/lemur/static/app/angular/pending_certificates/view/view.tpl.html +++ b/lemur/static/app/angular/pending_certificates/view/view.tpl.html @@ -51,6 +51,7 @@ diff --git a/lemur/tests/conftest.py b/lemur/tests/conftest.py index e65b9440..809b9a6a 100644 --- a/lemur/tests/conftest.py +++ b/lemur/tests/conftest.py @@ -13,12 +13,12 @@ from lemur import create_app from lemur.common.utils import parse_private_key from lemur.database import db as _db from lemur.auth.service import create_token -from lemur.tests.vectors import SAN_CERT_KEY, INTERMEDIATE_KEY +from lemur.tests.vectors import SAN_CERT_KEY, INTERMEDIATE_KEY, ROOTCA_CERT_STR, ROOTCA_KEY from .factories import ApiKeyFactory, AuthorityFactory, NotificationFactory, DestinationFactory, \ CertificateFactory, UserFactory, RoleFactory, SourceFactory, EndpointFactory, \ RotationPolicyFactory, PendingCertificateFactory, AsyncAuthorityFactory, InvalidCertificateFactory, \ - CryptoAuthorityFactory + CryptoAuthorityFactory, CACertificateFactory def pytest_runtest_setup(item): @@ -172,6 +172,25 @@ def pending_certificate(session): return p +@pytest.fixture +def pending_certificate_from_full_chain_ca(session): + u = UserFactory() + a = AuthorityFactory() + p = PendingCertificateFactory(user=u, authority=a) + session.commit() + return p + + +@pytest.fixture +def pending_certificate_from_partial_chain_ca(session): + u = UserFactory() + c = CACertificateFactory(body=ROOTCA_CERT_STR, private_key=ROOTCA_KEY, chain=None) + a = AuthorityFactory(authority_certificate=c) + p = PendingCertificateFactory(user=u, authority=a) + session.commit() + return p + + @pytest.fixture def invalid_certificate(session): u = UserFactory() diff --git a/lemur/tests/test_pending_certificates.py b/lemur/tests/test_pending_certificates.py index 7accf7d9..043002d3 100644 --- 
a/lemur/tests/test_pending_certificates.py +++ b/lemur/tests/test_pending_certificates.py @@ -2,6 +2,7 @@ import json import pytest +from marshmallow import ValidationError from lemur.pending_certificates.views import * # noqa from .vectors import CSR_STR, INTERMEDIATE_CERT_STR, VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, \ VALID_USER_HEADER_TOKEN, WILDCARD_CERT_STR @@ -50,3 +51,44 @@ def test_pending_cancel(client, pending_certificate, token, status): assert client.delete(api.url_for(PendingCertificates, pending_certificate_id=pending_certificate.id), data=json.dumps({'note': "unit test", 'send_email': False}), headers=token).status_code == status + + +def test_pending_upload(pending_certificate_from_full_chain_ca): + from lemur.pending_certificates.service import upload + from lemur.certificates.service import get + + cert = {'body': WILDCARD_CERT_STR, + 'chain': None, + 'external_id': None + } + + pending_cert = upload(pending_certificate_from_full_chain_ca.id, **cert) + assert pending_cert.resolved + assert get(pending_cert.resolved_cert_id) + + +def test_pending_upload_with_chain(pending_certificate_from_partial_chain_ca): + from lemur.pending_certificates.service import upload + from lemur.certificates.service import get + + cert = {'body': WILDCARD_CERT_STR, + 'chain': INTERMEDIATE_CERT_STR, + 'external_id': None + } + + pending_cert = upload(pending_certificate_from_partial_chain_ca.id, **cert) + assert pending_cert.resolved + assert get(pending_cert.resolved_cert_id) + + +def test_invalid_pending_upload_with_chain(pending_certificate_from_partial_chain_ca): + from lemur.pending_certificates.service import upload + + cert = {'body': WILDCARD_CERT_STR, + 'chain': None, + 'external_id': None + } + with pytest.raises(ValidationError) as err: + upload(pending_certificate_from_partial_chain_ca.id, **cert) + assert str(err.value).startswith( + 'Incorrect chain certificate(s) provided: \'*.wild.example.org\' is not signed by \'LemurTrust Unittests Root CA 2018') From 1667c057428c58e6c25ff3a9ae76621f8639c9d7 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Thu, 18 Apr 2019 13:57:10 -0400 Subject: [PATCH 178/357] removed unused functions --- lemur/plugins/lemur_vault_dest/plugin.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 93134e7f..1b07cd83 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -21,14 +21,6 @@ from lemur.plugins.bases import DestinationPlugin from cryptography import x509 from cryptography.hazmat.backends import default_backend -class Error(Exception): - """Base exception class""" - pass - -class InvalidSanError(Error): - """Invlied SAN in SAN list as defined by regex in destination""" - pass - class VaultDestinationPlugin(DestinationPlugin): """Hashicorp Vault Destination plugin for Lemur""" title = 'Vault' From 8dccaaf54450eb8e35e8e46804c6c910d040ca5f Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Mon, 22 Apr 2019 07:58:01 -0400 Subject: [PATCH 179/357] simpler validation --- lemur/plugins/lemur_vault_dest/.plugin.py.swp | Bin 0 -> 16384 bytes lemur/plugins/lemur_vault_dest/plugin.py | 2 +- 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 lemur/plugins/lemur_vault_dest/.plugin.py.swp diff --git a/lemur/plugins/lemur_vault_dest/.plugin.py.swp b/lemur/plugins/lemur_vault_dest/.plugin.py.swp new file mode 100644 index 
0000000000000000000000000000000000000000..b82010b3d3acbd7728a41aa95e77a928b96bc16d GIT binary patch literal 16384 zcmeHOU2Ggz6`rJ}4TQ9%g$HPB6-JvLVfB&avpS^rp`72ixo3}eY zwcp?p{l><%HKW%sxUZRRdrOM)+S*!KbdLUdcBg4Mf&8U+jgB4absmIec|-GhTk^>p za}DGgcqbY-pnUr9_!v=sYUDn4*PRRRgh@`5YarJ^u7O+wxdw6#yfm{Q* z267GL8u&ldfKgYJ+acFYQ~<#9|M>j>Z#4cIcoBFWI1gCBCxF*)QjR4N zXW%8^3h*>=26z;x0`~xWfY)wTl&ipZfi<8FG=TQm-=1t&xz;nO_zys!i37`Pn z16+eWUjwcHE^q`O8}9`ME*4#$5w=*B1>A0EO&;p57h0}UP{n&0r*Ts)G1(Rrm6)jQ zcARiqsu{Yw6!kLAS0C+iCxF|vo#7({B%zWX zQmny#(Rjw(891O}@_HAtCqG{{CI+AOxlE+CjC{ zf8Xa9I+oAtxO>8|13qvY6na#Z48JNs+|9E0d$Cb?c`G{MSo?*7Im=yYzz0_H} zh_ya&Lp8dFWg9D4eigQ)^z~1aNRH23$L0@=S2#rk9!pU^mwL)iU zIYWh#jqv+`fhG4#%xbU|+i+G{CxE)0esv(KWu0hgY;?jHePVL-ixquz`A|j_nzfp- z!B!rXOm%y=aaYbm3nXra_pYZo(+NVh!kNK@*i0-~EKd{BC}0%0cbwh1!jhaEW=Yd4 zKU_R?e@2eMIS%`ox{E1h_KC#S|DzeoU50$-zMUs}cJ95JShSVB`efY?xbf$bIq{XwO=oa&1~V!DN@Skji6;kix=;`hK6Gf zYNJ_UPiq-Z zw2e?(F-&B>^?+Gz&&39dEK9r{9kmY=#vRum9Gi&viZ`t_DQEzW0Z-Ts2_qRw3Yi;g zozSuqf}tFF3xY_-geH??M-nxlpw(DXv*Y{R33bEslxUW%u3^UCeI9>v1BJA!X{_zm zJ2sbYRH?9GFJi|`@~qEgJ&;`Tvr*n1wv3ROhC_R)*MWc5S>0X3&mM!endc%OV~$(r zfhJagw4JbW>VSRTH;}Wl#Qn%Gh0|fnhEquaDz;^EC*Tz(GciiBIXg8yRhylviDBBV z&tdHliB}{;#dN)%Z#7$Cg%!*qJ3KZ%A%v@Vwh_Vs+ZDFpSnH{RDy-l1;#cPN`p74Q zR_XlzL!1}?fwLu@|EHtF;Z>aTF9XxSZNLHG8qV{-1zrH?oc|E;C!FE00AB~50vf=B zzykoC?|+Un{42nZfG+_KPz8l+8ZkBF$Ssx8=JDWgAJRM>~qWNa;Om59_4P-i;1L1p6EycqU;qJq;3ff8-9<` zmQS@MszgfN6{Mh%!cg%7F(pw7S%9vH2`V(YjR6&EyP;hvVbb_|h;13R4YXSPb=D0} zz%unz|1o3O83YU&u@~!fv?vIk$7kp0xSuiRIu%MOd&rJQ^Gk*oEL^KD zIp9kw&g0X^wYh~yPS&R8^~qE7wKKDm^V5qpeRguDmX0B+F;yl6p!%Zj@wQIC`st!x z&WhS)khMLTtQ6ajx4p2(s0p5!T#mRM#oR?T*sP5i5)3=SBipimFGwvHE+7-E3b znW$?&durJ!EKssY1)r+?Sdwclt*R3J@LC216`$$A=3)8qcrhu|ovy@!@+XT%iPPWwh-Sg3F~2f#@+g*PS~PeE!bwNL*q_CkckIiC6t%dBE<({ zk(mq;#Frar*d?s6FrKO)z88P)5V_(m^trphXgQ)1#Sp4#S5JZ@mUpqglppinU383l z!+wq%BHU{Ty=F^<4a^_URS@P3r^HYzXa-f>pMG@q@iVol$+=o4m=GJPy`U?ep(HUW zU1Z=-kw~&ko2gCgDv@{hZ6m2+Q&L!V=z%Eio`@a+4L=ag_|{gmV&Ca6Dh~6AJ$ow# V*#`;3lyMju7Fynk9<-!{{{?&E-R}SZ literal 0 HcmV?d00001 diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 1b07cd83..a9c85dd7 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -94,7 +94,7 @@ class VaultDestinationPlugin(DestinationPlugin): 'type': 'str', 'value': '.*', 'required': False, - 'validation': '^[0-9a-zA-Z\\\?\[\](){}|^$+*,._-]+$', + 'validation': '.*', 'helpMessage': 'Valid regex filter' } ] From f9dadb2670de7c8f4414870ac6cb1b11c7d9c546 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Mon, 22 Apr 2019 09:38:44 -0400 Subject: [PATCH 180/357] fixing validation --- lemur/plugins/lemur_vault_dest/plugin.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index a9c85dd7..8d2ca6c6 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -124,11 +124,16 @@ class VaultDestinationPlugin(DestinationPlugin): san_list = get_san_list(body) if san_filter: for san in san_list: - if not re.match(san_filter, san, flags=re.IGNORECASE): + try: + if not re.match(san_filter, san, flags=re.IGNORECASE): + current_app.logger.exception( + "Exception uploading secret to vault: invalid SAN: {}".format(san), + exc_info=True) + os._exit(1) + except re.error: current_app.logger.exception( - "Exception uploading secret to vault: invalid SAN: {}".format(san), + "Exception compiling regex filter: invalid filter", 
exc_info=True) - os._exit(1) with open(token_file, 'r') as file: token = file.readline().rstrip('\n') From d1e5a40d2097e91aaf6dbcf9d9779e3d76fe0b6d Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 22 Apr 2019 09:49:03 -0700 Subject: [PATCH 181/357] updating requirements, addressing the urllib3 high severity warning --- requirements-dev.txt | 4 ++-- requirements-docs.txt | 8 ++++---- requirements-tests.txt | 14 +++++++------- requirements.txt | 8 ++++---- 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index e62d1ee6..f9f1b8f3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -18,7 +18,7 @@ invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.15.1 +pre-commit==1.15.2 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer @@ -30,7 +30,7 @@ six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit tqdm==4.31.1 # via twine twine==1.13.0 -urllib3==1.24.1 # via requests +urllib3==1.24.2 # via requests virtualenv==16.4.3 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.3.3 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index 3948e2a8..3ec6bb3b 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -17,12 +17,12 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.6.0.0 blinker==1.4 -boto3==1.9.130 -botocore==1.12.130 +boto3==1.9.134 +botocore==1.12.134 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 -cffi==1.12.2 +cffi==1.12.3 chardet==3.0.4 click==7.0 cloudflare==2.1.0 @@ -97,7 +97,7 @@ sphinxcontrib-serializinghtml==1.1.3 # via sphinx sqlalchemy-utils==0.33.11 sqlalchemy==1.3.3 tabulate==0.8.3 -urllib3==1.24.1 +urllib3==1.24.2 vine==1.3.0 werkzeug==0.15.2 xmltodict==0.12.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index e2ddf830..9ae156a1 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -8,11 +8,11 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via pytest aws-xray-sdk==0.95 # via moto -boto3==1.9.130 # via moto +boto3==1.9.134 # via moto boto==2.49.0 # via moto -botocore==1.12.130 # via boto3, moto, s3transfer +botocore==1.12.134 # via boto3, moto, s3transfer certifi==2019.3.9 # via requests -cffi==1.12.2 # via cryptography +cffi==1.12.3 # via cryptography chardet==3.0.4 # via requests click==7.0 # via flask coverage==4.5.3 @@ -20,7 +20,7 @@ cryptography==2.6.1 # via moto docker-pycreds==0.4.0 # via docker docker==3.7.2 # via moto docutils==0.14 # via botocore -ecdsa==0.13 # via python-jose +ecdsa==0.13.2 # via python-jose factory-boy==2.11.1 faker==1.0.5 flask==1.0.2 # via pytest-flask @@ -40,12 +40,12 @@ nose==1.3.7 pbr==5.1.3 # via mock pluggy==0.9.0 # via pytest py==1.8.0 # via pytest -pyaml==18.11.0 # via moto +pyaml==19.4.1 # via moto pycparser==2.19 # via cffi pycryptodome==3.8.1 # via python-jose pyflakes==2.1.1 pytest-flask==0.14.0 -pytest-mock==1.10.3 +pytest-mock==1.10.4 pytest==4.4.1 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==2.0.2 # via moto @@ -57,7 +57,7 @@ responses==0.10.6 # via moto s3transfer==0.2.0 # via boto3 six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker -urllib3==1.24.1 # via botocore, requests +urllib3==1.24.2 # via botocore, requests 
websocket-client==0.56.0 # via docker werkzeug==0.15.2 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk diff --git a/requirements.txt b/requirements.txt index d16d4bb5..98b4d666 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,12 +15,12 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.130 -botocore==1.12.130 +boto3==1.9.134 +botocore==1.12.134 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 -cffi==1.12.2 # via bcrypt, cryptography, pynacl +cffi==1.12.3 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask cloudflare==2.1.0 @@ -82,7 +82,7 @@ six==1.12.0 sqlalchemy-utils==0.33.11 sqlalchemy==1.3.3 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 -urllib3==1.24.1 # via botocore, requests +urllib3==1.24.2 # via botocore, requests vine==1.3.0 # via amqp, celery werkzeug==0.15.2 # via flask xmltodict==0.12.0 From 83784d7cb8591f9ecfbe78b7b73b1b8f84778e54 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Mon, 22 Apr 2019 21:50:48 -0700 Subject: [PATCH 182/357] Pinning pyjks to specific commit with pycryptodomex instead of pycryptodome --- requirements-docs.txt | 4 ++++ requirements-tests.txt | 29 +++++++++++++++++------------ requirements.in | 2 +- requirements.txt | 5 ++++- 4 files changed, 26 insertions(+), 14 deletions(-) diff --git a/requirements-docs.txt b/requirements-docs.txt index 3ec6bb3b..db37206a 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,6 +4,7 @@ # # pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index # +-e git+git://github.com/kurtbrose/pyjks.git@e742f80b0bbd06c5a6cf0535985af6416ba014a4#egg=pyjks acme==0.33.1 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 @@ -47,6 +48,7 @@ idna==2.8 imagesize==1.1.0 # via sphinx inflection==0.3.1 itsdangerous==1.1.0 +javaobj-py3==0.2.4 jinja2==2.10.1 jmespath==0.9.4 josepy==1.1.0 @@ -67,6 +69,7 @@ psycopg2==2.8.2 pyasn1-modules==0.2.4 pyasn1==0.4.5 pycparser==2.19 +pycryptodomex==3.8.1 pygments==2.3.1 # via sphinx pyjwt==1.7.1 pynacl==1.3.0 @@ -97,6 +100,7 @@ sphinxcontrib-serializinghtml==1.1.3 # via sphinx sqlalchemy-utils==0.33.11 sqlalchemy==1.3.3 tabulate==0.8.3 +twofish==0.3.0 urllib3==1.24.2 vine==1.3.0 werkzeug==0.15.2 diff --git a/requirements-tests.txt b/requirements-tests.txt index 9ae156a1..9dd01574 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -7,12 +7,14 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via pytest -aws-xray-sdk==0.95 # via moto -boto3==1.9.134 # via moto +aws-sam-translator==1.10.0 # via cfn-lint +aws-xray-sdk==2.4.2 # via moto +boto3==1.9.134 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.134 # via boto3, moto, s3transfer +botocore==1.12.134 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.3 # via cryptography +cfn-lint==0.19.1 # via moto chardet==3.0.4 # via requests click==7.0 # via flask coverage==4.5.3 @@ -25,37 +27,40 @@ factory-boy==2.11.1 faker==1.0.5 flask==1.0.2 # via pytest-flask freezegun==0.3.11 -future==0.17.1 # via python-jose -idna==2.8 # via requests +future==0.17.1 # via aws-xray-sdk, python-jose +idna==2.8 # via moto, requests itsdangerous==1.1.0 # via flask jinja2==2.10.1 # via flask, moto jmespath==0.9.4 # via boto3, botocore -jsondiff==1.1.1 # via moto +jsondiff==1.1.2 # via moto 
+jsonpatch==1.23 # via cfn-lint jsonpickle==1.1 # via aws-xray-sdk +jsonpointer==2.0 # via jsonpatch +jsonschema==2.6.0 # via aws-sam-translator, cfn-lint markupsafe==1.1.1 # via jinja2 mock==2.0.0 # via moto more-itertools==7.0.0 # via pytest -moto==1.3.7 +moto==1.3.8 nose==1.3.7 pbr==5.1.3 # via mock pluggy==0.9.0 # via pytest py==1.8.0 # via pytest -pyaml==19.4.1 # via moto +pyasn1==0.4.5 # via rsa pycparser==2.19 # via cffi -pycryptodome==3.8.1 # via python-jose pyflakes==2.1.1 pytest-flask==0.14.0 pytest-mock==1.10.4 pytest==4.4.1 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto -python-jose==2.0.2 # via moto +python-jose==3.0.1 # via moto pytz==2019.1 # via moto pyyaml==5.1 requests-mock==1.5.2 -requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses +requests==2.21.0 # via cfn-lint, docker, moto, requests-mock, responses responses==0.10.6 # via moto +rsa==4.0 # via python-jose s3transfer==0.2.0 # via boto3 -six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client +six==1.12.0 # via aws-sam-translator, cfn-lint, cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker urllib3==1.24.2 # via botocore, requests websocket-client==0.56.0 # via docker diff --git a/requirements.in b/requirements.in index a3cabc18..ee3f301c 100644 --- a/requirements.in +++ b/requirements.in @@ -47,4 +47,4 @@ SQLAlchemy-Utils tabulate xmltodict pyyaml>=4.2b1 #high severity alert -pyjks +-e git://github.com/kurtbrose/pyjks.git@e742f80b0bbd06c5a6cf0535985af6416ba014a4#egg=pyjks # Needed until pyjks publishes a release with the contents of this PR diff --git a/requirements.txt b/requirements.txt index f63ffb6a..a4c707e1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,6 +4,7 @@ # # pip-compile --output-file requirements.txt requirements.in -U --no-index # +-e git+git://github.com/kurtbrose/pyjks.git@e742f80b0bbd06c5a6cf0535985af6416ba014a4#egg=pyjks acme==0.33.1 alembic-autogenerate-enums==0.0.2 alembic==1.0.9 # via flask-migrate @@ -44,6 +45,7 @@ hvac==0.8.2 idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask +javaobj-py3==0.2.4 jinja2==2.10.1 jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme @@ -63,6 +65,7 @@ psycopg2==2.8.2 pyasn1-modules==0.2.4 # via python-ldap pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, python-ldap pycparser==2.19 # via cffi +pycryptodomex==3.8.1 pyjwt==1.7.1 pynacl==1.3.0 # via paramiko pyopenssl==19.0.0 @@ -82,8 +85,8 @@ six==1.12.0 sqlalchemy-utils==0.33.11 sqlalchemy==1.3.3 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 +twofish==0.3.0 urllib3==1.24.2 # via botocore, requests vine==1.3.0 # via amqp, celery werkzeug==0.15.2 # via flask xmltodict==0.12.0 -pyjks==18.0.0 From 85efb6a99e9fae62e318652c841597c2c2beacf7 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Mon, 22 Apr 2019 09:54:19 -0400 Subject: [PATCH 183/357] cleanup tmp files --- lemur/plugins/lemur_vault_dest/.plugin.py.swp | Bin 16384 -> 0 bytes lemur/plugins/lemur_vault_dest/plugin.py | 1 + 2 files changed, 1 insertion(+) delete mode 100644 lemur/plugins/lemur_vault_dest/.plugin.py.swp diff --git a/lemur/plugins/lemur_vault_dest/.plugin.py.swp b/lemur/plugins/lemur_vault_dest/.plugin.py.swp deleted file mode 100644 index 
b82010b3d3acbd7728a41aa95e77a928b96bc16d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 16384 zcmeHOU2Ggz6`rJ}4TQ9%g$HPB6-JvLVfB&avpS^rp`72ixo3}eY zwcp?p{l><%HKW%sxUZRRdrOM)+S*!KbdLUdcBg4Mf&8U+jgB4absmIec|-GhTk^>p za}DGgcqbY-pnUr9_!v=sYUDn4*PRRRgh@`5YarJ^u7O+wxdw6#yfm{Q* z267GL8u&ldfKgYJ+acFYQ~<#9|M>j>Z#4cIcoBFWI1gCBCxF*)QjR4N zXW%8^3h*>=26z;x0`~xWfY)wTl&ipZfi<8FG=TQm-=1t&xz;nO_zys!i37`Pn z16+eWUjwcHE^q`O8}9`ME*4#$5w=*B1>A0EO&;p57h0}UP{n&0r*Ts)G1(Rrm6)jQ zcARiqsu{Yw6!kLAS0C+iCxF|vo#7({B%zWX zQmny#(Rjw(891O}@_HAtCqG{{CI+AOxlE+CjC{ zf8Xa9I+oAtxO>8|13qvY6na#Z48JNs+|9E0d$Cb?c`G{MSo?*7Im=yYzz0_H} zh_ya&Lp8dFWg9D4eigQ)^z~1aNRH23$L0@=S2#rk9!pU^mwL)iU zIYWh#jqv+`fhG4#%xbU|+i+G{CxE)0esv(KWu0hgY;?jHePVL-ixquz`A|j_nzfp- z!B!rXOm%y=aaYbm3nXra_pYZo(+NVh!kNK@*i0-~EKd{BC}0%0cbwh1!jhaEW=Yd4 zKU_R?e@2eMIS%`ox{E1h_KC#S|DzeoU50$-zMUs}cJ95JShSVB`efY?xbf$bIq{XwO=oa&1~V!DN@Skji6;kix=;`hK6Gf zYNJ_UPiq-Z zw2e?(F-&B>^?+Gz&&39dEK9r{9kmY=#vRum9Gi&viZ`t_DQEzW0Z-Ts2_qRw3Yi;g zozSuqf}tFF3xY_-geH??M-nxlpw(DXv*Y{R33bEslxUW%u3^UCeI9>v1BJA!X{_zm zJ2sbYRH?9GFJi|`@~qEgJ&;`Tvr*n1wv3ROhC_R)*MWc5S>0X3&mM!endc%OV~$(r zfhJagw4JbW>VSRTH;}Wl#Qn%Gh0|fnhEquaDz;^EC*Tz(GciiBIXg8yRhylviDBBV z&tdHliB}{;#dN)%Z#7$Cg%!*qJ3KZ%A%v@Vwh_Vs+ZDFpSnH{RDy-l1;#cPN`p74Q zR_XlzL!1}?fwLu@|EHtF;Z>aTF9XxSZNLHG8qV{-1zrH?oc|E;C!FE00AB~50vf=B zzykoC?|+Un{42nZfG+_KPz8l+8ZkBF$Ssx8=JDWgAJRM>~qWNa;Om59_4P-i;1L1p6EycqU;qJq;3ff8-9<` zmQS@MszgfN6{Mh%!cg%7F(pw7S%9vH2`V(YjR6&EyP;hvVbb_|h;13R4YXSPb=D0} zz%unz|1o3O83YU&u@~!fv?vIk$7kp0xSuiRIu%MOd&rJQ^Gk*oEL^KD zIp9kw&g0X^wYh~yPS&R8^~qE7wKKDm^V5qpeRguDmX0B+F;yl6p!%Zj@wQIC`st!x z&WhS)khMLTtQ6ajx4p2(s0p5!T#mRM#oR?T*sP5i5)3=SBipimFGwvHE+7-E3b znW$?&durJ!EKssY1)r+?Sdwclt*R3J@LC216`$$A=3)8qcrhu|ovy@!@+XT%iPPWwh-Sg3F~2f#@+g*PS~PeE!bwNL*q_CkckIiC6t%dBE<({ zk(mq;#Frar*d?s6FrKO)z88P)5V_(m^trphXgQ)1#Sp4#S5JZ@mUpqglppinU383l z!+wq%BHU{Ty=F^<4a^_URS@P3r^HYzXa-f>pMG@q@iVol$+=o4m=GJPy`U?ep(HUW zU1Z=-kw~&ko2gCgDv@{hZ6m2+Q&L!V=z%Eio`@a+4L=ag_|{gmV&Ca6Dh~6AJ$ow# V*#`;3lyMju7Fynk9<-!{{{?&E-R}SZ diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 8d2ca6c6..819ba22b 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -21,6 +21,7 @@ from lemur.plugins.bases import DestinationPlugin from cryptography import x509 from cryptography.hazmat.backends import default_backend + class VaultDestinationPlugin(DestinationPlugin): """Hashicorp Vault Destination plugin for Lemur""" title = 'Vault' From b6e09621f8290e27f2bb43449b618b1836d06606 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 23 Apr 2019 08:05:32 -0700 Subject: [PATCH 184/357] Use official pyjks version --- requirements-docs.txt | 2 +- requirements.in | 4 ++-- requirements.txt | 12 ++++++------ 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/requirements-docs.txt b/requirements-docs.txt index db37206a..5f69328d 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,7 +4,6 @@ # # pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index # --e git+git://github.com/kurtbrose/pyjks.git@e742f80b0bbd06c5a6cf0535985af6416ba014a4#egg=pyjks acme==0.33.1 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 @@ -71,6 +70,7 @@ pyasn1==0.4.5 pycparser==2.19 pycryptodomex==3.8.1 pygments==2.3.1 # via sphinx +pyjks==19.0.0 pyjwt==1.7.1 pynacl==1.3.0 pyopenssl==19.0.0 diff --git a/requirements.in b/requirements.in index ee3f301c..e69c61d2 100644 --- a/requirements.in +++ b/requirements.in @@ 
-35,8 +35,10 @@ ndg-httpsclient paramiko # required for the SFTP destination plugin pem psycopg2 +pyjks >= 19 # pyjks < 19 depends on pycryptodome, which conflicts with dyn's usage of pycrypto pyjwt pyOpenSSL +pyyaml>=4.2b1 #high severity alert python_ldap raven[flask] redis @@ -46,5 +48,3 @@ six SQLAlchemy-Utils tabulate xmltodict -pyyaml>=4.2b1 #high severity alert --e git://github.com/kurtbrose/pyjks.git@e742f80b0bbd06c5a6cf0535985af6416ba014a4#egg=pyjks # Needed until pyjks publishes a release with the contents of this PR diff --git a/requirements.txt b/requirements.txt index a4c707e1..2d17b930 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,6 @@ # # pip-compile --output-file requirements.txt requirements.in -U --no-index # --e git+git://github.com/kurtbrose/pyjks.git@e742f80b0bbd06c5a6cf0535985af6416ba014a4#egg=pyjks acme==0.33.1 alembic-autogenerate-enums==0.0.2 alembic==1.0.9 # via flask-migrate @@ -45,7 +44,7 @@ hvac==0.8.2 idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask -javaobj-py3==0.2.4 +javaobj-py3==0.2.4 # via pyjks jinja2==2.10.1 jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme @@ -62,10 +61,11 @@ paramiko==2.4.2 pbr==5.1.3 # via mock pem==19.1.0 psycopg2==2.8.2 -pyasn1-modules==0.2.4 # via python-ldap -pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, python-ldap +pyasn1-modules==0.2.4 # via pyjks, python-ldap +pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, pyjks, python-ldap pycparser==2.19 # via cffi -pycryptodomex==3.8.1 +pycryptodomex==3.8.1 # via pyjks +pyjks==19.0.0 pyjwt==1.7.1 pynacl==1.3.0 # via paramiko pyopenssl==19.0.0 @@ -85,7 +85,7 @@ six==1.12.0 sqlalchemy-utils==0.33.11 sqlalchemy==1.3.3 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 -twofish==0.3.0 +twofish==0.3.0 # via pyjks urllib3==1.24.2 # via botocore, requests vine==1.3.0 # via amqp, celery werkzeug==0.15.2 # via flask From 272285f64a030d2dd396ac8689d147a95561b8d0 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 24 Apr 2019 15:26:23 -0700 Subject: [PATCH 185/357] Better exception handling, logging, and metrics for ACME flow --- lemur/plugins/lemur_acme/dyn.py | 28 +++++++++++++---- lemur/plugins/lemur_acme/plugin.py | 48 +++++++++++++++++++++++++----- 2 files changed, 63 insertions(+), 13 deletions(-) diff --git a/lemur/plugins/lemur_acme/dyn.py b/lemur/plugins/lemur_acme/dyn.py index 5d419f7f..232c6ca3 100644 --- a/lemur/plugins/lemur_acme/dyn.py +++ b/lemur/plugins/lemur_acme/dyn.py @@ -10,13 +10,21 @@ from dyn.tm.session import DynectSession from dyn.tm.zones import Node, Zone, get_all_zones from flask import current_app +from lemur.extensions import metrics, sentry + def get_dynect_session(): - dynect_session = DynectSession( - current_app.config.get('ACME_DYN_CUSTOMER_NAME', ''), - current_app.config.get('ACME_DYN_USERNAME', ''), - current_app.config.get('ACME_DYN_PASSWORD', ''), - ) + try: + dynect_session = DynectSession( + current_app.config.get('ACME_DYN_CUSTOMER_NAME', ''), + current_app.config.get('ACME_DYN_USERNAME', ''), + current_app.config.get('ACME_DYN_PASSWORD', ''), + ) + except Exception as e: + sentry.captureException() + metrics.send('get_dynect_session_fail', 'counter', 1) + current_app.logger.debug("Unable to establish connection to Dyn", exc_info=True) + raise return dynect_session @@ -30,10 +38,12 @@ def _has_dns_propagated(name, token): for txt_record in rdata.strings: txt_records.append(txt_record.decode("utf-8")) except 
dns.exception.DNSException: + metrics.send('has_dns_propagated_fail', 'counter', 1) return False for txt_record in txt_records: if txt_record == token: + metrics.send('has_dns_propagated_success', 'counter', 1) return True return False @@ -46,10 +56,12 @@ def wait_for_dns_change(change_id, account_number=None): status = _has_dns_propagated(fqdn, token) current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) if status: + metrics.send('wait_for_dns_change_success', 'counter', 1) break time.sleep(20) if not status: # TODO: Delete associated DNS text record here + metrics.send('wait_for_dns_change_fail', 'counter', 1) raise Exception("Unable to query DNS token for fqdn {}.".format(fqdn)) return @@ -67,6 +79,7 @@ def get_zone_name(domain): if z.name.count(".") > zone_name.count("."): zone_name = z.name if not zone_name: + metrics.send('dyn_no_zone_name', 'counter', 1) raise Exception("No Dyn zone found for domain: {}".format(domain)) return zone_name @@ -99,6 +112,8 @@ def create_txt_record(domain, token, account_number): "Record already exists: {}".format(domain, token, e), exc_info=True ) else: + metrics.send('create_txt_record_error', 'counter', 1) + sentry.captureException() raise change_id = (fqdn, token) @@ -122,6 +137,8 @@ def delete_txt_record(change_id, account_number, domain, token): try: all_txt_records = node.get_all_records_by_type('TXT') except DynectGetError: + sentry.captureException() + metrics.send('delete_txt_record_error', 'counter', 1) # No Text Records remain or host is not in the zone anymore because all records have been deleted. return for txt_record in all_txt_records: @@ -178,6 +195,7 @@ def get_authoritative_nameserver(domain): rcode = response.rcode() if rcode != dns.rcode.NOERROR: + metrics.send('get_authoritative_nameserver_error', 'counter', 1) if rcode == dns.rcode.NXDOMAIN: raise Exception('%s does not exist.' 
% sub) else: diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index 59cde380..c998c0b8 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -28,6 +28,7 @@ from lemur.authorizations import service as authorization_service from lemur.common.utils import generate_private_key from lemur.dns_providers import service as dns_provider_service from lemur.exceptions import InvalidAuthority, InvalidConfiguration, UnknownProvider +from lemur.extensions import metrics, sentry from lemur.plugins import lemur_acme as acme from lemur.plugins.bases import IssuerPlugin from lemur.plugins.lemur_acme import cloudflare, dyn, route53 @@ -47,7 +48,9 @@ class AcmeHandler(object): try: self.all_dns_providers = dns_provider_service.get_all_dns_providers() except Exception as e: - current_app.logger.error("Unable to fetch DNS Providers: {}".format(e)) + metrics.send('AcmeHandler_init_error', 'counter', 1) + sentry.captureException() + current_app.logger.error(f"Unable to fetch DNS Providers: {e}") self.all_dns_providers = [] def find_dns_challenge(self, authorizations): @@ -94,6 +97,7 @@ class AcmeHandler(object): current_app.logger.debug("Finalizing DNS challenge for {0}".format(authz_record.authz[0].body.identifier.value)) dns_providers = self.dns_providers_for_domain.get(authz_record.host) if not dns_providers: + metrics.send('complete_dns_challenge_error_no_dnsproviders', 'counter', 1) raise Exception("No DNS providers found for domain: {}".format(authz_record.host)) for dns_provider in dns_providers: @@ -102,7 +106,15 @@ class AcmeHandler(object): account_number = dns_provider_options.get("account_id") dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type) for change_id in authz_record.change_id: - dns_provider_plugin.wait_for_dns_change(change_id, account_number=account_number) + try: + dns_provider_plugin.wait_for_dns_change(change_id, account_number=account_number) + except Exception: + metrics.send('complete_dns_challenge_error', 'counter', 1) + sentry.captureException() + current_app.logger.debug( + f"Unable to resolve DNS challenge for change_id: {change_id}, account_id: " + f"{account_number}", exc_info=True) + raise for dns_challenge in authz_record.dns_challenge: response = dns_challenge.response(acme_client.client.net.key) @@ -114,6 +126,7 @@ class AcmeHandler(object): ) if not verified: + metrics.send('complete_dns_challenge_verification_error', 'counter', 1) raise ValueError("Failed verification") time.sleep(5) @@ -129,7 +142,9 @@ class AcmeHandler(object): try: orderr = acme_client.finalize_order(order, deadline) except AcmeError: - current_app.logger.error("Unable to resolve Acme order: {}".format(order), exc_info=True) + sentry.captureException() + metrics.send('request_certificate_error', 'counter', 1) + current_app.logger.error(f"Unable to resolve Acme order: {order}", exc_info=True) raise pem_certificate = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, @@ -196,6 +211,7 @@ class AcmeHandler(object): for domain in order_info.domains: if not self.dns_providers_for_domain.get(domain): + metrics.send('get_authorizations_no_dns_provider_for_domain', 'counter', 1) raise Exception("No DNS providers found for domain: {}".format(domain)) for dns_provider in self.dns_providers_for_domain[domain]: dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type) @@ -284,6 +300,8 @@ class AcmeHandler(object): except Exception as e: # If this fails, it's most likely because the record doesn't 
exist (It was already cleaned up) # or we're not authorized to modify it. + metrics.send('cleanup_dns_challenges_error', 'counter', 1) + sentry.captureException() pass def get_dns_provider(self, type): @@ -378,12 +396,15 @@ class ACMEIssuerPlugin(IssuerPlugin): try: order = acme_client.new_order(pending_cert.csr) except WildcardUnsupportedError: + metrics.send('get_ordered_certificate_wildcard_unsupported', 'counter', 1) raise Exception("The currently selected ACME CA endpoint does" " not support issuing wildcard certificates.") try: authorizations = self.acme.get_authorizations(acme_client, order, order_info) except ClientError: - current_app.logger.error("Unable to resolve pending cert: {}".format(pending_cert.name), exc_info=True) + sentry.captureException() + metrics.send('get_ordered_certificate_error', 'counter', 1) + current_app.logger.error(f"Unable to resolve pending cert: {pending_cert.name}", exc_info=True) return False authorizations = self.acme.finalize_authorizations(acme_client, authorizations) @@ -418,6 +439,8 @@ class ACMEIssuerPlugin(IssuerPlugin): try: order = acme_client.new_order(pending_cert.csr) except WildcardUnsupportedError: + sentry.captureException() + metrics.send('get_ordered_certificates_wildcard_unsupported_error', 'counter', 1) raise Exception("The currently selected ACME CA endpoint does" " not support issuing wildcard certificates.") @@ -430,7 +453,13 @@ class ACMEIssuerPlugin(IssuerPlugin): "order": order, }) except (ClientError, ValueError, Exception) as e: - current_app.logger.error("Unable to resolve pending cert: {}".format(pending_cert), exc_info=True) + sentry.captureException() + metrics.send('get_ordered_certificates_pending_creation_error', 'counter', 1) + current_app.logger.error(f"Unable to resolve pending cert: {pending_cert}", exc_info=True) + + error = e + if globals().get("order") and order: + error += f" Order uri: {order.uri}" certs.append({ "cert": False, "pending_cert": pending_cert, @@ -459,14 +488,17 @@ class ACMEIssuerPlugin(IssuerPlugin): "pending_cert": entry["pending_cert"], }) except (PollError, AcmeError, Exception) as e: + sentry.captureException() + metrics.send('get_ordered_certificates_resolution_error', 'counter', 1) order_url = order.uri + error = f"{e}. Order URI: {order_url}" current_app.logger.error( - "Unable to resolve pending cert: {}. " - "Check out {} for more information.".format(pending_cert, order_url), exc_info=True) + f"Unable to resolve pending cert: {pending_cert}. 
" + f"Check out {order_url} for more information.", exc_info=True) certs.append({ "cert": False, "pending_cert": entry["pending_cert"], - "last_error": e, + "last_error": error, }) # Ensure DNS records get deleted self.acme.cleanup_dns_challenges( From 55f35b0f35752dbcc038d6ab697af6ff89b0996c Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 24 Apr 2019 17:48:18 -0700 Subject: [PATCH 186/357] removing sudo, since deprecated in Travis https://github.com/Netflix/lemur/issues/2758 --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index b540937d..50a3333f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,5 +1,4 @@ language: python -sudo: required dist: trusty node_js: From 2bc604e5a9821552906ed8706ecf2d5a7fe00e7a Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 25 Apr 2019 13:50:41 -0700 Subject: [PATCH 187/357] Better metrics and error reporting --- lemur/plugins/lemur_acme/dyn.py | 50 +++++++++++++++++++++++++----- lemur/plugins/lemur_acme/plugin.py | 7 +++-- lemur/plugins/lemur_aws/elb.py | 23 +++++++++++--- 3 files changed, 64 insertions(+), 16 deletions(-) diff --git a/lemur/plugins/lemur_acme/dyn.py b/lemur/plugins/lemur_acme/dyn.py index 232c6ca3..4eb01958 100644 --- a/lemur/plugins/lemur_acme/dyn.py +++ b/lemur/plugins/lemur_acme/dyn.py @@ -5,7 +5,7 @@ import dns.exception import dns.name import dns.query import dns.resolver -from dyn.tm.errors import DynectCreateError, DynectGetError +from dyn.tm.errors import DynectCreateError, DynectDeleteError, DynectGetError, DynectUpdateError from dyn.tm.session import DynectSession from dyn.tm.zones import Node, Zone, get_all_zones from flask import current_app @@ -51,17 +51,23 @@ def _has_dns_propagated(name, token): def wait_for_dns_change(change_id, account_number=None): fqdn, token = change_id - number_of_attempts = 10 + number_of_attempts = 20 for attempts in range(0, number_of_attempts): status = _has_dns_propagated(fqdn, token) current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) if status: metrics.send('wait_for_dns_change_success', 'counter', 1) break - time.sleep(20) + time.sleep(10) if not status: # TODO: Delete associated DNS text record here metrics.send('wait_for_dns_change_fail', 'counter', 1) + sentry.captureException( + extra={ + "fqdn": fqdn, "txt_record": token} + ) + metrics.send('wait_for_dns_change_error', 'counter', 1, + metric_tags={'fqdn': fqdn, 'txt_record': token}) raise Exception("Unable to query DNS token for fqdn {}.".format(fqdn)) return @@ -105,7 +111,7 @@ def create_txt_record(domain, token, account_number): zone.add_record(node_name, record_type='TXT', txtdata="\"{}\"".format(token), ttl=5) zone.publish() current_app.logger.debug("TXT record created: {0}, token: {1}".format(fqdn, token)) - except DynectCreateError as e: + except (DynectCreateError, DynectUpdateError) as e: if "Cannot duplicate existing record data" in e.message: current_app.logger.debug( "Unable to add record. Domain: {}. Token: {}. " @@ -138,14 +144,33 @@ def delete_txt_record(change_id, account_number, domain, token): all_txt_records = node.get_all_records_by_type('TXT') except DynectGetError: sentry.captureException() - metrics.send('delete_txt_record_error', 'counter', 1) + metrics.send('delete_txt_record_geterror', 'counter', 1) # No Text Records remain or host is not in the zone anymore because all records have been deleted. 
return for txt_record in all_txt_records: if txt_record.txtdata == ("{}".format(token)): current_app.logger.debug("Deleting TXT record name: {0}".format(fqdn)) - txt_record.delete() - zone.publish() + try: + txt_record.delete() + except DynectDeleteError: + sentry.captureException( + extra={ + "fqdn": fqdn, "zone_name": zone_name, "node_name": node_name, + "txt_record": txt_record.txtdata} + ) + metrics.send('delete_txt_record_deleteerror', 'counter', 1, + metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata}) + + try: + zone.publish() + except DynectUpdateError: + sentry.captureException( + extra={ + "fqdn": fqdn, "zone_name": zone_name, "node_name": node_name, + "txt_record": txt_record.txtdata} + ) + metrics.send('delete_txt_record_publish_error', 'counter', 1, + metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata}) def delete_acme_txt_records(domain): @@ -171,7 +196,16 @@ def delete_acme_txt_records(domain): all_txt_records = node.get_all_records_by_type('TXT') for txt_record in all_txt_records: current_app.logger.debug("Deleting TXT record name: {0}".format(fqdn)) - txt_record.delete() + try: + txt_record.delete() + except DynectDeleteError: + sentry.captureException( + extra={ + "fqdn": fqdn, "zone_name": zone_name, "node_name": node_name, + "txt_record": txt_record.txtdata} + ) + metrics.send('delete_txt_record_deleteerror', 'counter', 1, + metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata}) zone.publish() diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index c998c0b8..4360e5b5 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -142,9 +142,9 @@ class AcmeHandler(object): try: orderr = acme_client.finalize_order(order, deadline) except AcmeError: - sentry.captureException() + sentry.captureException(extra={"order_url": order.uri}) metrics.send('request_certificate_error', 'counter', 1) - current_app.logger.error(f"Unable to resolve Acme order: {order}", exc_info=True) + current_app.logger.error(f"Unable to resolve Acme order: {order.uri}", exc_info=True) raise pem_certificate = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, @@ -289,9 +289,10 @@ class AcmeHandler(object): dns_challenges = authz_record.dns_challenge host_to_validate = self.maybe_remove_wildcard(authz_record.host) host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options) + dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type) for dns_challenge in dns_challenges: try: - dns_provider.delete_txt_record( + dns_provider_plugin.delete_txt_record( authz_record.change_id, account_number, dns_challenge.validation_domain_name(host_to_validate), diff --git a/lemur/plugins/lemur_aws/elb.py b/lemur/plugins/lemur_aws/elb.py index b4391dd8..43d99ff2 100644 --- a/lemur/plugins/lemur_aws/elb.py +++ b/lemur/plugins/lemur_aws/elb.py @@ -10,7 +10,7 @@ from flask import current_app from retrying import retry -from lemur.extensions import metrics +from lemur.extensions import metrics, sentry from lemur.exceptions import InvalidListener from lemur.plugins.lemur_aws.sts import sts_client @@ -149,7 +149,7 @@ def describe_listeners_v2(**kwargs): @sts_client('elb') -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs): """ Fetching all policies currently associated with an ELB. 
@@ -157,11 +157,18 @@ def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs): :param load_balancer_name: :return: """ - return kwargs['client'].describe_load_balancer_policies(LoadBalancerName=load_balancer_name, PolicyNames=policy_names) + try: + return kwargs['client'].describe_load_balancer_policies(LoadBalancerName=load_balancer_name, + PolicyNames=policy_names) + except Exception as e: # noqa + metrics.send('describe_load_balancer_policies_fail', 'counter', 1, + metric_tags={"load_balancer_name": load_balancer_name, "policy_names": policy_names, "error": e}) + sentry.captureException(extra={"load_balancer_name": load_balancer_name, "policy_names": policy_names}) + raise @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def describe_ssl_policies_v2(policy_names, **kwargs): """ Fetching all policies currently associated with an ELB. @@ -169,7 +176,13 @@ def describe_ssl_policies_v2(policy_names, **kwargs): :param policy_names: :return: """ - return kwargs['client'].describe_ssl_policies(Names=policy_names) + try: + return kwargs['client'].describe_ssl_policies(Names=policy_names) + except Exception as e: # noqa + metrics.send('describe_ssl_policies_v2_fail', 'counter', 1, + metric_tags={"policy_names": policy_names, "error": e}) + sentry.captureException(extra={"policy_names": policy_names}) + raise @sts_client('elb') From 39584f214b3a4a1dad6f8026048355b6d0c2d760 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 25 Apr 2019 15:12:52 -0700 Subject: [PATCH 188/357] Process DNS Challenges appropriately (1 challenge -> 1 domain) --- lemur/plugins/lemur_acme/plugin.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index 4360e5b5..7519c4c7 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -53,9 +53,11 @@ class AcmeHandler(object): current_app.logger.error(f"Unable to fetch DNS Providers: {e}") self.all_dns_providers = [] - def find_dns_challenge(self, authorizations): + def find_dns_challenge(self, host, authorizations): dns_challenges = [] for authz in authorizations: + if not authz.body.identifier.value == host: + continue for combo in authz.body.challenges: if isinstance(combo.chall, challenges.DNS01): dns_challenges.append(combo) @@ -72,13 +74,13 @@ class AcmeHandler(object): def start_dns_challenge(self, acme_client, account_number, host, dns_provider, order, dns_provider_options): current_app.logger.debug("Starting DNS challenge for {0}".format(host)) - dns_challenges = self.find_dns_challenge(order.authorizations) change_ids = [] host_to_validate = self.maybe_remove_wildcard(host) host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options) + dns_challenges = self.find_dns_challenge(host_to_validate, order.authorizations) - for dns_challenge in self.find_dns_challenge(order.authorizations): + for dns_challenge in dns_challenges: change_id = dns_provider.create_txt_record( dns_challenge.validation_domain_name(host_to_validate), dns_challenge.validation(acme_client.client.net.key), @@ -140,7 +142,7 @@ class AcmeHandler(object): deadline = datetime.datetime.now() + datetime.timedelta(seconds=90) try: - orderr = acme_client.finalize_order(order, deadline) + orderr = acme_client.poll_and_finalize(order, deadline) except AcmeError: sentry.captureException(extra={"order_url": 
order.uri}) metrics.send('request_certificate_error', 'counter', 1) From dcdfb3288350809b4290d8e7d2ceea67c4ec0934 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 25 Apr 2019 19:14:15 -0700 Subject: [PATCH 189/357] Expose verisign exceptions --- lemur/plugins/lemur_verisign/plugin.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_verisign/plugin.py b/lemur/plugins/lemur_verisign/plugin.py index 3f16f997..e5207def 100644 --- a/lemur/plugins/lemur_verisign/plugin.py +++ b/lemur/plugins/lemur_verisign/plugin.py @@ -14,7 +14,7 @@ from cryptography import x509 from flask import current_app from lemur.common.utils import get_psuedo_random_string -from lemur.extensions import metrics +from lemur.extensions import metrics, sentry from lemur.plugins import lemur_verisign as verisign from lemur.plugins.bases import IssuerPlugin, SourcePlugin @@ -201,7 +201,13 @@ class VerisignIssuerPlugin(IssuerPlugin): current_app.logger.info("Requesting a new verisign certificate: {0}".format(data)) response = self.session.post(url, data=data) - cert = handle_response(response.content)['Response']['Certificate'] + try: + cert = handle_response(response.content)['Response']['Certificate'] + except KeyError: + metrics.send('verisign_create_certificate_error', 'counter', 1, + metric_tags={"common_name": issuer_options.get("common_name", "")}) + sentry.captureException(extra={"common_name": issuer_options.get("common_name", "")}) + raise Exception(f"Error with Verisign: {response.content}") # TODO add external id return cert, current_app.config.get('VERISIGN_INTERMEDIATE'), None From 1e64851d791f9c0f03625f958496f5d2c87d17de Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Fri, 26 Apr 2019 10:16:18 -0700 Subject: [PATCH 190/357] Strip out self-polling logic and rely on ACME; Enhance ELB logging and retries --- lemur/plugins/lemur_acme/dyn.py | 17 ++-- lemur/plugins/lemur_acme/plugin.py | 44 ++++------- lemur/plugins/lemur_aws/elb.py | 121 +++++++++++++++++++---------- 3 files changed, 104 insertions(+), 78 deletions(-) diff --git a/lemur/plugins/lemur_acme/dyn.py b/lemur/plugins/lemur_acme/dyn.py index 4eb01958..4159532c 100644 --- a/lemur/plugins/lemur_acme/dyn.py +++ b/lemur/plugins/lemur_acme/dyn.py @@ -64,11 +64,10 @@ def wait_for_dns_change(change_id, account_number=None): metrics.send('wait_for_dns_change_fail', 'counter', 1) sentry.captureException( extra={ - "fqdn": fqdn, "txt_record": token} + "fqdn": str(fqdn), "txt_record": str(token)} ) metrics.send('wait_for_dns_change_error', 'counter', 1, metric_tags={'fqdn': fqdn, 'txt_record': token}) - raise Exception("Unable to query DNS token for fqdn {}.".format(fqdn)) return @@ -155,8 +154,8 @@ def delete_txt_record(change_id, account_number, domain, token): except DynectDeleteError: sentry.captureException( extra={ - "fqdn": fqdn, "zone_name": zone_name, "node_name": node_name, - "txt_record": txt_record.txtdata} + "fqdn": str(fqdn), "zone_name": str(zone_name), "node_name": str(node_name), + "txt_record": str(txt_record.txtdata)} ) metrics.send('delete_txt_record_deleteerror', 'counter', 1, metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata}) @@ -166,11 +165,11 @@ def delete_txt_record(change_id, account_number, domain, token): except DynectUpdateError: sentry.captureException( extra={ - "fqdn": fqdn, "zone_name": zone_name, "node_name": node_name, - "txt_record": txt_record.txtdata} + "fqdn": str(fqdn), "zone_name": str(zone_name), "node_name": str(node_name), + "txt_record": 
str(txt_record.txtdata)} ) metrics.send('delete_txt_record_publish_error', 'counter', 1, - metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata}) + metric_tags={'fqdn': str(fqdn), 'txt_record': str(txt_record.txtdata)}) def delete_acme_txt_records(domain): @@ -201,8 +200,8 @@ def delete_acme_txt_records(domain): except DynectDeleteError: sentry.captureException( extra={ - "fqdn": fqdn, "zone_name": zone_name, "node_name": node_name, - "txt_record": txt_record.txtdata} + "fqdn": str(fqdn), "zone_name": str(zone_name), "node_name": str(node_name), + "txt_record": str(txt_record.txtdata)} ) metrics.send('delete_txt_record_deleteerror', 'counter', 1, metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata}) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index 7519c4c7..3350682c 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -102,49 +102,33 @@ class AcmeHandler(object): metrics.send('complete_dns_challenge_error_no_dnsproviders', 'counter', 1) raise Exception("No DNS providers found for domain: {}".format(authz_record.host)) - for dns_provider in dns_providers: - # Grab account number (For Route53) - dns_provider_options = json.loads(dns_provider.credentials) - account_number = dns_provider_options.get("account_id") - dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type) - for change_id in authz_record.change_id: - try: - dns_provider_plugin.wait_for_dns_change(change_id, account_number=account_number) - except Exception: - metrics.send('complete_dns_challenge_error', 'counter', 1) - sentry.captureException() - current_app.logger.debug( - f"Unable to resolve DNS challenge for change_id: {change_id}, account_id: " - f"{account_number}", exc_info=True) - raise + for dns_challenge in authz_record.dns_challenge: + response = dns_challenge.response(acme_client.client.net.key) - for dns_challenge in authz_record.dns_challenge: - response = dns_challenge.response(acme_client.client.net.key) + verified = response.simple_verify( + dns_challenge.chall, + authz_record.host, + acme_client.client.net.key.public_key() + ) - verified = response.simple_verify( - dns_challenge.chall, - authz_record.host, - acme_client.client.net.key.public_key() - ) + if not verified: + metrics.send('complete_dns_challenge_verification_error', 'counter', 1) + raise ValueError("Failed verification") - if not verified: - metrics.send('complete_dns_challenge_verification_error', 'counter', 1) - raise ValueError("Failed verification") - - time.sleep(5) - acme_client.answer_challenge(dns_challenge, response) + time.sleep(5) + acme_client.answer_challenge(dns_challenge, response) def request_certificate(self, acme_client, authorizations, order): for authorization in authorizations: for authz in authorization.authz: authorization_resource, _ = acme_client.poll(authz) - deadline = datetime.datetime.now() + datetime.timedelta(seconds=90) + deadline = datetime.datetime.now() + datetime.timedelta(seconds=360) try: orderr = acme_client.poll_and_finalize(order, deadline) except AcmeError: - sentry.captureException(extra={"order_url": order.uri}) + sentry.captureException(extra={"order_url": str(order.uri)}) metrics.send('request_certificate_error', 'counter', 1) current_app.logger.error(f"Unable to resolve Acme order: {order.uri}", exc_info=True) raise diff --git a/lemur/plugins/lemur_aws/elb.py b/lemur/plugins/lemur_aws/elb.py index 43d99ff2..77e99d18 100644 --- a/lemur/plugins/lemur_aws/elb.py +++ 
b/lemur/plugins/lemur_aws/elb.py @@ -21,14 +21,22 @@ def retry_throttled(exception): :param exception: :return: """ + + # Log details about the exception + try: + raise exception + except Exception as e: + current_app.logger.error("ELB retry_throttled triggered", exc_info=True) + metrics.send('elb_retry', 'counter', 1, + metric_tags={"exception": e}) + sentry.captureException() + if isinstance(exception, botocore.exceptions.ClientError): if exception.response['Error']['Code'] == 'LoadBalancerNotFound': return False if exception.response['Error']['Code'] == 'CertificateNotFound': return False - - metrics.send('elb_retry', 'counter', 1) return True @@ -63,16 +71,20 @@ def get_all_elbs(**kwargs): :return: """ elbs = [] + try: + while True: + response = get_elbs(**kwargs) - while True: - response = get_elbs(**kwargs) + elbs += response['LoadBalancerDescriptions'] - elbs += response['LoadBalancerDescriptions'] - - if not response.get('NextMarker'): - return elbs - else: - kwargs.update(dict(Marker=response['NextMarker'])) + if not response.get('NextMarker'): + return elbs + else: + kwargs.update(dict(Marker=response['NextMarker'])) + except Exception as e: # noqa + metrics.send('get_all_elbs_error', 'counter', 1) + sentry.captureException() + raise def get_all_elbs_v2(**kwargs): @@ -84,18 +96,23 @@ def get_all_elbs_v2(**kwargs): """ elbs = [] - while True: - response = get_elbs_v2(**kwargs) - elbs += response['LoadBalancers'] + try: + while True: + response = get_elbs_v2(**kwargs) + elbs += response['LoadBalancers'] - if not response.get('NextMarker'): - return elbs - else: - kwargs.update(dict(Marker=response['NextMarker'])) + if not response.get('NextMarker'): + return elbs + else: + kwargs.update(dict(Marker=response['NextMarker'])) + except Exception as e: # noqa + metrics.send('get_all_elbs_v2_error', 'counter', 1) + sentry.captureException() + raise @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def get_listener_arn_from_endpoint(endpoint_name, endpoint_port, **kwargs): """ Get a listener ARN from an endpoint. @@ -103,27 +120,40 @@ def get_listener_arn_from_endpoint(endpoint_name, endpoint_port, **kwargs): :param endpoint_port: :return: """ - client = kwargs.pop('client') - elbs = client.describe_load_balancers(Names=[endpoint_name]) - for elb in elbs['LoadBalancers']: - listeners = client.describe_listeners(LoadBalancerArn=elb['LoadBalancerArn']) - for listener in listeners['Listeners']: - if listener['Port'] == endpoint_port: - return listener['ListenerArn'] + try: + client = kwargs.pop('client') + elbs = client.describe_load_balancers(Names=[endpoint_name]) + for elb in elbs['LoadBalancers']: + listeners = client.describe_listeners(LoadBalancerArn=elb['LoadBalancerArn']) + for listener in listeners['Listeners']: + if listener['Port'] == endpoint_port: + return listener['ListenerArn'] + except Exception as e: # noqa + metrics.send('get_listener_arn_from_endpoint_error', 'counter', 1, + metric_tags={"error": e, "endpoint_name": endpoint_name, "endpoint_port": endpoint_port}) + sentry.captureException(extra={"endpoint_name": str(endpoint_name), + "endpoint_port": str(endpoint_port)}) + raise @sts_client('elb') -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def get_elbs(**kwargs): """ Fetches one page elb objects for a given account and region. 
""" - client = kwargs.pop('client') - return client.describe_load_balancers(**kwargs) + try: + client = kwargs.pop('client') + return client.describe_load_balancers(**kwargs) + except Exception as e: # noqa + metrics.send('get_elbs_error', 'counter', 1, + metric_tags={"error": e}) + sentry.captureException() + raise @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def get_elbs_v2(**kwargs): """ Fetches one page of elb objects for a given account and region. @@ -131,12 +161,18 @@ def get_elbs_v2(**kwargs): :param kwargs: :return: """ - client = kwargs.pop('client') - return client.describe_load_balancers(**kwargs) + try: + client = kwargs.pop('client') + return client.describe_load_balancers(**kwargs) + except Exception as e: # noqa + metrics.send('get_elbs_v2_error', 'counter', 1, + metric_tags={"error": e}) + sentry.captureException() + raise @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def describe_listeners_v2(**kwargs): """ Fetches one page of listener objects for a given elb arn. @@ -144,8 +180,14 @@ def describe_listeners_v2(**kwargs): :param kwargs: :return: """ - client = kwargs.pop('client') - return client.describe_listeners(**kwargs) + try: + client = kwargs.pop('client') + return client.describe_listeners(**kwargs) + except Exception as e: # noqa + metrics.send('describe_listeners_v2_error', 'counter', 1, + metric_tags={"error": e}) + sentry.captureException() + raise @sts_client('elb') @@ -157,11 +199,12 @@ def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs): :param load_balancer_name: :return: """ + try: return kwargs['client'].describe_load_balancer_policies(LoadBalancerName=load_balancer_name, PolicyNames=policy_names) except Exception as e: # noqa - metrics.send('describe_load_balancer_policies_fail', 'counter', 1, + metrics.send('describe_load_balancer_policies_error', 'counter', 1, metric_tags={"load_balancer_name": load_balancer_name, "policy_names": policy_names, "error": e}) sentry.captureException(extra={"load_balancer_name": load_balancer_name, "policy_names": policy_names}) raise @@ -179,14 +222,14 @@ def describe_ssl_policies_v2(policy_names, **kwargs): try: return kwargs['client'].describe_ssl_policies(Names=policy_names) except Exception as e: # noqa - metrics.send('describe_ssl_policies_v2_fail', 'counter', 1, + metrics.send('describe_ssl_policies_v2_error', 'counter', 1, metric_tags={"policy_names": policy_names, "error": e}) sentry.captureException(extra={"policy_names": policy_names}) raise @sts_client('elb') -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def describe_load_balancer_types(policies, **kwargs): """ Describe the policies with policy details. 
@@ -198,7 +241,7 @@ def describe_load_balancer_types(policies, **kwargs): @sts_client('elb') -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def attach_certificate(name, port, certificate_id, **kwargs): """ Attaches a certificate to a listener, throws exception @@ -218,7 +261,7 @@ def attach_certificate(name, port, certificate_id, **kwargs): @sts_client('elbv2') -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def attach_certificate_v2(listener_arn, port, certificates, **kwargs): """ Attaches a certificate to a listener, throws exception From 1a3ba46873c8437d11829a76f2451feaa0235397 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Fri, 26 Apr 2019 10:18:54 -0700 Subject: [PATCH 191/357] More retry changes --- lemur/plugins/lemur_aws/elb.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_aws/elb.py b/lemur/plugins/lemur_aws/elb.py index 77e99d18..618f75e8 100644 --- a/lemur/plugins/lemur_aws/elb.py +++ b/lemur/plugins/lemur_aws/elb.py @@ -206,7 +206,8 @@ def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs): except Exception as e: # noqa metrics.send('describe_load_balancer_policies_error', 'counter', 1, metric_tags={"load_balancer_name": load_balancer_name, "policy_names": policy_names, "error": e}) - sentry.captureException(extra={"load_balancer_name": load_balancer_name, "policy_names": policy_names}) + sentry.captureException(extra={"load_balancer_name": str(load_balancer_name), + "policy_names": str(policy_names)}) raise @@ -224,7 +225,7 @@ def describe_ssl_policies_v2(policy_names, **kwargs): except Exception as e: # noqa metrics.send('describe_ssl_policies_v2_error', 'counter', 1, metric_tags={"policy_names": policy_names, "error": e}) - sentry.captureException(extra={"policy_names": policy_names}) + sentry.captureException(extra={"policy_names": str(policy_names)}) raise From 333ba8030a37ec11e1036fc456c1efe78ea7f69d Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Fri, 26 Apr 2019 15:45:04 -0700 Subject: [PATCH 192/357] Ensure hostname is lowercase when comparing DNS challenges. 
ACME will automatically lowercase the hostname --- lemur/plugins/lemur_acme/plugin.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index 3350682c..8380c966 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -19,7 +19,7 @@ import OpenSSL.crypto import josepy as jose from acme import challenges, messages from acme.client import BackwardsCompatibleClientV2, ClientNetwork -from acme.errors import PollError, WildcardUnsupportedError +from acme.errors import PollError, TimeoutError, WildcardUnsupportedError from acme.messages import Error as AcmeError from botocore.exceptions import ClientError from flask import current_app @@ -56,7 +56,7 @@ class AcmeHandler(object): def find_dns_challenge(self, host, authorizations): dns_challenges = [] for authz in authorizations: - if not authz.body.identifier.value == host: + if not authz.body.identifier.value.lower() == host.lower(): continue for combo in authz.body.challenges: if isinstance(combo.chall, challenges.DNS01): @@ -79,6 +79,10 @@ class AcmeHandler(object): host_to_validate = self.maybe_remove_wildcard(host) host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options) dns_challenges = self.find_dns_challenge(host_to_validate, order.authorizations) + if not dns_challenges: + sentry.captureException() + metrics.send('start_dns_challenge_error_no_dns_challenges', 'counter', 1) + raise Exception("Unable to determine DNS challenges from authorizations") for dns_challenge in dns_challenges: change_id = dns_provider.create_txt_record( @@ -127,7 +131,7 @@ class AcmeHandler(object): try: orderr = acme_client.poll_and_finalize(order, deadline) - except AcmeError: + except (AcmeError, TimeoutError): sentry.captureException(extra={"order_url": str(order.uri)}) metrics.send('request_certificate_error', 'counter', 1) current_app.logger.error(f"Unable to resolve Acme order: {order.uri}", exc_info=True) From 1a90e71884078ad1f8ec16f856542308dbdd5cd9 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Fri, 26 Apr 2019 17:27:44 -0700 Subject: [PATCH 193/357] Move ACME host validation logic prior to R53 host modification --- lemur/plugins/lemur_acme/plugin.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index 8380c966..02f08f9a 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -77,8 +77,9 @@ class AcmeHandler(object): change_ids = [] host_to_validate = self.maybe_remove_wildcard(host) - host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options) dns_challenges = self.find_dns_challenge(host_to_validate, order.authorizations) + host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options) + if not dns_challenges: sentry.captureException() metrics.send('start_dns_challenge_error_no_dns_challenges', 'counter', 1) From 6e3f394cff0d89356542ddaa62f56c424ba67d89 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Mon, 29 Apr 2019 13:55:26 -0700 Subject: [PATCH 194/357] Updated requirements ; Revert change and require DNS validation by provider --- lemur/plugins/lemur_acme/plugin.py | 42 ++++++++++++++++++++++++------ requirements-dev.txt | 6 ++--- requirements-docs.txt | 14 +++++----- requirements-tests.txt | 10 +++---- requirements.txt | 14 +++++----- 5 files changed, 56 insertions(+), 30 deletions(-) diff --git 
a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index 02f08f9a..b6a5dbbf 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -107,21 +107,45 @@ class AcmeHandler(object): metrics.send('complete_dns_challenge_error_no_dnsproviders', 'counter', 1) raise Exception("No DNS providers found for domain: {}".format(authz_record.host)) - for dns_challenge in authz_record.dns_challenge: - response = dns_challenge.response(acme_client.client.net.key) + for dns_provider in dns_providers: + # Grab account number (For Route53) + dns_provider_options = json.loads(dns_provider.credentials) + account_number = dns_provider_options.get("account_id") + dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type) + for change_id in authz_record.change_id: + try: + dns_provider_plugin.wait_for_dns_change(change_id, account_number=account_number) + except Exception: + metrics.send('complete_dns_challenge_error', 'counter', 1) + sentry.captureException() + current_app.logger.debug( + f"Unable to resolve DNS challenge for change_id: {change_id}, account_id: " + f"{account_number}", exc_info=True) + raise - verified = response.simple_verify( - dns_challenge.chall, - authz_record.host, - acme_client.client.net.key.public_key() - ) + for dns_challenge in authz_record.dns_challenge: + response = dns_challenge.response(acme_client.client.net.key) + + verified = response.simple_verify( + dns_challenge.chall, + authz_record.host, + acme_client.client.net.key.public_key() + ) if not verified: metrics.send('complete_dns_challenge_verification_error', 'counter', 1) raise ValueError("Failed verification") time.sleep(5) - acme_client.answer_challenge(dns_challenge, response) + res = acme_client.answer_challenge(dns_challenge, response) + current_app.logger.debug(f"answer_challenge response: {res}") + + def get_dns_challenge(self, authzr): + for challenge in authzr.body.challenges: + if challenge.chall.typ == 'dns-01': + return challenge + else: + raise Exception("Could not find an HTTP challenge!") def request_certificate(self, acme_client, authorizations, order): for authorization in authorizations: @@ -132,6 +156,7 @@ class AcmeHandler(object): try: orderr = acme_client.poll_and_finalize(order, deadline) + except (AcmeError, TimeoutError): sentry.captureException(extra={"order_url": str(order.uri)}) metrics.send('request_certificate_error', 'counter', 1) @@ -480,6 +505,7 @@ class ACMEIssuerPlugin(IssuerPlugin): "pending_cert": entry["pending_cert"], }) except (PollError, AcmeError, Exception) as e: + raise sentry.captureException() metrics.send('get_ordered_certificates_resolution_error', 'counter', 1) order_url = order.uri diff --git a/requirements-dev.txt b/requirements-dev.txt index f9f1b8f3..0652df34 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -11,7 +11,7 @@ cfgv==1.6.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.4.1 # via pre-commit +identify==1.4.2 # via pre-commit idna==2.8 # via requests importlib-metadata==0.9 # via pre-commit invoke==1.2.0 @@ -31,6 +31,6 @@ toml==0.10.0 # via pre-commit tqdm==4.31.1 # via twine twine==1.13.0 urllib3==1.24.2 # via requests -virtualenv==16.4.3 # via pre-commit +virtualenv==16.5.0 # via pre-commit webencodings==0.5.1 # via bleach -zipp==0.3.3 # via importlib-metadata +zipp==0.4.0 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index 5f69328d..4b75a502 100644 --- a/requirements-docs.txt 
+++ b/requirements-docs.txt @@ -7,7 +7,7 @@ acme==0.33.1 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 -alembic==1.0.9 +alembic==1.0.10 amqp==2.4.2 aniso8601==6.0.0 arrow==0.13.1 @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.6.0.0 blinker==1.4 -boto3==1.9.134 -botocore==1.12.134 +boto3==1.9.138 +botocore==1.12.138 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -38,7 +38,7 @@ flask-migrate==2.4.0 flask-principal==0.4.0 flask-restful==0.3.7 flask-script==2.0.6 -flask-sqlalchemy==2.3.2 +flask-sqlalchemy==2.4.0 flask==1.0.2 future==0.17.1 gunicorn==19.9.0 @@ -47,7 +47,7 @@ idna==2.8 imagesize==1.1.0 # via sphinx inflection==0.3.1 itsdangerous==1.1.0 -javaobj-py3==0.2.4 +javaobj-py3==0.3.0 jinja2==2.10.1 jmespath==0.9.4 josepy==1.1.0 @@ -62,10 +62,10 @@ mock==2.0.0 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx paramiko==2.4.2 -pbr==5.1.3 +pbr==5.2.0 pem==19.1.0 psycopg2==2.8.2 -pyasn1-modules==0.2.4 +pyasn1-modules==0.2.5 pyasn1==0.4.5 pycparser==2.19 pycryptodomex==3.8.1 diff --git a/requirements-tests.txt b/requirements-tests.txt index 9dd01574..0a4660d0 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -7,11 +7,11 @@ asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via pytest -aws-sam-translator==1.10.0 # via cfn-lint +aws-sam-translator==1.11.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto -boto3==1.9.134 # via aws-sam-translator, moto +boto3==1.9.138 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.134 # via aws-xray-sdk, boto3, moto, s3transfer +botocore==1.12.138 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.3 # via cryptography cfn-lint==0.19.1 # via moto @@ -42,7 +42,7 @@ mock==2.0.0 # via moto more-itertools==7.0.0 # via pytest moto==1.3.8 nose==1.3.7 -pbr==5.1.3 # via mock +pbr==5.2.0 # via mock pluggy==0.9.0 # via pytest py==1.8.0 # via pytest pyasn1==0.4.5 # via rsa @@ -55,7 +55,7 @@ python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==3.0.1 # via moto pytz==2019.1 # via moto pyyaml==5.1 -requests-mock==1.5.2 +requests-mock==1.6.0 requests==2.21.0 # via cfn-lint, docker, moto, requests-mock, responses responses==0.10.6 # via moto rsa==4.0 # via python-jose diff --git a/requirements.txt b/requirements.txt index 2d17b930..74290471 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,7 +6,7 @@ # acme==0.33.1 alembic-autogenerate-enums==0.0.2 -alembic==1.0.9 # via flask-migrate +alembic==1.0.10 # via flask-migrate amqp==2.4.2 # via kombu aniso8601==6.0.0 # via flask-restful arrow==0.13.1 @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.134 -botocore==1.12.134 +boto3==1.9.138 +botocore==1.12.138 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -36,7 +36,7 @@ flask-migrate==2.4.0 flask-principal==0.4.0 flask-restful==0.3.7 flask-script==2.0.6 -flask-sqlalchemy==2.3.2 +flask-sqlalchemy==2.4.0 flask==1.0.2 future==0.17.1 gunicorn==19.9.0 @@ -44,7 +44,7 @@ hvac==0.8.2 idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask -javaobj-py3==0.2.4 # via pyjks +javaobj-py3==0.3.0 # via pyjks jinja2==2.10.1 jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme @@ -58,10 +58,10 @@ marshmallow==2.19.2 mock==2.0.0 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 -pbr==5.1.3 # via mock +pbr==5.2.0 # via mock pem==19.1.0 psycopg2==2.8.2 
-pyasn1-modules==0.2.4 # via pyjks, python-ldap +pyasn1-modules==0.2.5 # via pyjks, python-ldap pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, pyjks, python-ldap pycparser==2.19 # via cffi pycryptodomex==3.8.1 # via pyjks From 3a1da724194b0f02612badf3f6c388793d4f1d15 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Mon, 29 Apr 2019 13:57:04 -0700 Subject: [PATCH 195/357] nt --- lemur/plugins/lemur_acme/plugin.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index b6a5dbbf..06dec882 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -140,13 +140,6 @@ class AcmeHandler(object): res = acme_client.answer_challenge(dns_challenge, response) current_app.logger.debug(f"answer_challenge response: {res}") - def get_dns_challenge(self, authzr): - for challenge in authzr.body.challenges: - if challenge.chall.typ == 'dns-01': - return challenge - else: - raise Exception("Could not find an HTTP challenge!") - def request_certificate(self, acme_client, authorizations, order): for authorization in authorizations: for authz in authorization.authz: @@ -505,7 +498,6 @@ class ACMEIssuerPlugin(IssuerPlugin): "pending_cert": entry["pending_cert"], }) except (PollError, AcmeError, Exception) as e: - raise sentry.captureException() metrics.send('get_ordered_certificates_resolution_error', 'counter', 1) order_url = order.uri From 8ed6187697e13c30e6d9e76682dc88ff97b2a8ad Mon Sep 17 00:00:00 2001 From: Garfield Carneiro Date: Fri, 3 May 2019 13:49:02 +0530 Subject: [PATCH 196/357] Package name has changed python-software-properties was renamed to software-properties-common https://askubuntu.com/questions/422975/e-package-python-software-properties-has-no-installation-candidate --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 46efd50a..b9d7335e 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM python:3.5 RUN apt-get update -RUN apt-get install -y make python-software-properties curl +RUN apt-get install -y make software-properties-common curl RUN curl -sL https://deb.nodesource.com/setup_7.x | bash - RUN apt-get update RUN apt-get install -y nodejs libldap2-dev libsasl2-dev libldap2-dev libssl-dev From 2063baefc911ccff0c9d3a09b671eadcda45744d Mon Sep 17 00:00:00 2001 From: Jose Plana Date: Wed, 1 May 2019 00:47:56 +0200 Subject: [PATCH 197/357] Fixes userinfo using Bearer token --- lemur/auth/views.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lemur/auth/views.py b/lemur/auth/views.py index 7a1bb34c..6dad88d2 100644 --- a/lemur/auth/views.py +++ b/lemur/auth/views.py @@ -113,7 +113,10 @@ def retrieve_user(user_api_url, access_token): user_params = dict(access_token=access_token, schema='profile') # retrieve information about the current user. 
- r = requests.get(user_api_url, params=user_params) + r = requests.get( + user_api_url, + params=user_params, + headers={'Authorization': 'Bearer {}'.format(access_token)}) profile = r.json() user = user_service.get_by_email(profile['email']) From 6c99e76c9abd18df15b30a0bf6ec630842cd64c6 Mon Sep 17 00:00:00 2001 From: Jose Plana Date: Wed, 1 May 2019 01:03:25 +0200 Subject: [PATCH 198/357] Better error management in jwks token validation --- lemur/auth/views.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/lemur/auth/views.py b/lemur/auth/views.py index 7a1bb34c..3a2a8c68 100644 --- a/lemur/auth/views.py +++ b/lemur/auth/views.py @@ -331,8 +331,9 @@ class Ping(Resource): ) jwks_url = current_app.config.get('PING_JWKS_URL') - validate_id_token(id_token, args['clientId'], jwks_url) - + result = validate_id_token(id_token, args['clientId'], jwks_url) + if result: + return result user, profile = retrieve_user(user_api_url, access_token) roles = create_user_roles(profile) update_user(user, profile, roles) @@ -380,7 +381,9 @@ class OAuth2(Resource): ) jwks_url = current_app.config.get('PING_JWKS_URL') - validate_id_token(id_token, args['clientId'], jwks_url) + result = validate_id_token(id_token, args['clientId'], jwks_url) + if result: + return result user, profile = retrieve_user(user_api_url, access_token) roles = create_user_roles(profile) From deed1b968541b03552278290eaeda50bf42a8112 Mon Sep 17 00:00:00 2001 From: Jose Plana Date: Wed, 1 May 2019 01:15:52 +0200 Subject: [PATCH 199/357] Don't fail if googleGroups is not found in user profile --- lemur/auth/views.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/lemur/auth/views.py b/lemur/auth/views.py index 7a1bb34c..a18d37fe 100644 --- a/lemur/auth/views.py +++ b/lemur/auth/views.py @@ -129,13 +129,16 @@ def create_user_roles(profile): roles = [] # update their google 'roles' - for group in profile['googleGroups']: - role = role_service.get_by_name(group) - if not role: - role = role_service.create(group, description='This is a google group based role created by Lemur', third_party=True) - if not role.third_party: - role = role_service.set_third_party(role.id, third_party_status=True) - roles.append(role) + if 'googleGroups' in profile: + for group in profile['googleGroups']: + role = role_service.get_by_name(group) + if not role: + role = role_service.create(group, description='This is a google group based role created by Lemur', third_party=True) + if not role.third_party: + role = role_service.set_third_party(role.id, third_party_status=True) + roles.append(role) + else: + current_app.logger.warning("'googleGroups' not sent by identity provider, no specific roles will assigned to the user.") role = role_service.get_by_name(profile['email']) From 47595e20737420b3311c0ad3ce49d9f3a67f8555 Mon Sep 17 00:00:00 2001 From: Jose Plana Date: Wed, 1 May 2019 01:21:54 +0200 Subject: [PATCH 200/357] Enable gulp server to proxy backend --- gulp/server.js | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/gulp/server.js b/gulp/server.js index 777100f6..6c61273e 100644 --- a/gulp/server.js +++ b/gulp/server.js @@ -6,31 +6,31 @@ var browserSync = require('browser-sync'); var httpProxy = require('http-proxy'); /* This configuration allow you to configure browser sync to proxy your backend */ -/* - var proxyTarget = 'http://localhost/context/'; // The location of your backend - var proxyApiPrefix = 'api'; // The element in the URL which 
differentiate between API request and static file request + + var proxyTarget = 'http://localhost:8000/'; // The location of your backend + var proxyApiPrefix = '/api/'; // The element in the URL which differentiate between API request and static file request var proxy = httpProxy.createProxyServer({ - target: proxyTarget + target: proxyTarget }); function proxyMiddleware(req, res, next) { - if (req.url.indexOf(proxyApiPrefix) !== -1) { - proxy.web(req, res); - } else { - next(); + if (req.url.indexOf(proxyApiPrefix) !== -1) { + proxy.web(req, res); + } else { + next(); + } } - } - */ function browserSyncInit(baseDir, files, browser) { browser = browser === undefined ? 'default' : browser; browserSync.instance = browserSync.init(files, { startPath: '/index.html', - server: { - baseDir: baseDir, - routes: { - '/bower_components': './bower_components' - } + server: { + middleware: [proxyMiddleware], + baseDir: baseDir, + routes: { + '/bower_components': './bower_components' + } }, browser: browser, ghostMode: false From 6d5552afd38a82ecf0345730ccb770b48803106f Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 6 May 2019 16:31:50 -0700 Subject: [PATCH 201/357] updating requirements --- requirements-dev.txt | 4 ++-- requirements-docs.txt | 13 ++++++------- requirements-tests.txt | 9 ++++----- requirements.txt | 13 ++++++------- 4 files changed, 18 insertions(+), 21 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 0652df34..29509d99 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -18,7 +18,7 @@ invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.15.2 +pre-commit==1.16.0 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.3.1 # via readme-renderer @@ -30,7 +30,7 @@ six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit tqdm==4.31.1 # via twine twine==1.13.0 -urllib3==1.24.2 # via requests +urllib3==1.24.3 # via requests virtualenv==16.5.0 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.4.0 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index 4b75a502..fef37c08 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index # -acme==0.33.1 +acme==0.34.1 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.10 @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.6.0.0 blinker==1.4 -boto3==1.9.138 -botocore==1.12.138 +boto3==1.9.143 +botocore==1.12.143 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -56,13 +56,12 @@ kombu==4.5.0 lockfile==0.12.2 mako==1.0.9 markupsafe==1.1.1 -marshmallow-sqlalchemy==0.16.2 +marshmallow-sqlalchemy==0.16.3 marshmallow==2.19.2 -mock==2.0.0 +mock==3.0.4 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx paramiko==2.4.2 -pbr==5.2.0 pem==19.1.0 psycopg2==2.8.2 pyasn1-modules==0.2.5 @@ -101,7 +100,7 @@ sqlalchemy-utils==0.33.11 sqlalchemy==1.3.3 tabulate==0.8.3 twofish==0.3.0 -urllib3==1.24.2 +urllib3==1.24.3 vine==1.3.0 werkzeug==0.15.2 xmltodict==0.12.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index 0a4660d0..5d28412c 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -9,9 +9,9 @@ atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via pytest aws-sam-translator==1.11.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto -boto3==1.9.138 # via aws-sam-translator, moto +boto3==1.9.143 # via 
aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.138 # via aws-xray-sdk, boto3, moto, s3transfer +botocore==1.12.143 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.3 # via cryptography cfn-lint==0.19.1 # via moto @@ -38,11 +38,10 @@ jsonpickle==1.1 # via aws-xray-sdk jsonpointer==2.0 # via jsonpatch jsonschema==2.6.0 # via aws-sam-translator, cfn-lint markupsafe==1.1.1 # via jinja2 -mock==2.0.0 # via moto +mock==3.0.4 # via moto more-itertools==7.0.0 # via pytest moto==1.3.8 nose==1.3.7 -pbr==5.2.0 # via mock pluggy==0.9.0 # via pytest py==1.8.0 # via pytest pyasn1==0.4.5 # via rsa @@ -62,7 +61,7 @@ rsa==4.0 # via python-jose s3transfer==0.2.0 # via boto3 six==1.12.0 # via aws-sam-translator, cfn-lint, cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker -urllib3==1.24.2 # via botocore, requests +urllib3==1.24.3 # via botocore, requests websocket-client==0.56.0 # via docker werkzeug==0.15.2 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk diff --git a/requirements.txt b/requirements.txt index 74290471..fe27838b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file requirements.txt requirements.in -U --no-index # -acme==0.33.1 +acme==0.34.1 alembic-autogenerate-enums==0.0.2 alembic==1.0.10 # via flask-migrate amqp==2.4.2 # via kombu @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.138 -botocore==1.12.138 +boto3==1.9.143 +botocore==1.12.143 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -53,12 +53,11 @@ kombu==4.5.0 lockfile==0.12.2 mako==1.0.9 # via alembic markupsafe==1.1.1 # via jinja2, mako -marshmallow-sqlalchemy==0.16.2 +marshmallow-sqlalchemy==0.16.3 marshmallow==2.19.2 -mock==2.0.0 # via acme +mock==3.0.4 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 -pbr==5.2.0 # via mock pem==19.1.0 psycopg2==2.8.2 pyasn1-modules==0.2.5 # via pyjks, python-ldap @@ -86,7 +85,7 @@ sqlalchemy-utils==0.33.11 sqlalchemy==1.3.3 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 twofish==0.3.0 # via pyjks -urllib3==1.24.2 # via botocore, requests +urllib3==1.24.3 # via botocore, requests vine==1.3.0 # via amqp, celery werkzeug==0.15.2 # via flask xmltodict==0.12.0 From a7af3cf8d279c00b74eac1b094653eab95a2a6e7 Mon Sep 17 00:00:00 2001 From: Daniel Iancu Date: Tue, 7 May 2019 02:39:49 +0300 Subject: [PATCH 202/357] Fix Cloudflare DNS --- lemur/plugins/lemur_acme/cloudflare.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/lemur/plugins/lemur_acme/cloudflare.py b/lemur/plugins/lemur_acme/cloudflare.py index 77052242..a6308025 100644 --- a/lemur/plugins/lemur_acme/cloudflare.py +++ b/lemur/plugins/lemur_acme/cloudflare.py @@ -66,11 +66,12 @@ def create_txt_record(host, value, account_number): return zone_id, r['id'] -def delete_txt_record(change_id, account_number, host, value): +def delete_txt_record(change_ids, account_number, host, value): cf = cf_api_call() - zone_id, record_id = change_id - current_app.logger.debug("Removing record with id {0}".format(record_id)) - try: - cf.zones.dns_records.delete(zone_id, record_id) - except Exception as e: - current_app.logger.error('/zones.dns_records.post: %s' % e) + for change_id in change_ids: + zone_id, record_id 
= change_id + current_app.logger.debug("Removing record with id {0}".format(record_id)) + try: + cf.zones.dns_records.delete(zone_id, record_id) + except Exception as e: + current_app.logger.error('/zones.dns_records.post: %s' % e) From fb3f0bd72a8f868284f7efb5805a2d132c4706ad Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 7 May 2019 09:37:30 -0400 Subject: [PATCH 203/357] adding Vault Source plugin --- lemur/plugins/lemur_vault_dest/plugin.py | 133 +++++++++++++++++++++-- setup.py | 1 + 2 files changed, 127 insertions(+), 7 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 819ba22b..0ed035d0 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -17,11 +17,124 @@ from flask import current_app from lemur.common.defaults import common_name from lemur.common.utils import parse_certificate from lemur.plugins.bases import DestinationPlugin +from lemur.plugins.bases import SourcePlugin from cryptography import x509 from cryptography.hazmat.backends import default_backend +class VaultSourcePlugin(SourcePlugin): + """ Class for importing certificates from Hashicorp Vault""" + title = 'Vault' + slug = 'vault-source' + description = 'Discovers all certificates in a given path' + + author = 'Christopher Jolley' + author_url = 'https://github.com/alwaysjolley/lemur' + + options = [ + { + 'name': 'vaultUrl', + 'type': 'str', + 'required': True, + 'validation': '^https?://[a-zA-Z0-9.:-]+$', + 'helpMessage': 'Valid URL to Hashi Vault instance' + }, + { + 'name': 'vaultKvApiVersion', + 'type': 'select', + 'value': '2', + 'available': [ + '1', + '2' + ], + 'required': True, + 'helpMessage': 'Version of the Vault KV API to use' + }, + { + 'name': 'vaultAuthTokenFile', + 'type': 'str', + 'required': True, + 'validation': '(/[^/]+)+', + 'helpMessage': 'Must be a valid file path!' + }, + { + 'name': 'vaultMount', + 'type': 'str', + 'required': True, + 'validation': r'^\S+$', + 'helpMessage': 'Must be a valid Vault secrets mount name!' 
+ }, + { + 'name': 'vaultPath', + 'type': 'str', + 'required': True, + 'validation': '^([a-zA-Z0-9_-]+/?)+$', + 'helpMessage': 'Must be a valid Vault secrets path' + }, + { + 'name': 'objectName', + 'type': 'str', + 'required': True, + 'validation': '[0-9a-zA-Z:_-]+', + 'helpMessage': 'Object Name to search' + }, + ] + + + def get_certificates(self, options, **kwargs): + """Pull certificates from objects in Hashicorp Vault""" + data = [] + cert = [] + body = '' + url = self.get_option('vaultUrl', options) + token_file = self.get_option('vaultAuthTokenFile', options) + mount = self.get_option('vaultMount', options) + path = self.get_option('vaultPath', options) + obj_name = self.get_option('objectName', options) + api_version = self.get_option('vaultKvApiVersion', options) + cert_filter = '-----BEGIN CERTIFICATE-----' + cert_delimiter = '-----END CERTIFICATE-----' + + with open(token_file, 'r') as tfile: + token = tfile.readline().rstrip('\n') + + client = hvac.Client(url=url, token=token) + client.secrets.kv.default_kv_version = api_version + + path = '{0}/{1}'.format(path, obj_name) + + secret = get_secret(client, mount, path) + for cname in secret['data']: + #current_app.logger.info("Certificate Data: {0}".format(secret['data'][cname])) + if 'crt' in secret['data'][cname]: + cert = secret['data'][cname]['crt'].split(cert_delimiter+'\n') + elif 'pem' in secret['data'][cname]: + cert = secret['data'][cname]['pem'].split(cert_delimiter+'\n') + else: + for key in secret['data'][cname]: + if secret['data'][cname][key].startswith(cert_filter): + cert = secret['data'][cname][key].split(cert_delimiter+'\n') + break + body = cert[0]+cert_delimiter + if 'chain' in secret['data'][cname]: + chain = secret['data'][cname]['chain'] + elif len(cert) > 1: + if cert[1].startswith(cert_filter): + chain = cert[1]+cert_delimiter + else: + chain = None + else: + chain = None + data.append({'body': body, 'chain': chain, 'name': cname}) + return [dict(body=c['body'], chain=c.get('chain'), name=c['name']) for c in data] + + def get_endpoints(self, options, **kwargs): + """ Not implemented yet """ + endpoints = [] + return endpoints + + class VaultDestinationPlugin(DestinationPlugin): """Hashicorp Vault Destination plugin for Lemur""" title = 'Vault' @@ -61,7 +174,7 @@ class VaultDestinationPlugin(DestinationPlugin): 'name': 'vaultMount', 'type': 'str', 'required': True, - 'validation': '^\S+$', + 'validation': r'^\S+$', 'helpMessage': 'Must be a valid Vault secrets mount name!' 
}, { @@ -85,6 +198,7 @@ class VaultDestinationPlugin(DestinationPlugin): 'available': [ 'Nginx', 'Apache', + 'PEM', 'no chain' ], 'required': True, @@ -100,6 +214,7 @@ class VaultDestinationPlugin(DestinationPlugin): } ] + def __init__(self, *args, **kwargs): super(VaultDestinationPlugin, self).__init__(*args, **kwargs) @@ -136,8 +251,8 @@ class VaultDestinationPlugin(DestinationPlugin): "Exception compiling regex filter: invalid filter", exc_info=True) - with open(token_file, 'r') as file: - token = file.readline().rstrip('\n') + with open(token_file, 'r') as tfile: + token = tfile.readline().rstrip('\n') client = hvac.Client(url=url, token=token) client.secrets.kv.default_kv_version = api_version @@ -150,14 +265,18 @@ class VaultDestinationPlugin(DestinationPlugin): secret = get_secret(client, mount, path) secret['data'][cname] = {} - if bundle == 'Nginx' and cert_chain: + if bundle == 'Nginx': secret['data'][cname]['crt'] = '{0}\n{1}'.format(body, cert_chain) - elif bundle == 'Apache' and cert_chain: + secret['data'][cname]['key'] = private_key + elif bundle == 'Apache': secret['data'][cname]['crt'] = body secret['data'][cname]['chain'] = cert_chain + secret['data'][cname]['key'] = private_key + elif bundle == 'PEM': + secret['data'][cname]['pem'] = '{0}\n{1}\n{2}'.format(body, cert_chain, private_key) else: secret['data'][cname]['crt'] = body - secret['data'][cname]['key'] = private_key + secret['data'][cname]['key'] = private_key if isinstance(san_list, list): secret['data'][cname]['san'] = san_list try: @@ -184,7 +303,7 @@ def get_san_list(body): def get_secret(client, mount, path): - """ retreiive existing data from mount path and return dictionary """ + """ retreive existing data from mount path and return dictionary """ result = {'data': {}} try: if client.secrets.kv.default_kv_version == '1': diff --git a/setup.py b/setup.py index 6fc55420..a01c110f 100644 --- a/setup.py +++ b/setup.py @@ -155,6 +155,7 @@ setup( 'digicert_cis_source = lemur.plugins.lemur_digicert.plugin:DigiCertCISSourcePlugin', 'csr_export = lemur.plugins.lemur_csr.plugin:CSRExportPlugin', 'sftp_destination = lemur.plugins.lemur_sftp.plugin:SFTPDestinationPlugin', + 'vault_source = lemur.plugins.lemur_vault_dest.plugin:VaultSourcePlugin', 'vault_desination = lemur.plugins.lemur_vault_dest.plugin:VaultDestinationPlugin', 'adcs_issuer = lemur.plugins.lemur_adcs.plugin:ADCSIssuerPlugin', 'adcs_source = lemur.plugins.lemur_adcs.plugin:ADCSSourcePlugin' From b0c8901b0ac9cc80c0ac8927eb21e50d0e6bf0fe Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 7 May 2019 10:05:01 -0400 Subject: [PATCH 204/357] lint cleanup --- lemur/plugins/lemur_vault_dest/plugin.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 0ed035d0..803b0a0c 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -81,7 +81,6 @@ class VaultSourcePlugin(SourcePlugin): }, ] - def get_certificates(self, options, **kwargs): """Pull certificates from objects in Hashicorp Vault""" data = [] @@ -106,22 +105,21 @@ class VaultSourcePlugin(SourcePlugin): secret = get_secret(client, mount, path) for cname in secret['data']: - #current_app.logger.info("Certificate Data: {0}".format(secret['data'][cname])) if 'crt' in secret['data'][cname]: - cert = secret['data'][cname]['crt'].split(cert_delimiter+'\n') + cert = secret['data'][cname]['crt'].split(cert_delimiter + '\n') elif 'pem' in 
secret['data'][cname]: - cert = secret['data'][cname]['pem'].split(cert_delimiter+'\n') + cert = secret['data'][cname]['pem'].split(cert_delimiter + '\n') else: for key in secret['data'][cname]: if secret['data'][cname][key].startswith(cert_filter): - cert = secret['data'][cname][key].split(cert_delimiter+'\n') + cert = secret['data'][cname][key].split(cert_delimiter + '\n') break - body = cert[0]+cert_delimiter + body = cert[0] + cert_delimiter if 'chain' in secret['data'][cname]: chain = secret['data'][cname]['chain'] elif len(cert) > 1: if cert[1].startswith(cert_filter): - chain = cert[1]+cert_delimiter + chain = cert[1] + cert_delimiter else: chain = None else: @@ -214,7 +212,6 @@ class VaultDestinationPlugin(DestinationPlugin): } ] - def __init__(self, *args, **kwargs): super(VaultDestinationPlugin, self).__init__(*args, **kwargs) From 4e6e7edf271f0ef5bf4bb22d601f5b59c1d1554d Mon Sep 17 00:00:00 2001 From: Jose Plana Date: Tue, 7 May 2019 22:53:01 +0200 Subject: [PATCH 205/357] Rename return variable for better readability --- lemur/auth/views.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lemur/auth/views.py b/lemur/auth/views.py index 1ca27bf4..87f460f3 100644 --- a/lemur/auth/views.py +++ b/lemur/auth/views.py @@ -337,9 +337,9 @@ class Ping(Resource): ) jwks_url = current_app.config.get('PING_JWKS_URL') - result = validate_id_token(id_token, args['clientId'], jwks_url) - if result: - return result + error_code = validate_id_token(id_token, args['clientId'], jwks_url) + if error_code: + return error_code user, profile = retrieve_user(user_api_url, access_token) roles = create_user_roles(profile) update_user(user, profile, roles) @@ -387,9 +387,9 @@ class OAuth2(Resource): ) jwks_url = current_app.config.get('PING_JWKS_URL') - result = validate_id_token(id_token, args['clientId'], jwks_url) - if result: - return result + error_code = validate_id_token(id_token, args['clientId'], jwks_url) + if error_code: + return error_code user, profile = retrieve_user(user_api_url, access_token) roles = create_user_roles(profile) From 0eacbd42d7b7624d9a0c5f413f02444c99f91f14 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 7 May 2019 15:31:42 -0700 Subject: [PATCH 206/357] Converting userinfo authorization to a config var --- lemur/auth/views.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/lemur/auth/views.py b/lemur/auth/views.py index 6c479f0e..98ba747d 100644 --- a/lemur/auth/views.py +++ b/lemur/auth/views.py @@ -112,11 +112,17 @@ def retrieve_user(user_api_url, access_token): """ user_params = dict(access_token=access_token, schema='profile') + headers = {} + + if current_app.config.get('PING_INCLUDE_BEARER_TOKEN'): + headers = {'Authorization': f'Bearer {access_token}'} + # retrieve information about the current user. 
r = requests.get( user_api_url, params=user_params, - headers={'Authorization': 'Bearer {}'.format(access_token)}) + headers=headers, + ) profile = r.json() user = user_service.get_by_email(profile['email']) From 15eb7689ed42a08eaab9411cb730b8da87863f81 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Wed, 8 May 2019 09:32:46 -0400 Subject: [PATCH 207/357] Adding documentation for Vault and CFSSL Plugin changes --- docs/administration.rst | 49 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 48 insertions(+), 1 deletion(-) diff --git a/docs/administration.rst b/docs/administration.rst index 352318f5..55b7988d 100644 --- a/docs/administration.rst +++ b/docs/administration.rst @@ -642,7 +642,7 @@ for those plugins. Digicert Issuer Plugin -~~~~~~~~~~~~~~~~~~~~~~ +^^^^^^^^^^^^^^^^^^^^^^ The following configuration properties are required to use the Digicert issuer plugin. @@ -709,6 +709,33 @@ The following configuration properties are required to use the CFSSL issuer plug This is the intermediate to be used for your CA chain +.. data:: CFSSL_KEY + :noindex: + + This is the hmac key to authenticate to the CFSSL service. (Optional) + + +Hashicorp Vault Source/Destination Plugin +^^^^^^^^^^^^^^^^^^^^^^ + +Lemur can import and export certificate data to and from a Hashicorp Vault secrets store. Lemur can connect to a different Vault service per source/destination. + +.. note:: This plugin does not supersede or overlap the 3rd party Vault Issuer plugin. + +.. note:: Vault does not have any configuration properties however it does read from a file on disk for a vault access token. The Lemur service account needs read access to this file. + +Vault Source +"""""""""""" + +The Vault Source Plugin will read from one Vault object location per source defined. There is expected to be one or more certificates defined in each object in Vault. + +Vault Destination +""""""""""""""""" + +A Vault destination can be one object in Vault or a directory where all certificates will be stored as their own object by CN. + +Vault Destination supports a regex filter to prevent certificates with SAN that do not match the regex filter from being deployed. This is an optional feature per destination defined. + AWS Source/Destination Plugin ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -1194,6 +1221,26 @@ CFSSL :Description: Basic support for generating certificates from the private certificate authority CFSSL +Vault +----- + +:Authors: + Christopher Jolley +:Type: + Source +:Description: + Source plugin imports certificates from Hashicorp Vault secret store. + +Vault +----- + +:Authors: + Christopher Jolley +:Type: + Destination +:Description: + Destination plugin to deploy certificates to Hashicorp Vault secret store. + 3rd Party Plugins ================= From 029efeb03af78bcb84fe09458ce5d4feab7d8bb2 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Wed, 8 May 2019 09:45:13 -0400 Subject: [PATCH 208/357] fixing syntax --- docs/administration.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/administration.rst b/docs/administration.rst index 55b7988d..e0dd090e 100644 --- a/docs/administration.rst +++ b/docs/administration.rst @@ -716,7 +716,7 @@ The following configuration properties are required to use the CFSSL issuer plug Hashicorp Vault Source/Destination Plugin -^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Lemur can import and export certificate data to and from a Hashicorp Vault secrets store. Lemur can connect to a different Vault service per source/destination. 
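At its core, each configured Vault source or destination simply reads a token from the configured file and opens an hvac client against the configured URL, then reads or writes secrets under the configured mount and path. A rough sketch of that handshake, mirroring the plugin code above (the URL, token path and KV version shown are placeholders, not shipped defaults):

    # Sketch only; values are illustrative, not defaults.
    import hvac

    with open('/path/to/vault.token') as tfile:       # the vaultAuthTokenFile option
        token = tfile.readline().rstrip('\n')

    client = hvac.Client(url='https://vault.example.com:8200', token=token)  # the vaultUrl option
    client.secrets.kv.default_kv_version = '2'        # the vaultKvApiVersion option

The source plugin then reads certificates out of the secret stored at vaultMount/vaultPath/objectName, while the destination writes one secret per certificate common name.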
From 87470602fdd123306530f192b1d5260bb2506916 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 8 May 2019 07:48:08 -0700 Subject: [PATCH 209/357] Gather more metrics on certificate reissue/rotate jobs --- lemur/certificates/cli.py | 52 +++++++++++++++++++++++++-------------- 1 file changed, 34 insertions(+), 18 deletions(-) diff --git a/lemur/certificates/cli.py b/lemur/certificates/cli.py index c4a95187..04b8ec9a 100644 --- a/lemur/certificates/cli.py +++ b/lemur/certificates/cli.py @@ -153,15 +153,11 @@ def request_reissue(certificate, commit): status = SUCCESS_METRIC_STATUS except Exception as e: - sentry.captureException() - current_app.logger.exception("Error reissuing certificate.", exc_info=True) - print( - "[!] Failed to reissue certificates. Reason: {}".format( - e - ) - ) + sentry.captureException(extra={"certificate_name": str(certificate.name)}) + current_app.logger.exception(f"Error reissuing certificate: {certificate.name}", exc_info=True) + print(f"[!] Failed to reissue certificate: {certificate.name}. Reason: {e}") - metrics.send('certificate_reissue', 'counter', 1, metric_tags={'status': status}) + metrics.send('certificate_reissue', 'counter', 1, metric_tags={'status': status, 'certificate': certificate.name}) @manager.option('-e', '--endpoint', dest='endpoint_name', help='Name of the endpoint you wish to rotate.') @@ -187,35 +183,55 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c endpoint = validate_endpoint(endpoint_name) if endpoint and new_cert: - print("[+] Rotating endpoint: {0} to certificate {1}".format(endpoint.name, new_cert.name)) + print(f"[+] Rotating endpoint: {endpoint.name} to certificate {new_cert.name}") request_rotation(endpoint, new_cert, message, commit) elif old_cert and new_cert: - print("[+] Rotating all endpoints from {0} to {1}".format(old_cert.name, new_cert.name)) + print(f"[+] Rotating all endpoints from {old_cert.name} to {new_cert.name}") for endpoint in old_cert.endpoints: - print("[+] Rotating {0}".format(endpoint.name)) + print(f"[+] Rotating {endpoint.name}") request_rotation(endpoint, new_cert, message, commit) else: print("[+] Rotating all endpoints that have new certificates available") for endpoint in endpoint_service.get_all_pending_rotation(): if len(endpoint.certificate.replaced) == 1: - print("[+] Rotating {0} to {1}".format(endpoint.name, endpoint.certificate.replaced[0].name)) + print(f"[+] Rotating {endpoint.name} to {endpoint.certificate.replaced[0].name}") request_rotation(endpoint, endpoint.certificate.replaced[0], message, commit) else: - metrics.send('endpoint_rotation', 'counter', 1, metric_tags={'status': FAILURE_METRIC_STATUS}) - print("[!] Failed to rotate endpoint {0} reason: Multiple replacement certificates found.".format( - endpoint.name - )) + metrics.send('endpoint_rotation', 'counter', 1, metric_tags={ + 'status': FAILURE_METRIC_STATUS, + "old_certificate_name": str(old_cert), + "new_certificate_name": str(endpoint.certificate.replaced[0].name), + "endpoint_name": str(endpoint.name), + "message": str(message), + }) + print( + f"[!] Failed to rotate endpoint {endpoint.name} reason: " + "Multiple replacement certificates found." 
+ ) status = SUCCESS_METRIC_STATUS print("[+] Done!") except Exception as e: - sentry.captureException() + sentry.captureException( + extra={ + "old_certificate_name": str(old_certificate_name), + "new_certificate_name": str(new_certificate_name), + "endpoint_name": str(endpoint_name), + "message": str(message), + }) - metrics.send('endpoint_rotation_job', 'counter', 1, metric_tags={'status': status}) + metrics.send('endpoint_rotation_job', 'counter', 1, metric_tags={ + "status": status, + "old_certificate_name": str(old_certificate_name), + "new_certificate_name": str(new_certificate_name), + "endpoint_name": str(endpoint_name), + "message": str(message), + "endpoint": str(globals().get("endpoint")) + }) @manager.option('-o', '--old-certificate', dest='old_certificate_name', help='Name of the certificate you wish to reissue.') From e33a103ca1f45020c919d873860fb02265bebb20 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 9 May 2019 14:36:56 -0700 Subject: [PATCH 210/357] Allow searching for certificates by name via API --- lemur/certificates/service.py | 13 +++ lemur/certificates/views.py | 109 ++++++++++++++++++ .../app/angular/certificates/view/view.js | 5 + .../angular/certificates/view/view.tpl.html | 2 +- 4 files changed, 128 insertions(+), 1 deletion(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 23a9a3b9..8a1b74d2 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -388,6 +388,19 @@ def render(args): return result +def query_name(certificate_name, args): + """ + Helper function that queries for a certificate by name + + :param args: + :return: + """ + query = database.session_query(Certificate) + query = query.filter(Certificate.name == certificate_name) + result = database.sort_and_page(query, Certificate, args) + return result + + def create_csr(**csr_config): """ Given a list of domains create the appropriate csr diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index fe1a1f9c..17aa418f 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -37,6 +37,114 @@ mod = Blueprint('certificates', __name__) api = Api(mod) +class CertificatesNameQuery(AuthenticatedResource): + """ Defines the 'certificates/name' endpoint """ + + def __init__(self): + self.reqparse = reqparse.RequestParser() + super(CertificatesNameQuery, self).__init__() + + @validate_schema(None, certificates_output_schema) + def get(self, certificate_name): + """ + .. http:get:: /certificates/name/ + + The current list of certificates + + **Example request**: + + .. sourcecode:: http + + GET /certificates/name/WILDCARD.test.example.net-SymantecCorporation-20160603-20180112 HTTP/1.1 + Host: example.com + Accept: application/json, text/javascript + + **Example response**: + + .. 
sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/javascript + + { + "items": [{ + "status": null, + "cn": "*.test.example.net", + "chain": "", + "csr": "-----BEGIN CERTIFICATE REQUEST-----" + "authority": { + "active": true, + "owner": "secure@example.com", + "id": 1, + "description": "verisign test authority", + "name": "verisign" + }, + "owner": "joe@example.com", + "serial": "82311058732025924142789179368889309156", + "id": 2288, + "issuer": "SymantecCorporation", + "dateCreated": "2016-06-03T06:09:42.133769+00:00", + "notBefore": "2016-06-03T00:00:00+00:00", + "notAfter": "2018-01-12T23:59:59+00:00", + "destinations": [], + "bits": 2048, + "body": "-----BEGIN CERTIFICATE-----...", + "description": null, + "deleted": null, + "notifications": [{ + "id": 1 + }], + "signingAlgorithm": "sha256", + "user": { + "username": "jane", + "active": true, + "email": "jane@example.com", + "id": 2 + }, + "active": true, + "domains": [{ + "sensitive": false, + "id": 1090, + "name": "*.test.example.net" + }], + "replaces": [], + "replaced": [], + "name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112", + "roles": [{ + "id": 464, + "description": "This is a google group based role created by Lemur", + "name": "joe@example.com" + }], + "san": null + }], + "total": 1 + } + + :query sortBy: field to sort on + :query sortDir: asc or desc + :query page: int. default is 1 + :query filter: key value pair format is k;v + :query count: count number. default is 10 + :reqheader Authorization: OAuth token to authenticate + :statuscode 200: no error + :statuscode 403: unauthenticated + + """ + parser = paginated_parser.copy() + parser.add_argument('timeRange', type=int, dest='time_range', location='args') + parser.add_argument('owner', type=inputs.boolean, location='args') + parser.add_argument('id', type=str, location='args') + parser.add_argument('active', type=inputs.boolean, location='args') + parser.add_argument('destinationId', type=int, dest="destination_id", location='args') + parser.add_argument('creator', type=str, location='args') + parser.add_argument('show', type=str, location='args') + + args = parser.parse_args() + args['user'] = g.user + return service.query_name(certificate_name, args) + + class CertificatesList(AuthenticatedResource): """ Defines the 'certificates' endpoint """ @@ -1080,6 +1188,7 @@ class CertificateRevoke(AuthenticatedResource): api.add_resource(CertificateRevoke, '/certificates//revoke', endpoint='revokeCertificate') +api.add_resource(CertificatesNameQuery, '/certificates/name/', endpoint='certificatesNameQuery') api.add_resource(CertificatesList, '/certificates', endpoint='certificates') api.add_resource(Certificates, '/certificates/', endpoint='certificate') api.add_resource(CertificatesStats, '/certificates/stats', endpoint='certificateStats') diff --git a/lemur/static/app/angular/certificates/view/view.js b/lemur/static/app/angular/certificates/view/view.js index 0008dd64..3eb0ebb2 100644 --- a/lemur/static/app/angular/certificates/view/view.js +++ b/lemur/static/app/angular/certificates/view/view.js @@ -14,6 +14,11 @@ angular.module('lemur') url: '/certificates/:name', templateUrl: '/angular/certificates/view/view.tpl.html', controller: 'CertificatesViewController' + }) + .state('certificate_name', { + url: '/certificates/name/:name', + templateUrl: '/angular/certificates/view/view.tpl.html', + controller: 'CertificatesViewController' }); }) diff --git a/lemur/static/app/angular/certificates/view/view.tpl.html 
b/lemur/static/app/angular/certificates/view/view.tpl.html
index 28b4e08e..4e60b5cc 100644
--- a/lemur/static/app/angular/certificates/view/view.tpl.html
+++ b/lemur/static/app/angular/certificates/view/view.tpl.html
@@ -47,7 +47,7 @@
- Permalink + Permalink From ed18df22db6ee96d0157e4f72d68d1d7038f8d38 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 9 May 2019 14:54:44 -0700 Subject: [PATCH 211/357] remove permalink change --- lemur/static/app/angular/certificates/view/view.js | 5 ----- lemur/static/app/angular/certificates/view/view.tpl.html | 2 +- 2 files changed, 1 insertion(+), 6 deletions(-) diff --git a/lemur/static/app/angular/certificates/view/view.js b/lemur/static/app/angular/certificates/view/view.js index 3eb0ebb2..0008dd64 100644 --- a/lemur/static/app/angular/certificates/view/view.js +++ b/lemur/static/app/angular/certificates/view/view.js @@ -14,11 +14,6 @@ angular.module('lemur') url: '/certificates/:name', templateUrl: '/angular/certificates/view/view.tpl.html', controller: 'CertificatesViewController' - }) - .state('certificate_name', { - url: '/certificates/name/:name', - templateUrl: '/angular/certificates/view/view.tpl.html', - controller: 'CertificatesViewController' }); }) diff --git a/lemur/static/app/angular/certificates/view/view.tpl.html b/lemur/static/app/angular/certificates/view/view.tpl.html index 4e60b5cc..28b4e08e 100644 --- a/lemur/static/app/angular/certificates/view/view.tpl.html +++ b/lemur/static/app/angular/certificates/view/view.tpl.html @@ -47,7 +47,7 @@
- Permalink + Permalink From 3f10b43254bec0c850322cd84445aad1d3ed14c3 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 9 May 2019 15:00:09 -0700 Subject: [PATCH 212/357] Ignore bandit error --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index d5b1698c..dfa96543 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -68,7 +68,7 @@ copyright = u'2018, Netflix Inc.' base_dir = os.path.join(os.path.dirname(__file__), os.pardir) about = {} with open(os.path.join(base_dir, "lemur", "__about__.py")) as f: - exec(f.read(), about) + exec(f.read(), about) # nosec version = release = about["__version__"] From f452a7ce68d891459c4a868f0e81f623fd5ec6e6 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Sat, 11 May 2019 18:06:51 -0700 Subject: [PATCH 213/357] adding a new API for faster certificate lookup. The new API api/1/certificates/valid returns only non-expired (not_after >= today) certs which have auto-rotate enabled: cn is a required parameter: http://localhost:8000/api/1/certificates/valid?filter=cn;example.com cn can also be a database string wildcard ('%'): http://localhost:8000/api/1/certificates/valid?filter=cn;% owner is the additional parameter, and must be the email address of the owner: http://localhost:8000/api/1/certificates/valid?filter=cn;example.com&owner=hossein@example.com given owner and a database string wildcard ('%') one can retrieve all certs for that owner, which are still valid, and have auto-rotate enabled: http://localhost:8000/api/1/certificates/valid?filter=cn;%&owner=hossein@example.com --- lemur/certificates/service.py | 24 +++++++++ lemur/certificates/views.py | 97 +++++++++++++++++++++++++++++++++++ lemur/common/utils.py | 1 + 3 files changed, 122 insertions(+) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 8a1b74d2..815349ff 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -401,6 +401,30 @@ def query_name(certificate_name, args): return result +def query_common_name(common_name, args): + """ + Helper function that queries for not expired certificates by common name and owner which have auto-rotate enabled + + :param common_name: + :param args: + :return: + """ + owner = args.pop('owner') + if not owner: + owner = '%' + + # only not expired certificates + current_time = arrow.utcnow() + + result = Certificate.query.filter(Certificate.cn.ilike(common_name)) \ + .filter(Certificate.owner.ilike(owner))\ + .filter(Certificate.not_after >= current_time.format('YYYY-MM-DD')) \ + .filter(Certificate.rotation.is_(True))\ + .all() + + return result + + def create_csr(**csr_config): """ Given a list of domains create the appropriate csr diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index 17aa418f..48f6d672 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -37,6 +37,102 @@ mod = Blueprint('certificates', __name__) api = Api(mod) +class CertificatesListValid(AuthenticatedResource): + """ Defines the 'certificates/valid' endpoint """ + + def __init__(self): + self.reqparse = reqparse.RequestParser() + super(CertificatesListValid, self).__init__() + + @validate_schema(None, certificates_output_schema) + def get(self): + """ + .. http:get:: /certificates/valid/ + + The current list of not-expired certificates for a given common name, and owner + + **Example request**: + + .. 
sourcecode:: http + GET /certificates/valid?filter=cn;*.test.example.net&owner=joe@example.com + HTTP/1.1 + Host: example.com + Accept: application/json, text/javascript + + **Example response**: + + .. sourcecode:: http + + HTTP/1.1 200 OK + Vary: Accept + Content-Type: text/javascript + + { + "items": [{ + "status": null, + "cn": "*.test.example.net", + "chain": "", + "csr": "-----BEGIN CERTIFICATE REQUEST-----" + "authority": { + "active": true, + "owner": "secure@example.com", + "id": 1, + "description": "verisign test authority", + "name": "verisign" + }, + "owner": "joe@example.com", + "serial": "82311058732025924142789179368889309156", + "id": 2288, + "issuer": "SymantecCorporation", + "dateCreated": "2016-06-03T06:09:42.133769+00:00", + "notBefore": "2016-06-03T00:00:00+00:00", + "notAfter": "2018-01-12T23:59:59+00:00", + "destinations": [], + "bits": 2048, + "body": "-----BEGIN CERTIFICATE-----...", + "description": null, + "deleted": null, + "notifications": [{ + "id": 1 + }], + "signingAlgorithm": "sha256", + "user": { + "username": "jane", + "active": true, + "email": "jane@example.com", + "id": 2 + }, + "active": true, + "domains": [{ + "sensitive": false, + "id": 1090, + "name": "*.test.example.net" + }], + "replaces": [], + "replaced": [], + "name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112", + "roles": [{ + "id": 464, + "description": "This is a google group based role created by Lemur", + "name": "joe@example.com" + }], + "san": null + }], + "total": 1 + } + + :reqheader Authorization: OAuth token to authenticate + :statuscode 200: no error + :statuscode 403: unauthenticated + + """ + parser = paginated_parser.copy() + args = parser.parse_args() + args['user'] = g.user + common_name = args['filter'].split(';')[1] + return service.query_common_name(common_name, args) + + class CertificatesNameQuery(AuthenticatedResource): """ Defines the 'certificates/name' endpoint """ @@ -1190,6 +1286,7 @@ class CertificateRevoke(AuthenticatedResource): api.add_resource(CertificateRevoke, '/certificates//revoke', endpoint='revokeCertificate') api.add_resource(CertificatesNameQuery, '/certificates/name/', endpoint='certificatesNameQuery') api.add_resource(CertificatesList, '/certificates', endpoint='certificates') +api.add_resource(CertificatesListValid, '/certificates/valid', endpoint='certificatesListValid') api.add_resource(Certificates, '/certificates/', endpoint='certificate') api.add_resource(CertificatesStats, '/certificates/stats', endpoint='certificateStats') api.add_resource(CertificatesUpload, '/certificates/upload', endpoint='certificateUpload') diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 24ff5784..40f828f3 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -30,6 +30,7 @@ paginated_parser.add_argument('page', type=int, default=1, location='args') paginated_parser.add_argument('sortDir', type=str, dest='sort_dir', location='args') paginated_parser.add_argument('sortBy', type=str, dest='sort_by', location='args') paginated_parser.add_argument('filter', type=str, location='args') +paginated_parser.add_argument('owner', type=str, location='args') def get_psuedo_random_string(): From 565142f98557f02cd54e93e88c83edcb46ca11ab Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 14 May 2019 12:52:30 -0700 Subject: [PATCH 214/357] Add soft timeouts to celery jobs; Check for PEM in LE order --- lemur/common/celery.py | 38 ++++++++++++++++++++++++------ lemur/plugins/lemur_acme/plugin.py | 7 +++++- requirements-dev.txt | 14 
+++++------ requirements-docs.txt | 14 +++++------ requirements-tests.txt | 21 +++++++++-------- requirements.txt | 12 +++++----- 6 files changed, 68 insertions(+), 38 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 10747d31..45e3fd78 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -12,9 +12,11 @@ import sys from datetime import datetime, timezone, timedelta from celery import Celery +from celery.exceptions import SoftTimeLimitExceeded from flask import current_app from lemur.authorities.service import get as get_authority +from lemur.extensions import metrics, sentry from lemur.factory import create_app from lemur.notifications.messaging import send_pending_failure_notification from lemur.pending_certificates import service as pending_certificate_service @@ -62,7 +64,7 @@ def is_task_active(fun, task_id, args): return False -@celery.task() +@celery.task(soft_time_limit=600) def fetch_acme_cert(id): """ Attempt to get the full certificate for the pending certificate listed. @@ -70,11 +72,24 @@ def fetch_acme_cert(id): Args: id: an id of a PendingCertificate """ + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + log_data = { "function": "{}.{}".format(__name__, sys._getframe().f_code.co_name), - "message": "Resolving pending certificate {}".format(id) + "message": "Resolving pending certificate {}".format(id), + "task_id": task_id, + "id": id, } + current_app.logger.debug(log_data) + + if task_id and is_task_active(log_data["function"], task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + pending_certs = pending_certificate_service.get_pending_certs([id]) new = 0 failed = 0 @@ -192,7 +207,7 @@ def remove_old_acme_certs(): log_data['pending_cert_name'] = cert.name log_data['message'] = "Deleting pending certificate" current_app.logger.debug(log_data) - pending_certificate_service.delete(cert.id) + pending_certificate_service.delete(cert) @celery.task() @@ -231,7 +246,7 @@ def sync_all_sources(): sync_source.delay(source.label) -@celery.task() +@celery.task(soft_time_limit=3600) def sync_source(source): """ This celery task will sync the specified source. @@ -241,7 +256,9 @@ def sync_source(source): """ function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) - task_id = celery.current_task.request.id + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id log_data = { "function": function, "message": "Syncing source", @@ -250,11 +267,18 @@ def sync_source(source): } current_app.logger.debug(log_data) - if is_task_active(function, task_id, (source,)): + if task_id and is_task_active(function, task_id, (source,)): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return - sync([source]) + try: + sync([source]) + except SoftTimeLimitExceeded: + log_data["message"] = "Error syncing source: Time limit exceeded." 
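+        # Celery raises SoftTimeLimitExceeded inside sync() once this task's
+        # soft_time_limit (3600 seconds, set above) elapses; record the timeout
+        # and bail out instead of letting the worker hang on a slow source.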
+ sentry.captureException() + metrics.send('sync_source_timeout', 'counter', 1, metric_tags={'source': source}) + return + log_data["message"] = "Done syncing source" current_app.logger.debug(log_data) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index 06dec882..d9c41968 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -17,7 +17,7 @@ import time import OpenSSL.crypto import josepy as jose -from acme import challenges, messages +from acme import challenges, errors, messages from acme.client import BackwardsCompatibleClientV2, ClientNetwork from acme.errors import PollError, TimeoutError, WildcardUnsupportedError from acme.messages import Error as AcmeError @@ -155,6 +155,11 @@ class AcmeHandler(object): metrics.send('request_certificate_error', 'counter', 1) current_app.logger.error(f"Unable to resolve Acme order: {order.uri}", exc_info=True) raise + except errors.ValidationError: + if order.fullchain_pem: + orderr = order + else: + raise pem_certificate = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, diff --git a/requirements-dev.txt b/requirements-dev.txt index 29509d99..1a5b5f9d 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file requirements-dev.txt requirements-dev.in -U --no-index +# pip-compile --no-index --output-file=requirements-dev.txt requirements-dev.in # aspy.yaml==1.2.0 # via pre-commit bleach==3.1.0 # via readme-renderer @@ -11,26 +11,26 @@ cfgv==1.6.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.4.2 # via pre-commit +identify==1.4.3 # via pre-commit idna==2.8 # via requests -importlib-metadata==0.9 # via pre-commit +importlib-metadata==0.12 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.16.0 +pre-commit==1.16.1 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 -pygments==2.3.1 # via readme-renderer +pygments==2.4.0 # via readme-renderer pyyaml==5.1 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine requests==2.21.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit -tqdm==4.31.1 # via twine +tqdm==4.32.1 # via twine twine==1.13.0 urllib3==1.24.3 # via requests virtualenv==16.5.0 # via pre-commit webencodings==0.5.1 # via bleach -zipp==0.4.0 # via importlib-metadata +zipp==0.5.0 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index fef37c08..f23de8f4 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -2,9 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index +# pip-compile --no-index --output-file=requirements-docs.txt requirements-docs.in # -acme==0.34.1 +acme==0.34.2 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.10 @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.6.0.0 blinker==1.4 -boto3==1.9.143 -botocore==1.12.143 +boto3==1.9.147 +botocore==1.12.147 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -54,11 +54,11 @@ josepy==1.1.0 jsonlines==1.2.0 kombu==4.5.0 lockfile==0.12.2 -mako==1.0.9 +mako==1.0.10 markupsafe==1.1.1 marshmallow-sqlalchemy==0.16.3 
marshmallow==2.19.2 -mock==3.0.4 +mock==3.0.5 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx paramiko==2.4.2 @@ -68,7 +68,7 @@ pyasn1-modules==0.2.5 pyasn1==0.4.5 pycparser==2.19 pycryptodomex==3.8.1 -pygments==2.3.1 # via sphinx +pygments==2.4.0 # via sphinx pyjks==19.0.0 pyjwt==1.7.1 pynacl==1.3.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index 5d28412c..27837359 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -2,19 +2,19 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file requirements-tests.txt requirements-tests.in -U --no-index +# pip-compile --no-index --output-file=requirements-tests.txt requirements-tests.in # asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via pytest aws-sam-translator==1.11.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto -boto3==1.9.143 # via aws-sam-translator, moto +boto3==1.9.147 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.143 # via aws-xray-sdk, boto3, moto, s3transfer +botocore==1.12.147 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.3 # via cryptography -cfn-lint==0.19.1 # via moto +cfn-lint==0.20.1 # via moto chardet==3.0.4 # via requests click==7.0 # via flask coverage==4.5.3 @@ -23,8 +23,8 @@ docker-pycreds==0.4.0 # via docker docker==3.7.2 # via moto docutils==0.14 # via botocore ecdsa==0.13.2 # via python-jose -factory-boy==2.11.1 -faker==1.0.5 +factory-boy==2.12.0 +faker==1.0.7 flask==1.0.2 # via pytest-flask freezegun==0.3.11 future==0.17.1 # via aws-xray-sdk, python-jose @@ -38,18 +38,18 @@ jsonpickle==1.1 # via aws-xray-sdk jsonpointer==2.0 # via jsonpatch jsonschema==2.6.0 # via aws-sam-translator, cfn-lint markupsafe==1.1.1 # via jinja2 -mock==3.0.4 # via moto +mock==3.0.5 # via moto more-itertools==7.0.0 # via pytest moto==1.3.8 nose==1.3.7 -pluggy==0.9.0 # via pytest +pluggy==0.11.0 # via pytest py==1.8.0 # via pytest pyasn1==0.4.5 # via rsa pycparser==2.19 # via cffi pyflakes==2.1.1 -pytest-flask==0.14.0 +pytest-flask==0.15.0 pytest-mock==1.10.4 -pytest==4.4.1 +pytest==4.5.0 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==3.0.1 # via moto pytz==2019.1 # via moto @@ -62,6 +62,7 @@ s3transfer==0.2.0 # via boto3 six==1.12.0 # via aws-sam-translator, cfn-lint, cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker urllib3==1.24.3 # via botocore, requests +wcwidth==0.1.7 # via pytest websocket-client==0.56.0 # via docker werkzeug==0.15.2 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk diff --git a/requirements.txt b/requirements.txt index fe27838b..935e85ca 100644 --- a/requirements.txt +++ b/requirements.txt @@ -2,9 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file requirements.txt requirements.in -U --no-index +# pip-compile --no-index --output-file=requirements.txt requirements.in # -acme==0.34.1 +acme==0.34.2 alembic-autogenerate-enums==0.0.2 alembic==1.0.10 # via flask-migrate amqp==2.4.2 # via kombu @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.143 -botocore==1.12.143 +boto3==1.9.147 +botocore==1.12.147 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -51,11 +51,11 @@ josepy==1.1.0 # via acme 
jsonlines==1.2.0 # via cloudflare kombu==4.5.0 lockfile==0.12.2 -mako==1.0.9 # via alembic +mako==1.0.10 # via alembic markupsafe==1.1.1 # via jinja2, mako marshmallow-sqlalchemy==0.16.3 marshmallow==2.19.2 -mock==3.0.4 # via acme +mock==3.0.5 # via acme ndg-httpsclient==0.5.1 paramiko==2.4.2 pem==19.1.0 From 5d8f71c3e405d8bede63f844383f8de97b47f296 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 14 May 2019 13:02:24 -0700 Subject: [PATCH 215/357] nt --- lemur/common/celery.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 45e3fd78..ce386ffd 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -275,6 +275,7 @@ def sync_source(source): sync([source]) except SoftTimeLimitExceeded: log_data["message"] = "Error syncing source: Time limit exceeded." + current_app.logger.error(log_data) sentry.captureException() metrics.send('sync_source_timeout', 'counter', 1, metric_tags={'source': source}) return From 7e92edc70af1d10e506e520075415b88ce2f16f9 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 15 May 2019 11:43:59 -0700 Subject: [PATCH 216/357] Set resolved cert ID before resolving cert; Ignore sentry exceptions when no records on deletion --- lemur/common/celery.py | 4 ++-- lemur/plugins/lemur_acme/dyn.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index ce386ffd..23eabddb 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -121,11 +121,11 @@ def fetch_acme_cert(id): final_cert = pending_certificate_service.create_certificate(pending_cert, real_cert, pending_cert.user) pending_certificate_service.update( cert.get("pending_cert").id, - resolved=True + resolved_cert_id=final_cert.id ) pending_certificate_service.update( cert.get("pending_cert").id, - resolved_cert_id=final_cert.id + resolved=True ) # add metrics to metrics extension new += 1 diff --git a/lemur/plugins/lemur_acme/dyn.py b/lemur/plugins/lemur_acme/dyn.py index 4159532c..db33caf0 100644 --- a/lemur/plugins/lemur_acme/dyn.py +++ b/lemur/plugins/lemur_acme/dyn.py @@ -142,7 +142,6 @@ def delete_txt_record(change_id, account_number, domain, token): try: all_txt_records = node.get_all_records_by_type('TXT') except DynectGetError: - sentry.captureException() metrics.send('delete_txt_record_geterror', 'counter', 1) # No Text Records remain or host is not in the zone anymore because all records have been deleted. 
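         # This is an expected condition during cleanup rather than a failure,
         # so it is tracked with a metric only instead of a Sentry event.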
return From 26d10e8b9825122457d68aa6e3d1260d06cdc6c5 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 15 May 2019 11:47:53 -0700 Subject: [PATCH 217/357] change ordering in more places --- lemur/pending_certificates/cli.py | 8 ++++---- lemur/pending_certificates/service.py | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lemur/pending_certificates/cli.py b/lemur/pending_certificates/cli.py index ccad8de5..65e2e19a 100644 --- a/lemur/pending_certificates/cli.py +++ b/lemur/pending_certificates/cli.py @@ -42,11 +42,11 @@ def fetch(ids): final_cert = pending_certificate_service.create_certificate(cert, real_cert, cert.user) pending_certificate_service.update( cert.id, - resolved=True + resolved_cert_id=final_cert.id ) pending_certificate_service.update( cert.id, - resolved_cert_id=final_cert.id + resolved=True ) # add metrics to metrics extension new += 1 @@ -100,11 +100,11 @@ def fetch_all_acme(): final_cert = pending_certificate_service.create_certificate(pending_cert, real_cert, pending_cert.user) pending_certificate_service.update( pending_cert.id, - resolved=True + resolved_cert_id=final_cert.id ) pending_certificate_service.update( pending_cert.id, - resolved_cert_id=final_cert.id + resolved=True ) # add metrics to metrics extension new += 1 diff --git a/lemur/pending_certificates/service.py b/lemur/pending_certificates/service.py index 56b6e097..287bd42b 100644 --- a/lemur/pending_certificates/service.py +++ b/lemur/pending_certificates/service.py @@ -259,13 +259,13 @@ def upload(pending_certificate_id, **kwargs): final_cert = create_certificate(pending_cert, partial_cert, pending_cert.user) - update( - pending_cert.id, - resolved=True - ) pending_cert_final_result = update( pending_cert.id, resolved_cert_id=final_cert.id ) + update( + pending_cert.id, + resolved=True + ) return pending_cert_final_result From e3c5490d25e1925bcc883da03714aa9984575233 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 15 May 2019 13:36:40 -0700 Subject: [PATCH 218/357] Expose exact response from digicert as error --- lemur/plugins/lemur_digicert/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_digicert/plugin.py b/lemur/plugins/lemur_digicert/plugin.py index 619b24e7..a65c02ff 100644 --- a/lemur/plugins/lemur_digicert/plugin.py +++ b/lemur/plugins/lemur_digicert/plugin.py @@ -187,7 +187,7 @@ def handle_cis_response(response): :return: """ if response.status_code > 399: - raise Exception(response.json()['errors'][0]['message']) + raise Exception(response.text) return response.json() From 68fd1556b2462aa8243f5e5d350c23079d94cedf Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 16 May 2019 07:57:02 -0700 Subject: [PATCH 219/357] Black lint all the things --- .pre-commit-config.yaml | 5 + lemur/__about__.py | 12 +- lemur/__init__.py | 36 +- lemur/api_keys/cli.py | 21 +- lemur/api_keys/models.py | 11 +- lemur/api_keys/schemas.py | 10 +- lemur/api_keys/service.py | 10 +- lemur/api_keys/views.py | 94 +- lemur/auth/ldap.py | 110 +- lemur/auth/permissions.py | 30 +- lemur/auth/service.py | 85 +- lemur/auth/views.py | 376 +++--- lemur/authorities/models.py | 62 +- lemur/authorities/schemas.py | 49 +- lemur/authorities/service.py | 73 +- lemur/authorities/views.py | 45 +- lemur/authorizations/models.py | 2 +- lemur/certificates/cli.py | 238 +++- lemur/certificates/hooks.py | 19 +- lemur/certificates/models.py | 280 ++-- lemur/certificates/schemas.py | 175 ++- lemur/certificates/service.py | 310 +++-- 
lemur/certificates/utils.py | 15 +- lemur/certificates/verify.py | 56 +- lemur/certificates/views.py | 252 ++-- lemur/common/celery.py | 68 +- lemur/common/defaults.py | 69 +- lemur/common/fields.py | 272 ++-- lemur/common/health.py | 10 +- lemur/common/managers.py | 10 +- lemur/common/missing.py | 10 +- lemur/common/schema.py | 58 +- lemur/common/utils.py | 100 +- lemur/common/validators.py | 104 +- lemur/constants.py | 44 +- lemur/database.py | 36 +- lemur/default.conf.py | 1 + lemur/defaults/views.py | 23 +- lemur/destinations/models.py | 2 +- lemur/destinations/schemas.py | 2 +- lemur/destinations/service.py | 34 +- lemur/destinations/views.py | 46 +- lemur/dns_providers/cli.py | 6 +- lemur/dns_providers/models.py | 17 +- lemur/dns_providers/service.py | 72 +- lemur/dns_providers/views.py | 27 +- lemur/domains/models.py | 11 +- lemur/domains/service.py | 6 +- lemur/domains/views.py | 28 +- lemur/endpoints/cli.py | 21 +- lemur/endpoints/models.py | 60 +- lemur/endpoints/service.py | 48 +- lemur/endpoints/views.py | 9 +- lemur/exceptions.py | 4 +- lemur/extensions.py | 8 + lemur/factory.py | 85 +- lemur/logs/models.py | 18 +- lemur/logs/service.py | 24 +- lemur/logs/views.py | 9 +- lemur/manage.py | 176 ++- lemur/metrics.py | 8 +- lemur/migrations/env.py | 25 +- lemur/migrations/versions/131ec6accff5_.py | 23 +- lemur/migrations/versions/1ae8e3104db8_.py | 12 +- lemur/migrations/versions/1db4f82bc780_.py | 10 +- lemur/migrations/versions/29d8c8455c86_.py | 84 +- lemur/migrations/versions/318b66568358_.py | 6 +- lemur/migrations/versions/3307381f3b88_.py | 160 ++- lemur/migrations/versions/33de094da890_.py | 20 +- lemur/migrations/versions/3adfdd6598df_.py | 122 +- lemur/migrations/versions/412b22cb656a_.py | 119 +- lemur/migrations/versions/449c3d5c7299_.py | 10 +- lemur/migrations/versions/4c50b903d1ae_.py | 9 +- lemur/migrations/versions/556ceb3e3c3e_.py | 204 ++- lemur/migrations/versions/5770674184de_.py | 8 +- lemur/migrations/versions/5ae0ecefb01f_.py | 15 +- lemur/migrations/versions/5bc47fa7cac4_.py | 10 +- lemur/migrations/versions/5e680529b666_.py | 16 +- lemur/migrations/versions/6006c79b6011_.py | 8 +- lemur/migrations/versions/7ead443ba911_.py | 9 +- lemur/migrations/versions/7f71c0cea31a_.py | 24 +- lemur/migrations/versions/8ae67285ff14_.py | 22 +- lemur/migrations/versions/932525b82f1a_.py | 8 +- lemur/migrations/versions/9392b9f9a805_.py | 19 +- lemur/migrations/versions/984178255c83_.py | 14 +- lemur/migrations/versions/9f79024fe67b_.py | 18 +- lemur/migrations/versions/a02a678ddc25_.py | 51 +- lemur/migrations/versions/ac483cfeb230_.py | 22 +- lemur/migrations/versions/b29e2c4bf8c9_.py | 24 +- lemur/migrations/versions/c05a8998b371_.py | 26 +- lemur/migrations/versions/c87cb989af04_.py | 8 +- lemur/migrations/versions/ce547319f7be_.py | 8 +- lemur/migrations/versions/e3691fc396e9_.py | 31 +- lemur/migrations/versions/ee827d1e1974_.py | 41 +- lemur/migrations/versions/f2383bf08fbc_.py | 15 +- lemur/models.py | 254 ++-- lemur/notifications/cli.py | 16 +- lemur/notifications/messaging.py | 120 +- lemur/notifications/models.py | 12 +- lemur/notifications/schemas.py | 8 +- lemur/notifications/service.py | 70 +- lemur/notifications/views.py | 49 +- lemur/pending_certificates/cli.py | 53 +- lemur/pending_certificates/models.py | 141 +- lemur/pending_certificates/schemas.py | 41 +- lemur/pending_certificates/service.py | 131 +- lemur/pending_certificates/views.py | 98 +- lemur/plugins/base/manager.py | 25 +- lemur/plugins/base/v1.py | 10 +- lemur/plugins/bases/destination.py | 
24 +- lemur/plugins/bases/export.py | 3 +- lemur/plugins/bases/issuer.py | 3 +- lemur/plugins/bases/metric.py | 6 +- lemur/plugins/bases/notification.py | 28 +- lemur/plugins/bases/source.py | 12 +- lemur/plugins/lemur_acme/__init__.py | 5 +- lemur/plugins/lemur_acme/cloudflare.py | 32 +- lemur/plugins/lemur_acme/dyn.py | 129 +- lemur/plugins/lemur_acme/plugin.py | 383 +++--- lemur/plugins/lemur_acme/route53.py | 43 +- lemur/plugins/lemur_acme/tests/test_acme.py | 240 ++-- lemur/plugins/lemur_adcs/__init__.py | 5 +- lemur/plugins/lemur_adcs/plugin.py | 102 +- lemur/plugins/lemur_atlas/__init__.py | 5 +- lemur/plugins/lemur_atlas/plugin.py | 71 +- lemur/plugins/lemur_aws/__init__.py | 5 +- lemur/plugins/lemur_aws/ec2.py | 10 +- lemur/plugins/lemur_aws/elb.py | 147 ++- lemur/plugins/lemur_aws/iam.py | 58 +- lemur/plugins/lemur_aws/plugin.py | 291 +++-- lemur/plugins/lemur_aws/s3.py | 20 +- lemur/plugins/lemur_aws/sts.py | 46 +- lemur/plugins/lemur_aws/tests/test_elb.py | 21 +- lemur/plugins/lemur_aws/tests/test_iam.py | 16 +- lemur/plugins/lemur_aws/tests/test_plugin.py | 3 +- lemur/plugins/lemur_cfssl/__init__.py | 5 +- lemur/plugins/lemur_cfssl/plugin.py | 81 +- lemur/plugins/lemur_cfssl/tests/test_cfssl.py | 3 +- lemur/plugins/lemur_cryptography/__init__.py | 5 +- lemur/plugins/lemur_cryptography/plugin.py | 105 +- .../tests/test_cryptography.py | 34 +- lemur/plugins/lemur_csr/__init__.py | 5 +- lemur/plugins/lemur_csr/plugin.py | 32 +- .../lemur_csr/tests/test_csr_export.py | 3 +- lemur/plugins/lemur_digicert/__init__.py | 5 +- lemur/plugins/lemur_digicert/plugin.py | 347 ++--- .../lemur_digicert/tests/test_digicert.py | 192 +-- lemur/plugins/lemur_email/__init__.py | 5 +- lemur/plugins/lemur_email/plugin.py | 70 +- lemur/plugins/lemur_email/templates/config.py | 18 +- lemur/plugins/lemur_email/tests/test_email.py | 15 +- lemur/plugins/lemur_jks/__init__.py | 5 +- lemur/plugins/lemur_jks/plugin.py | 90 +- lemur/plugins/lemur_jks/tests/test_jks.py | 83 +- lemur/plugins/lemur_kubernetes/__init__.py | 5 +- lemur/plugins/lemur_kubernetes/plugin.py | 279 ++-- lemur/plugins/lemur_openssl/__init__.py | 5 +- lemur/plugins/lemur_openssl/plugin.py | 85 +- .../lemur_openssl/tests/test_openssl.py | 8 +- lemur/plugins/lemur_sftp/__init__.py | 5 +- lemur/plugins/lemur_sftp/plugin.py | 164 +-- lemur/plugins/lemur_slack/__init__.py | 5 +- lemur/plugins/lemur_slack/plugin.py | 157 +-- lemur/plugins/lemur_slack/tests/test_slack.py | 36 +- .../lemur_statsd/lemur_statsd/__init__.py | 4 +- .../lemur_statsd/lemur_statsd/plugin.py | 30 +- lemur/plugins/lemur_statsd/setup.py | 19 +- lemur/plugins/lemur_vault_dest/__init__.py | 5 +- lemur/plugins/lemur_vault_dest/plugin.py | 314 ++--- lemur/plugins/lemur_verisign/__init__.py | 5 +- lemur/plugins/lemur_verisign/plugin.py | 168 +-- .../lemur_verisign/tests/test_verisign.py | 4 +- lemur/plugins/utils.py | 8 +- lemur/plugins/views.py | 14 +- lemur/policies/cli.py | 4 +- lemur/policies/models.py | 6 +- lemur/policies/service.py | 2 +- lemur/reporting/cli.py | 68 +- lemur/reporting/service.py | 18 +- lemur/roles/models.py | 36 +- lemur/roles/service.py | 22 +- lemur/roles/views.py | 76 +- lemur/schemas.py | 87 +- lemur/sources/cli.py | 92 +- lemur/sources/models.py | 2 +- lemur/sources/schemas.py | 2 +- lemur/sources/service.py | 116 +- lemur/sources/views.py | 39 +- lemur/tests/conf.py | 74 +- lemur/tests/conftest.py | 85 +- lemur/tests/factories.py | 102 +- lemur/tests/plugins/destination_plugin.py | 10 +- lemur/tests/plugins/issuer_plugin.py | 24 +- 
lemur/tests/plugins/notification_plugin.py | 10 +- lemur/tests/plugins/source_plugin.py | 10 +- lemur/tests/test_api_keys.py | 475 ++++--- lemur/tests/test_authorities.py | 390 ++++-- lemur/tests/test_certificates.py | 1159 +++++++++++------ lemur/tests/test_defaults.py | 135 +- lemur/tests/test_destinations.py | 207 ++- lemur/tests/test_domains.py | 182 ++- lemur/tests/test_endpoints.py | 187 ++- lemur/tests/test_ldap.py | 54 +- lemur/tests/test_logs.py | 27 +- lemur/tests/test_messaging.py | 33 +- lemur/tests/test_missing.py | 9 +- lemur/tests/test_notifications.py | 211 ++- lemur/tests/test_pending_certificates.py | 84 +- lemur/tests/test_roles.py | 247 ++-- lemur/tests/test_schemas.py | 24 +- lemur/tests/test_sources.py | 219 ++-- lemur/tests/test_users.py | 225 ++-- lemur/tests/test_utils.py | 61 +- lemur/tests/test_validators.py | 14 +- lemur/tests/test_verify.py | 38 +- lemur/tests/vectors.py | 15 +- lemur/users/models.py | 32 +- lemur/users/schemas.py | 6 +- lemur/users/service.py | 8 +- lemur/users/views.py | 45 +- lemur/utils.py | 10 +- requirements-dev.txt | 6 +- requirements-docs.txt | 6 +- requirements-tests.in | 1 + requirements-tests.txt | 17 +- requirements.txt | 6 +- 226 files changed, 9340 insertions(+), 5940 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f3d19151..995a8508 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,3 +8,8 @@ sha: v2.9.5 hooks: - id: jshint +- repo: https://github.com/ambv/black + rev: stable + hooks: + - id: black + language_version: python3.7 \ No newline at end of file diff --git a/lemur/__about__.py b/lemur/__about__.py index d15b7dea..766d3668 100644 --- a/lemur/__about__.py +++ b/lemur/__about__.py @@ -1,12 +1,18 @@ from __future__ import absolute_import, division, print_function __all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", ] __title__ = "lemur" -__summary__ = ("Certificate management and orchestration service") +__summary__ = "Certificate management and orchestration service" __uri__ = "https://github.com/Netflix/lemur" __version__ = "0.7.0" diff --git a/lemur/__init__.py b/lemur/__init__.py index 769e0cec..6229a3d1 100644 --- a/lemur/__init__.py +++ b/lemur/__init__.py @@ -32,14 +32,26 @@ from lemur.pending_certificates.views import mod as pending_certificates_bp from lemur.dns_providers.views import mod as dns_providers_bp from lemur.__about__ import ( - __author__, __copyright__, __email__, __license__, __summary__, __title__, - __uri__, __version__ + __author__, + __copyright__, + __email__, + __license__, + __summary__, + __title__, + __uri__, + __version__, ) __all__ = [ - "__title__", "__summary__", "__uri__", "__version__", "__author__", - "__email__", "__license__", "__copyright__", + "__title__", + "__summary__", + "__uri__", + "__version__", + "__author__", + "__email__", + "__license__", + "__copyright__", ] LEMUR_BLUEPRINTS = ( @@ -63,7 +75,9 @@ LEMUR_BLUEPRINTS = ( def create_app(config_path=None): - app = factory.create_app(app_name=__name__, blueprints=LEMUR_BLUEPRINTS, config=config_path) + app = factory.create_app( + app_name=__name__, blueprints=LEMUR_BLUEPRINTS, config=config_path + ) configure_hook(app) return app @@ -93,7 +107,7 @@ def configure_hook(app): @app.after_request def after_request(response): # Return early if we don't have the start time - if 
not hasattr(g, 'request_start_time'): + if not hasattr(g, "request_start_time"): return response # Get elapsed time in milliseconds @@ -102,12 +116,12 @@ def configure_hook(app): # Collect request/response tags tags = { - 'endpoint': request.endpoint, - 'request_method': request.method.lower(), - 'status_code': response.status_code + "endpoint": request.endpoint, + "request_method": request.method.lower(), + "status_code": response.status_code, } # Record our response time metric - metrics.send('response_time', 'TIMER', elapsed, metric_tags=tags) - metrics.send('status_code_{}'.format(response.status_code), 'counter', 1) + metrics.send("response_time", "TIMER", elapsed, metric_tags=tags) + metrics.send("status_code_{}".format(response.status_code), "counter", 1) return response diff --git a/lemur/api_keys/cli.py b/lemur/api_keys/cli.py index 2259d774..8aed0497 100644 --- a/lemur/api_keys/cli.py +++ b/lemur/api_keys/cli.py @@ -14,23 +14,32 @@ from datetime import datetime manager = Manager(usage="Handles all api key related tasks.") -@manager.option('-u', '--user-id', dest='uid', help='The User ID this access key belongs too.') -@manager.option('-n', '--name', dest='name', help='The name of this API Key.') -@manager.option('-t', '--ttl', dest='ttl', help='The TTL of this API Key. -1 for forever.') +@manager.option( + "-u", "--user-id", dest="uid", help="The User ID this access key belongs too." +) +@manager.option("-n", "--name", dest="name", help="The name of this API Key.") +@manager.option( + "-t", "--ttl", dest="ttl", help="The TTL of this API Key. -1 for forever." +) def create(uid, name, ttl): """ Create a new api key for a user. :return: """ print("[+] Creating a new api key.") - key = api_key_service.create(user_id=uid, name=name, - ttl=ttl, issued_at=int(datetime.utcnow().timestamp()), revoked=False) + key = api_key_service.create( + user_id=uid, + name=name, + ttl=ttl, + issued_at=int(datetime.utcnow().timestamp()), + revoked=False, + ) print("[+] Successfully created a new api key. Generating a JWT...") jwt = create_token(uid, key.id, key.ttl) print("[+] Your JWT is: {jwt}".format(jwt=jwt)) -@manager.option('-a', '--api-key-id', dest='aid', help='The API Key ID to revoke.') +@manager.option("-a", "--api-key-id", dest="aid", help="The API Key ID to revoke.") def revoke(aid): """ Revokes an api key for a user. 
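
A minimal usage sketch (illustrative, not from the patch): the JWT printed by the create command above is what login_required() later in this diff expects in the Authorization header, and the valid-certificates endpoint added earlier in this series can be queried with it. The host, owner, and filter values below are assumptions, as is the "Bearer" scheme (login_required() only reads the second whitespace-separated token of the header).

    # Hypothetical client-side use of an API-key JWT minted by the CLI above.
    import requests

    jwt = "eyJ..."  # placeholder for the token printed by the create command

    resp = requests.get(
        "http://localhost:8000/api/1/certificates/valid",  # illustrative host
        params={"filter": "cn;example.com", "owner": "joe@example.com"},
        headers={"Authorization": "Bearer {}".format(jwt)},
    )
    resp.raise_for_status()
    print(resp.json()["total"])  # response shape: {"items": [...], "total": N}
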
diff --git a/lemur/api_keys/models.py b/lemur/api_keys/models.py index df77edb1..fbcc3e44 100644 --- a/lemur/api_keys/models.py +++ b/lemur/api_keys/models.py @@ -12,14 +12,19 @@ from lemur.database import db class ApiKey(db.Model): - __tablename__ = 'api_keys' + __tablename__ = "api_keys" id = Column(Integer, primary_key=True) name = Column(String) - user_id = Column(Integer, ForeignKey('users.id')) + user_id = Column(Integer, ForeignKey("users.id")) ttl = Column(BigInteger) issued_at = Column(BigInteger) revoked = Column(Boolean) def __repr__(self): return "ApiKey(name={name}, user_id={user_id}, ttl={ttl}, issued_at={iat}, revoked={revoked})".format( - user_id=self.user_id, name=self.name, ttl=self.ttl, iat=self.issued_at, revoked=self.revoked) + user_id=self.user_id, + name=self.name, + ttl=self.ttl, + iat=self.issued_at, + revoked=self.revoked, + ) diff --git a/lemur/api_keys/schemas.py b/lemur/api_keys/schemas.py index a3c11417..e690b859 100644 --- a/lemur/api_keys/schemas.py +++ b/lemur/api_keys/schemas.py @@ -13,12 +13,18 @@ from lemur.users.schemas import UserNestedOutputSchema, UserInputSchema def current_user_id(): - return {'id': g.current_user.id, 'email': g.current_user.email, 'username': g.current_user.username} + return { + "id": g.current_user.id, + "email": g.current_user.email, + "username": g.current_user.username, + } class ApiKeyInputSchema(LemurInputSchema): name = fields.String(required=False) - user = fields.Nested(UserInputSchema, missing=current_user_id, default=current_user_id) + user = fields.Nested( + UserInputSchema, missing=current_user_id, default=current_user_id + ) ttl = fields.Integer() diff --git a/lemur/api_keys/service.py b/lemur/api_keys/service.py index 5ddb8a3a..ea681a62 100644 --- a/lemur/api_keys/service.py +++ b/lemur/api_keys/service.py @@ -34,7 +34,7 @@ def revoke(aid): :return: """ api_key = get(aid) - setattr(api_key, 'revoked', False) + setattr(api_key, "revoked", False) return database.update(api_key) @@ -80,10 +80,10 @@ def render(args): :return: """ query = database.session_query(ApiKey) - user_id = args.pop('user_id', None) - aid = args.pop('id', None) - has_permission = args.pop('has_permission', False) - requesting_user_id = args.pop('requesting_user_id') + user_id = args.pop("user_id", None) + aid = args.pop("id", None) + has_permission = args.pop("has_permission", False) + requesting_user_id = args.pop("requesting_user_id") if user_id: query = query.filter(ApiKey.user_id == user_id) diff --git a/lemur/api_keys/views.py b/lemur/api_keys/views.py index b7af2944..ee09d3f7 100644 --- a/lemur/api_keys/views.py +++ b/lemur/api_keys/views.py @@ -19,10 +19,16 @@ from lemur.auth.permissions import ApiKeyCreatorPermission from lemur.common.schema import validate_schema from lemur.common.utils import paginated_parser -from lemur.api_keys.schemas import api_key_input_schema, api_key_revoke_schema, api_key_output_schema, \ - api_keys_output_schema, api_key_described_output_schema, user_api_key_input_schema +from lemur.api_keys.schemas import ( + api_key_input_schema, + api_key_revoke_schema, + api_key_output_schema, + api_keys_output_schema, + api_key_described_output_schema, + user_api_key_input_schema, +) -mod = Blueprint('api_keys', __name__) +mod = Blueprint("api_keys", __name__) api = Api(mod) @@ -81,8 +87,8 @@ class ApiKeyList(AuthenticatedResource): """ parser = paginated_parser.copy() args = parser.parse_args() - args['has_permission'] = ApiKeyCreatorPermission().can() - args['requesting_user_id'] = g.current_user.id + 
args["has_permission"] = ApiKeyCreatorPermission().can() + args["requesting_user_id"] = g.current_user.id return service.render(args) @validate_schema(api_key_input_schema, api_key_output_schema) @@ -124,12 +130,26 @@ class ApiKeyList(AuthenticatedResource): :statuscode 403: unauthenticated """ if not ApiKeyCreatorPermission().can(): - if data['user']['id'] != g.current_user.id: - return dict(message="You are not authorized to create tokens for: {0}".format(data['user']['username'])), 403 + if data["user"]["id"] != g.current_user.id: + return ( + dict( + message="You are not authorized to create tokens for: {0}".format( + data["user"]["username"] + ) + ), + 403, + ) - access_token = service.create(name=data['name'], user_id=data['user']['id'], ttl=data['ttl'], - revoked=False, issued_at=int(datetime.utcnow().timestamp())) - return dict(jwt=create_token(access_token.user_id, access_token.id, access_token.ttl)) + access_token = service.create( + name=data["name"], + user_id=data["user"]["id"], + ttl=data["ttl"], + revoked=False, + issued_at=int(datetime.utcnow().timestamp()), + ) + return dict( + jwt=create_token(access_token.user_id, access_token.id, access_token.ttl) + ) class ApiKeyUserList(AuthenticatedResource): @@ -186,9 +206,9 @@ class ApiKeyUserList(AuthenticatedResource): """ parser = paginated_parser.copy() args = parser.parse_args() - args['has_permission'] = ApiKeyCreatorPermission().can() - args['requesting_user_id'] = g.current_user.id - args['user_id'] = user_id + args["has_permission"] = ApiKeyCreatorPermission().can() + args["requesting_user_id"] = g.current_user.id + args["user_id"] = user_id return service.render(args) @validate_schema(user_api_key_input_schema, api_key_output_schema) @@ -230,11 +250,25 @@ class ApiKeyUserList(AuthenticatedResource): """ if not ApiKeyCreatorPermission().can(): if user_id != g.current_user.id: - return dict(message="You are not authorized to create tokens for: {0}".format(user_id)), 403 + return ( + dict( + message="You are not authorized to create tokens for: {0}".format( + user_id + ) + ), + 403, + ) - access_token = service.create(name=data['name'], user_id=user_id, ttl=data['ttl'], - revoked=False, issued_at=int(datetime.utcnow().timestamp())) - return dict(jwt=create_token(access_token.user_id, access_token.id, access_token.ttl)) + access_token = service.create( + name=data["name"], + user_id=user_id, + ttl=data["ttl"], + revoked=False, + issued_at=int(datetime.utcnow().timestamp()), + ) + return dict( + jwt=create_token(access_token.user_id, access_token.id, access_token.ttl) + ) class ApiKeys(AuthenticatedResource): @@ -329,7 +363,9 @@ class ApiKeys(AuthenticatedResource): if not ApiKeyCreatorPermission().can(): return dict(message="You are not authorized to update this token!"), 403 - service.update(access_key, name=data['name'], revoked=data['revoked'], ttl=data['ttl']) + service.update( + access_key, name=data["name"], revoked=data["revoked"], ttl=data["ttl"] + ) return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl)) def delete(self, aid): @@ -371,7 +407,7 @@ class ApiKeys(AuthenticatedResource): return dict(message="You are not authorized to delete this token!"), 403 service.delete(access_key) - return {'result': True} + return {"result": True} class UserApiKeys(AuthenticatedResource): @@ -472,7 +508,9 @@ class UserApiKeys(AuthenticatedResource): if access_key.user_id != uid: return dict(message="You are not authorized to update this token!"), 403 - service.update(access_key, name=data['name'], 
revoked=data['revoked'], ttl=data['ttl']) + service.update( + access_key, name=data["name"], revoked=data["revoked"], ttl=data["ttl"] + ) return dict(jwt=create_token(access_key.user_id, access_key.id, access_key.ttl)) def delete(self, uid, aid): @@ -517,7 +555,7 @@ class UserApiKeys(AuthenticatedResource): return dict(message="You are not authorized to delete this token!"), 403 service.delete(access_key) - return {'result': True} + return {"result": True} class ApiKeysDescribed(AuthenticatedResource): @@ -572,8 +610,12 @@ class ApiKeysDescribed(AuthenticatedResource): return access_key -api.add_resource(ApiKeyList, '/keys', endpoint='api_keys') -api.add_resource(ApiKeys, '/keys/', endpoint='api_key') -api.add_resource(ApiKeysDescribed, '/keys//described', endpoint='api_key_described') -api.add_resource(ApiKeyUserList, '/users//keys', endpoint='user_api_keys') -api.add_resource(UserApiKeys, '/users//keys/', endpoint='user_api_key') +api.add_resource(ApiKeyList, "/keys", endpoint="api_keys") +api.add_resource(ApiKeys, "/keys/", endpoint="api_key") +api.add_resource( + ApiKeysDescribed, "/keys//described", endpoint="api_key_described" +) +api.add_resource(ApiKeyUserList, "/users//keys", endpoint="user_api_keys") +api.add_resource( + UserApiKeys, "/users//keys/", endpoint="user_api_key" +) diff --git a/lemur/auth/ldap.py b/lemur/auth/ldap.py index 7eded060..f4ceab03 100644 --- a/lemur/auth/ldap.py +++ b/lemur/auth/ldap.py @@ -14,35 +14,41 @@ from lemur.roles import service as role_service from lemur.common.utils import validate_conf, get_psuedo_random_string -class LdapPrincipal(): +class LdapPrincipal: """ Provides methods for authenticating against an LDAP server. """ + def __init__(self, args): self._ldap_validate_conf() # setup ldap config - if not args['username']: + if not args["username"]: raise Exception("missing ldap username") - if not args['password']: + if not args["password"]: self.error_message = "missing ldap password" raise Exception("missing ldap password") - self.ldap_principal = args['username'] + self.ldap_principal = args["username"] self.ldap_email_domain = current_app.config.get("LDAP_EMAIL_DOMAIN", None) - if '@' not in self.ldap_principal: - self.ldap_principal = '%s@%s' % (self.ldap_principal, self.ldap_email_domain) - self.ldap_username = args['username'] - if '@' in self.ldap_username: - self.ldap_username = args['username'].split("@")[0] - self.ldap_password = args['password'] - self.ldap_server = current_app.config.get('LDAP_BIND_URI', None) + if "@" not in self.ldap_principal: + self.ldap_principal = "%s@%s" % ( + self.ldap_principal, + self.ldap_email_domain, + ) + self.ldap_username = args["username"] + if "@" in self.ldap_username: + self.ldap_username = args["username"].split("@")[0] + self.ldap_password = args["password"] + self.ldap_server = current_app.config.get("LDAP_BIND_URI", None) self.ldap_base_dn = current_app.config.get("LDAP_BASE_DN", None) self.ldap_use_tls = current_app.config.get("LDAP_USE_TLS", False) self.ldap_cacert_file = current_app.config.get("LDAP_CACERT_FILE", None) self.ldap_default_role = current_app.config.get("LEMUR_DEFAULT_ROLE", None) self.ldap_required_group = current_app.config.get("LDAP_REQUIRED_GROUP", None) self.ldap_groups_to_roles = current_app.config.get("LDAP_GROUPS_TO_ROLES", None) - self.ldap_is_active_directory = current_app.config.get("LDAP_IS_ACTIVE_DIRECTORY", False) - self.ldap_attrs = ['memberOf'] + self.ldap_is_active_directory = current_app.config.get( + "LDAP_IS_ACTIVE_DIRECTORY", False + ) + self.ldap_attrs = 
["memberOf"] self.ldap_client = None self.ldap_groups = None @@ -60,8 +66,8 @@ class LdapPrincipal(): get_psuedo_random_string(), self.ldap_principal, True, - '', # thumbnailPhotoUrl - list(roles) + "", # thumbnailPhotoUrl + list(roles), ) else: # we add 'lemur' specific roles, so they do not get marked as removed @@ -76,7 +82,7 @@ class LdapPrincipal(): self.ldap_principal, user.active, user.profile_picture, - list(roles) + list(roles), ) return user @@ -105,9 +111,12 @@ class LdapPrincipal(): # update their 'roles' role = role_service.get_by_name(self.ldap_principal) if not role: - description = "auto generated role based on owner: {0}".format(self.ldap_principal) - role = role_service.create(self.ldap_principal, description=description, - third_party=True) + description = "auto generated role based on owner: {0}".format( + self.ldap_principal + ) + role = role_service.create( + self.ldap_principal, description=description, third_party=True + ) if not role.third_party: role = role_service.set_third_party(role.id, third_party_status=True) roles.add(role) @@ -118,9 +127,15 @@ class LdapPrincipal(): role = role_service.get_by_name(role_name) if role: if ldap_group_name in self.ldap_groups: - current_app.logger.debug("assigning role {0} to ldap user {1}".format(self.ldap_principal, role)) + current_app.logger.debug( + "assigning role {0} to ldap user {1}".format( + self.ldap_principal, role + ) + ) if not role.third_party: - role = role_service.set_third_party(role.id, third_party_status=True) + role = role_service.set_third_party( + role.id, third_party_status=True + ) roles.add(role) return roles @@ -132,7 +147,7 @@ class LdapPrincipal(): self._bind() roles = self._authorize() if not roles: - raise Exception('ldap authorization failed') + raise Exception("ldap authorization failed") return self._update_user(roles) def _bind(self): @@ -141,9 +156,12 @@ class LdapPrincipal(): list groups for a user. raise an exception on error. 
""" - if '@' not in self.ldap_principal: - self.ldap_principal = '%s@%s' % (self.ldap_principal, self.ldap_email_domain) - ldap_filter = 'userPrincipalName=%s' % self.ldap_principal + if "@" not in self.ldap_principal: + self.ldap_principal = "%s@%s" % ( + self.ldap_principal, + self.ldap_email_domain, + ) + ldap_filter = "userPrincipalName=%s" % self.ldap_principal # query ldap for auth try: @@ -159,37 +177,47 @@ class LdapPrincipal(): self.ldap_client.set_option(ldap.OPT_X_TLS_DEMAND, True) self.ldap_client.set_option(ldap.OPT_DEBUG_LEVEL, 255) if self.ldap_cacert_file: - self.ldap_client.set_option(ldap.OPT_X_TLS_CACERTFILE, self.ldap_cacert_file) + self.ldap_client.set_option( + ldap.OPT_X_TLS_CACERTFILE, self.ldap_cacert_file + ) self.ldap_client.simple_bind_s(self.ldap_principal, self.ldap_password) except ldap.INVALID_CREDENTIALS: self.ldap_client.unbind() - raise Exception('The supplied ldap credentials are invalid') + raise Exception("The supplied ldap credentials are invalid") except ldap.SERVER_DOWN: - raise Exception('ldap server unavailable') + raise Exception("ldap server unavailable") except ldap.LDAPError as e: raise Exception("ldap error: {0}".format(e)) if self.ldap_is_active_directory: # Lookup user DN, needed to search for group membership - userdn = self.ldap_client.search_s(self.ldap_base_dn, - ldap.SCOPE_SUBTREE, ldap_filter, - ['distinguishedName'])[0][1]['distinguishedName'][0] - userdn = userdn.decode('utf-8') + userdn = self.ldap_client.search_s( + self.ldap_base_dn, + ldap.SCOPE_SUBTREE, + ldap_filter, + ["distinguishedName"], + )[0][1]["distinguishedName"][0] + userdn = userdn.decode("utf-8") # Search all groups that have the userDN as a member - groupfilter = '(&(objectclass=group)(member:1.2.840.113556.1.4.1941:={0}))'.format(userdn) - lgroups = self.ldap_client.search_s(self.ldap_base_dn, ldap.SCOPE_SUBTREE, groupfilter, ['cn']) + groupfilter = "(&(objectclass=group)(member:1.2.840.113556.1.4.1941:={0}))".format( + userdn + ) + lgroups = self.ldap_client.search_s( + self.ldap_base_dn, ldap.SCOPE_SUBTREE, groupfilter, ["cn"] + ) # Create a list of group CN's from the result self.ldap_groups = [] for group in lgroups: (dn, values) = group - self.ldap_groups.append(values['cn'][0].decode('ascii')) + self.ldap_groups.append(values["cn"][0].decode("ascii")) else: - lgroups = self.ldap_client.search_s(self.ldap_base_dn, - ldap.SCOPE_SUBTREE, ldap_filter, self.ldap_attrs)[0][1]['memberOf'] + lgroups = self.ldap_client.search_s( + self.ldap_base_dn, ldap.SCOPE_SUBTREE, ldap_filter, self.ldap_attrs + )[0][1]["memberOf"] # lgroups is a list of utf-8 encoded strings # convert to a single string of groups to allow matching - self.ldap_groups = b''.join(lgroups).decode('ascii') + self.ldap_groups = b"".join(lgroups).decode("ascii") self.ldap_client.unbind() @@ -197,9 +225,5 @@ class LdapPrincipal(): """ Confirms required ldap config settings exist. 
""" - required_vars = [ - 'LDAP_BIND_URI', - 'LDAP_BASE_DN', - 'LDAP_EMAIL_DOMAIN', - ] + required_vars = ["LDAP_BIND_URI", "LDAP_BASE_DN", "LDAP_EMAIL_DOMAIN"] validate_conf(current_app, required_vars) diff --git a/lemur/auth/permissions.py b/lemur/auth/permissions.py index 68c48773..c3c57356 100644 --- a/lemur/auth/permissions.py +++ b/lemur/auth/permissions.py @@ -12,21 +12,21 @@ from collections import namedtuple from flask_principal import Permission, RoleNeed # Permissions -operator_permission = Permission(RoleNeed('operator')) -admin_permission = Permission(RoleNeed('admin')) +operator_permission = Permission(RoleNeed("operator")) +admin_permission = Permission(RoleNeed("admin")) -CertificateOwner = namedtuple('certificate', ['method', 'value']) -CertificateOwnerNeed = partial(CertificateOwner, 'role') +CertificateOwner = namedtuple("certificate", ["method", "value"]) +CertificateOwnerNeed = partial(CertificateOwner, "role") class SensitiveDomainPermission(Permission): def __init__(self): - super(SensitiveDomainPermission, self).__init__(RoleNeed('admin')) + super(SensitiveDomainPermission, self).__init__(RoleNeed("admin")) class CertificatePermission(Permission): def __init__(self, owner, roles): - needs = [RoleNeed('admin'), RoleNeed(owner), RoleNeed('creator')] + needs = [RoleNeed("admin"), RoleNeed(owner), RoleNeed("creator")] for r in roles: needs.append(CertificateOwnerNeed(str(r))) # Backwards compatibility with mixed-case role names @@ -38,29 +38,29 @@ class CertificatePermission(Permission): class ApiKeyCreatorPermission(Permission): def __init__(self): - super(ApiKeyCreatorPermission, self).__init__(RoleNeed('admin')) + super(ApiKeyCreatorPermission, self).__init__(RoleNeed("admin")) -RoleMember = namedtuple('role', ['method', 'value']) -RoleMemberNeed = partial(RoleMember, 'member') +RoleMember = namedtuple("role", ["method", "value"]) +RoleMemberNeed = partial(RoleMember, "member") class RoleMemberPermission(Permission): def __init__(self, role_id): - needs = [RoleNeed('admin'), RoleMemberNeed(role_id)] + needs = [RoleNeed("admin"), RoleMemberNeed(role_id)] super(RoleMemberPermission, self).__init__(*needs) -AuthorityCreator = namedtuple('authority', ['method', 'value']) -AuthorityCreatorNeed = partial(AuthorityCreator, 'authorityUse') +AuthorityCreator = namedtuple("authority", ["method", "value"]) +AuthorityCreatorNeed = partial(AuthorityCreator, "authorityUse") -AuthorityOwner = namedtuple('authority', ['method', 'value']) -AuthorityOwnerNeed = partial(AuthorityOwner, 'role') +AuthorityOwner = namedtuple("authority", ["method", "value"]) +AuthorityOwnerNeed = partial(AuthorityOwner, "role") class AuthorityPermission(Permission): def __init__(self, authority_id, roles): - needs = [RoleNeed('admin'), AuthorityCreatorNeed(str(authority_id))] + needs = [RoleNeed("admin"), AuthorityCreatorNeed(str(authority_id))] for r in roles: needs.append(AuthorityOwnerNeed(str(r))) diff --git a/lemur/auth/service.py b/lemur/auth/service.py index c862aa2e..0e1521b3 100644 --- a/lemur/auth/service.py +++ b/lemur/auth/service.py @@ -39,13 +39,13 @@ def get_rsa_public_key(n, e): :param e: :return: a RSA Public Key in PEM format """ - n = int(binascii.hexlify(jwt.utils.base64url_decode(bytes(n, 'utf-8'))), 16) - e = int(binascii.hexlify(jwt.utils.base64url_decode(bytes(e, 'utf-8'))), 16) + n = int(binascii.hexlify(jwt.utils.base64url_decode(bytes(n, "utf-8"))), 16) + e = int(binascii.hexlify(jwt.utils.base64url_decode(bytes(e, "utf-8"))), 16) pub = RSAPublicNumbers(e, 
n).public_key(default_backend()) return pub.public_bytes( encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo + format=serialization.PublicFormat.SubjectPublicKeyInfo, ) @@ -57,28 +57,27 @@ def create_token(user, aid=None, ttl=None): :param user: :return: """ - expiration_delta = timedelta(days=int(current_app.config.get('LEMUR_TOKEN_EXPIRATION', 1))) - payload = { - 'iat': datetime.utcnow(), - 'exp': datetime.utcnow() + expiration_delta - } + expiration_delta = timedelta( + days=int(current_app.config.get("LEMUR_TOKEN_EXPIRATION", 1)) + ) + payload = {"iat": datetime.utcnow(), "exp": datetime.utcnow() + expiration_delta} # Handle Just a User ID & User Object. if isinstance(user, int): - payload['sub'] = user + payload["sub"] = user else: - payload['sub'] = user.id + payload["sub"] = user.id if aid is not None: - payload['aid'] = aid + payload["aid"] = aid # Custom TTLs are only supported on Access Keys. if ttl is not None and aid is not None: # Tokens that are forever until revoked. if ttl == -1: - del payload['exp'] + del payload["exp"] else: - payload['exp'] = ttl - token = jwt.encode(payload, current_app.config['LEMUR_TOKEN_SECRET']) - return token.decode('unicode_escape') + payload["exp"] = ttl + token = jwt.encode(payload, current_app.config["LEMUR_TOKEN_SECRET"]) + return token.decode("unicode_escape") def login_required(f): @@ -88,49 +87,54 @@ def login_required(f): :param f: :return: """ + @wraps(f) def decorated_function(*args, **kwargs): - if not request.headers.get('Authorization'): - response = jsonify(message='Missing authorization header') + if not request.headers.get("Authorization"): + response = jsonify(message="Missing authorization header") response.status_code = 401 return response try: - token = request.headers.get('Authorization').split()[1] + token = request.headers.get("Authorization").split()[1] except Exception as e: - return dict(message='Token is invalid'), 403 + return dict(message="Token is invalid"), 403 try: - payload = jwt.decode(token, current_app.config['LEMUR_TOKEN_SECRET']) + payload = jwt.decode(token, current_app.config["LEMUR_TOKEN_SECRET"]) except jwt.DecodeError: - return dict(message='Token is invalid'), 403 + return dict(message="Token is invalid"), 403 except jwt.ExpiredSignatureError: - return dict(message='Token has expired'), 403 + return dict(message="Token has expired"), 403 except jwt.InvalidTokenError: - return dict(message='Token is invalid'), 403 + return dict(message="Token is invalid"), 403 - if 'aid' in payload: - access_key = api_key_service.get(payload['aid']) + if "aid" in payload: + access_key = api_key_service.get(payload["aid"]) if access_key.revoked: - return dict(message='Token has been revoked'), 403 + return dict(message="Token has been revoked"), 403 if access_key.ttl != -1: current_time = datetime.utcnow() - expired_time = datetime.fromtimestamp(access_key.issued_at + access_key.ttl) + expired_time = datetime.fromtimestamp( + access_key.issued_at + access_key.ttl + ) if current_time >= expired_time: - return dict(message='Token has expired'), 403 + return dict(message="Token has expired"), 403 - user = user_service.get(payload['sub']) + user = user_service.get(payload["sub"]) if not user.active: - return dict(message='User is not currently active'), 403 + return dict(message="User is not currently active"), 403 g.current_user = user if not g.current_user: - return dict(message='You are not logged in'), 403 + return dict(message="You are not logged in"), 403 # Tell Flask-Principal 
the identity changed - identity_changed.send(current_app._get_current_object(), identity=Identity(g.current_user.id)) + identity_changed.send( + current_app._get_current_object(), identity=Identity(g.current_user.id) + ) return f(*args, **kwargs) @@ -144,18 +148,18 @@ def fetch_token_header(token): :param token: :return: :raise jwt.DecodeError: """ - token = token.encode('utf-8') + token = token.encode("utf-8") try: - signing_input, crypto_segment = token.rsplit(b'.', 1) - header_segment, payload_segment = signing_input.split(b'.', 1) + signing_input, crypto_segment = token.rsplit(b".", 1) + header_segment, payload_segment = signing_input.split(b".", 1) except ValueError: - raise jwt.DecodeError('Not enough segments') + raise jwt.DecodeError("Not enough segments") try: - return json.loads(jwt.utils.base64url_decode(header_segment).decode('utf-8')) + return json.loads(jwt.utils.base64url_decode(header_segment).decode("utf-8")) except TypeError as e: current_app.logger.exception(e) - raise jwt.DecodeError('Invalid header padding') + raise jwt.DecodeError("Invalid header padding") @identity_loaded.connect @@ -174,13 +178,13 @@ def on_identity_loaded(sender, identity): identity.provides.add(UserNeed(identity.id)) # identity with the roles that the user provides - if hasattr(user, 'roles'): + if hasattr(user, "roles"): for role in user.roles: identity.provides.add(RoleNeed(role.name)) identity.provides.add(RoleMemberNeed(role.id)) # apply ownership for authorities - if hasattr(user, 'authorities'): + if hasattr(user, "authorities"): for authority in user.authorities: identity.provides.add(AuthorityCreatorNeed(authority.id)) @@ -191,6 +195,7 @@ class AuthenticatedResource(Resource): """ Inherited by all resources that need to be protected by authentication. """ + method_decorators = [login_required] def __init__(self): diff --git a/lemur/auth/views.py b/lemur/auth/views.py index 0c319b5b..e7f87356 100644 --- a/lemur/auth/views.py +++ b/lemur/auth/views.py @@ -24,11 +24,13 @@ from lemur.auth.service import create_token, fetch_token_header, get_rsa_public_ from lemur.auth import ldap -mod = Blueprint('auth', __name__) +mod = Blueprint("auth", __name__) api = Api(mod) -def exchange_for_access_token(code, redirect_uri, client_id, secret, access_token_url=None, verify_cert=True): +def exchange_for_access_token( + code, redirect_uri, client_id, secret, access_token_url=None, verify_cert=True +): """ Exchanges authorization code for access token. @@ -43,28 +45,32 @@ def exchange_for_access_token(code, redirect_uri, client_id, secret, access_toke """ # take the information we have received from the provider to create a new request params = { - 'grant_type': 'authorization_code', - 'scope': 'openid email profile address', - 'code': code, - 'redirect_uri': redirect_uri, - 'client_id': client_id + "grant_type": "authorization_code", + "scope": "openid email profile address", + "code": code, + "redirect_uri": redirect_uri, + "client_id": client_id, } # the secret and cliendId will be given to you when you signup for the provider - token = '{0}:{1}'.format(client_id, secret) + token = "{0}:{1}".format(client_id, secret) - basic = base64.b64encode(bytes(token, 'utf-8')) + basic = base64.b64encode(bytes(token, "utf-8")) headers = { - 'Content-Type': 'application/x-www-form-urlencoded', - 'authorization': 'basic {0}'.format(basic.decode('utf-8')) + "Content-Type": "application/x-www-form-urlencoded", + "authorization": "basic {0}".format(basic.decode("utf-8")), } # exchange authorization code for access token. 
- r = requests.post(access_token_url, headers=headers, params=params, verify=verify_cert) + r = requests.post( + access_token_url, headers=headers, params=params, verify=verify_cert + ) if r.status_code == 400: - r = requests.post(access_token_url, headers=headers, data=params, verify=verify_cert) - id_token = r.json()['id_token'] - access_token = r.json()['access_token'] + r = requests.post( + access_token_url, headers=headers, data=params, verify=verify_cert + ) + id_token = r.json()["id_token"] + access_token = r.json()["access_token"] return id_token, access_token @@ -83,23 +89,25 @@ def validate_id_token(id_token, client_id, jwks_url): # retrieve the key material as specified by the token header r = requests.get(jwks_url) - for key in r.json()['keys']: - if key['kid'] == header_data['kid']: - secret = get_rsa_public_key(key['n'], key['e']) - algo = header_data['alg'] + for key in r.json()["keys"]: + if key["kid"] == header_data["kid"]: + secret = get_rsa_public_key(key["n"], key["e"]) + algo = header_data["alg"] break else: - return dict(message='Key not found'), 401 + return dict(message="Key not found"), 401 # validate your token based on the key it was signed with try: - jwt.decode(id_token, secret.decode('utf-8'), algorithms=[algo], audience=client_id) + jwt.decode( + id_token, secret.decode("utf-8"), algorithms=[algo], audience=client_id + ) except jwt.DecodeError: - return dict(message='Token is invalid'), 401 + return dict(message="Token is invalid"), 401 except jwt.ExpiredSignatureError: - return dict(message='Token has expired'), 401 + return dict(message="Token has expired"), 401 except jwt.InvalidTokenError: - return dict(message='Token is invalid'), 401 + return dict(message="Token is invalid"), 401 def retrieve_user(user_api_url, access_token): @@ -110,22 +118,18 @@ def retrieve_user(user_api_url, access_token): :param access_token: :return: """ - user_params = dict(access_token=access_token, schema='profile') + user_params = dict(access_token=access_token, schema="profile") headers = {} - if current_app.config.get('PING_INCLUDE_BEARER_TOKEN'): - headers = {'Authorization': f'Bearer {access_token}'} + if current_app.config.get("PING_INCLUDE_BEARER_TOKEN"): + headers = {"Authorization": f"Bearer {access_token}"} # retrieve information about the current user. - r = requests.get( - user_api_url, - params=user_params, - headers=headers, - ) + r = requests.get(user_api_url, params=user_params, headers=headers) profile = r.json() - user = user_service.get_by_email(profile['email']) + user = user_service.get_by_email(profile["email"]) return user, profile @@ -138,31 +142,44 @@ def create_user_roles(profile): roles = [] # update their google 'roles' - if 'googleGroups' in profile: - for group in profile['googleGroups']: + if "googleGroups" in profile: + for group in profile["googleGroups"]: role = role_service.get_by_name(group) if not role: - role = role_service.create(group, description='This is a google group based role created by Lemur', third_party=True) + role = role_service.create( + group, + description="This is a google group based role created by Lemur", + third_party=True, + ) if not role.third_party: role = role_service.set_third_party(role.id, third_party_status=True) roles.append(role) else: - current_app.logger.warning("'googleGroups' not sent by identity provider, no specific roles will assigned to the user.") + current_app.logger.warning( + "'googleGroups' not sent by identity provider, no specific roles will assigned to the user." 
+ ) - role = role_service.get_by_name(profile['email']) + role = role_service.get_by_name(profile["email"]) if not role: - role = role_service.create(profile['email'], description='This is a user specific role', third_party=True) + role = role_service.create( + profile["email"], + description="This is a user specific role", + third_party=True, + ) if not role.third_party: role = role_service.set_third_party(role.id, third_party_status=True) roles.append(role) # every user is an operator (tied to a default role) - if current_app.config.get('LEMUR_DEFAULT_ROLE'): - default = role_service.get_by_name(current_app.config['LEMUR_DEFAULT_ROLE']) + if current_app.config.get("LEMUR_DEFAULT_ROLE"): + default = role_service.get_by_name(current_app.config["LEMUR_DEFAULT_ROLE"]) if not default: - default = role_service.create(current_app.config['LEMUR_DEFAULT_ROLE'], description='This is the default Lemur role.') + default = role_service.create( + current_app.config["LEMUR_DEFAULT_ROLE"], + description="This is the default Lemur role.", + ) if not default.third_party: role_service.set_third_party(default.id, third_party_status=True) roles.append(default) @@ -181,12 +198,12 @@ def update_user(user, profile, roles): # if we get an sso user create them an account if not user: user = user_service.create( - profile['email'], + profile["email"], get_psuedo_random_string(), - profile['email'], + profile["email"], True, - profile.get('thumbnailPhotoUrl'), - roles + profile.get("thumbnailPhotoUrl"), + roles, ) else: @@ -198,11 +215,11 @@ def update_user(user, profile, roles): # update any changes to the user user_service.update( user.id, - profile['email'], - profile['email'], + profile["email"], + profile["email"], True, - profile.get('thumbnailPhotoUrl'), # profile isn't google+ enabled - roles + profile.get("thumbnailPhotoUrl"), # profile isn't google+ enabled + roles, ) @@ -223,6 +240,7 @@ class Login(Resource): on your uses cases but. It is important to not that there is currently no build in method to revoke a users token \ and force re-authentication. 
""" + def __init__(self): self.reqparse = reqparse.RequestParser() super(Login, self).__init__() @@ -263,23 +281,26 @@ class Login(Resource): :statuscode 401: invalid credentials :statuscode 200: no error """ - self.reqparse.add_argument('username', type=str, required=True, location='json') - self.reqparse.add_argument('password', type=str, required=True, location='json') + self.reqparse.add_argument("username", type=str, required=True, location="json") + self.reqparse.add_argument("password", type=str, required=True, location="json") args = self.reqparse.parse_args() - if '@' in args['username']: - user = user_service.get_by_email(args['username']) + if "@" in args["username"]: + user = user_service.get_by_email(args["username"]) else: - user = user_service.get_by_username(args['username']) + user = user_service.get_by_username(args["username"]) # default to local authentication - if user and user.check_password(args['password']) and user.active: + if user and user.check_password(args["password"]) and user.active: # Tell Flask-Principal the identity changed - identity_changed.send(current_app._get_current_object(), - identity=Identity(user.id)) + identity_changed.send( + current_app._get_current_object(), identity=Identity(user.id) + ) - metrics.send('login', 'counter', 1, metric_tags={'status': SUCCESS_METRIC_STATUS}) + metrics.send( + "login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS} + ) return dict(token=create_token(user)) # try ldap login @@ -289,19 +310,29 @@ class Login(Resource): user = ldap_principal.authenticate() if user and user.active: # Tell Flask-Principal the identity changed - identity_changed.send(current_app._get_current_object(), - identity=Identity(user.id)) - metrics.send('login', 'counter', 1, metric_tags={'status': SUCCESS_METRIC_STATUS}) + identity_changed.send( + current_app._get_current_object(), identity=Identity(user.id) + ) + metrics.send( + "login", + "counter", + 1, + metric_tags={"status": SUCCESS_METRIC_STATUS}, + ) return dict(token=create_token(user)) except Exception as e: - current_app.logger.error("ldap error: {0}".format(e)) - ldap_message = 'ldap error: %s' % e - metrics.send('login', 'counter', 1, metric_tags={'status': FAILURE_METRIC_STATUS}) - return dict(message=ldap_message), 403 + current_app.logger.error("ldap error: {0}".format(e)) + ldap_message = "ldap error: %s" % e + metrics.send( + "login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS} + ) + return dict(message=ldap_message), 403 # if not valid user - no certificates for you - metrics.send('login', 'counter', 1, metric_tags={'status': FAILURE_METRIC_STATUS}) - return dict(message='The supplied credentials are invalid'), 403 + metrics.send( + "login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS} + ) + return dict(message="The supplied credentials are invalid"), 403 class Ping(Resource): @@ -314,36 +345,39 @@ class Ping(Resource): provider uses for its callbacks. 2. Add or change the Lemur AngularJS Configuration to point to your new provider """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(Ping, self).__init__() def get(self): - return 'Redirecting...' + return "Redirecting..." 
def post(self): - self.reqparse.add_argument('clientId', type=str, required=True, location='json') - self.reqparse.add_argument('redirectUri', type=str, required=True, location='json') - self.reqparse.add_argument('code', type=str, required=True, location='json') + self.reqparse.add_argument("clientId", type=str, required=True, location="json") + self.reqparse.add_argument( + "redirectUri", type=str, required=True, location="json" + ) + self.reqparse.add_argument("code", type=str, required=True, location="json") args = self.reqparse.parse_args() # you can either discover these dynamically or simply configure them - access_token_url = current_app.config.get('PING_ACCESS_TOKEN_URL') - user_api_url = current_app.config.get('PING_USER_API_URL') + access_token_url = current_app.config.get("PING_ACCESS_TOKEN_URL") + user_api_url = current_app.config.get("PING_USER_API_URL") - secret = current_app.config.get('PING_SECRET') + secret = current_app.config.get("PING_SECRET") id_token, access_token = exchange_for_access_token( - args['code'], - args['redirectUri'], - args['clientId'], + args["code"], + args["redirectUri"], + args["clientId"], secret, - access_token_url=access_token_url + access_token_url=access_token_url, ) - jwks_url = current_app.config.get('PING_JWKS_URL') - error_code = validate_id_token(id_token, args['clientId'], jwks_url) + jwks_url = current_app.config.get("PING_JWKS_URL") + error_code = validate_id_token(id_token, args["clientId"], jwks_url) if error_code: return error_code user, profile = retrieve_user(user_api_url, access_token) @@ -351,13 +385,19 @@ class Ping(Resource): update_user(user, profile, roles) if not user or not user.active: - metrics.send('login', 'counter', 1, metric_tags={'status': FAILURE_METRIC_STATUS}) - return dict(message='The supplied credentials are invalid'), 403 + metrics.send( + "login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS} + ) + return dict(message="The supplied credentials are invalid"), 403 # Tell Flask-Principal the identity changed - identity_changed.send(current_app._get_current_object(), identity=Identity(user.id)) + identity_changed.send( + current_app._get_current_object(), identity=Identity(user.id) + ) - metrics.send('login', 'counter', 1, metric_tags={'status': SUCCESS_METRIC_STATUS}) + metrics.send( + "login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS} + ) return dict(token=create_token(user)) @@ -367,33 +407,35 @@ class OAuth2(Resource): super(OAuth2, self).__init__() def get(self): - return 'Redirecting...' + return "Redirecting..." 
def post(self): - self.reqparse.add_argument('clientId', type=str, required=True, location='json') - self.reqparse.add_argument('redirectUri', type=str, required=True, location='json') - self.reqparse.add_argument('code', type=str, required=True, location='json') + self.reqparse.add_argument("clientId", type=str, required=True, location="json") + self.reqparse.add_argument( + "redirectUri", type=str, required=True, location="json" + ) + self.reqparse.add_argument("code", type=str, required=True, location="json") args = self.reqparse.parse_args() # you can either discover these dynamically or simply configure them - access_token_url = current_app.config.get('OAUTH2_ACCESS_TOKEN_URL') - user_api_url = current_app.config.get('OAUTH2_USER_API_URL') - verify_cert = current_app.config.get('OAUTH2_VERIFY_CERT') + access_token_url = current_app.config.get("OAUTH2_ACCESS_TOKEN_URL") + user_api_url = current_app.config.get("OAUTH2_USER_API_URL") + verify_cert = current_app.config.get("OAUTH2_VERIFY_CERT") - secret = current_app.config.get('OAUTH2_SECRET') + secret = current_app.config.get("OAUTH2_SECRET") id_token, access_token = exchange_for_access_token( - args['code'], - args['redirectUri'], - args['clientId'], + args["code"], + args["redirectUri"], + args["clientId"], secret, access_token_url=access_token_url, - verify_cert=verify_cert + verify_cert=verify_cert, ) - jwks_url = current_app.config.get('PING_JWKS_URL') - error_code = validate_id_token(id_token, args['clientId'], jwks_url) + jwks_url = current_app.config.get("PING_JWKS_URL") + error_code = validate_id_token(id_token, args["clientId"], jwks_url) if error_code: return error_code @@ -402,13 +444,19 @@ class OAuth2(Resource): update_user(user, profile, roles) if not user.active: - metrics.send('login', 'counter', 1, metric_tags={'status': FAILURE_METRIC_STATUS}) - return dict(message='The supplied credentials are invalid'), 403 + metrics.send( + "login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS} + ) + return dict(message="The supplied credentials are invalid"), 403 # Tell Flask-Principal the identity changed - identity_changed.send(current_app._get_current_object(), identity=Identity(user.id)) + identity_changed.send( + current_app._get_current_object(), identity=Identity(user.id) + ) - metrics.send('login', 'counter', 1, metric_tags={'status': SUCCESS_METRIC_STATUS}) + metrics.send( + "login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS} + ) return dict(token=create_token(user)) @@ -419,44 +467,52 @@ class Google(Resource): super(Google, self).__init__() def post(self): - access_token_url = 'https://accounts.google.com/o/oauth2/token' - people_api_url = 'https://www.googleapis.com/plus/v1/people/me/openIdConnect' + access_token_url = "https://accounts.google.com/o/oauth2/token" + people_api_url = "https://www.googleapis.com/plus/v1/people/me/openIdConnect" - self.reqparse.add_argument('clientId', type=str, required=True, location='json') - self.reqparse.add_argument('redirectUri', type=str, required=True, location='json') - self.reqparse.add_argument('code', type=str, required=True, location='json') + self.reqparse.add_argument("clientId", type=str, required=True, location="json") + self.reqparse.add_argument( + "redirectUri", type=str, required=True, location="json" + ) + self.reqparse.add_argument("code", type=str, required=True, location="json") args = self.reqparse.parse_args() # Step 1. 
Exchange authorization code for access token payload = { - 'client_id': args['clientId'], - 'grant_type': 'authorization_code', - 'redirect_uri': args['redirectUri'], - 'code': args['code'], - 'client_secret': current_app.config.get('GOOGLE_SECRET') + "client_id": args["clientId"], + "grant_type": "authorization_code", + "redirect_uri": args["redirectUri"], + "code": args["code"], + "client_secret": current_app.config.get("GOOGLE_SECRET"), } r = requests.post(access_token_url, data=payload) token = r.json() # Step 2. Retrieve information about the current user - headers = {'Authorization': 'Bearer {0}'.format(token['access_token'])} + headers = {"Authorization": "Bearer {0}".format(token["access_token"])} r = requests.get(people_api_url, headers=headers) profile = r.json() - user = user_service.get_by_email(profile['email']) + user = user_service.get_by_email(profile["email"]) if not (user and user.active): - metrics.send('login', 'counter', 1, metric_tags={'status': FAILURE_METRIC_STATUS}) - return dict(message='The supplied credentials are invalid.'), 403 + metrics.send( + "login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS} + ) + return dict(message="The supplied credentials are invalid."), 403 if user: - metrics.send('login', 'counter', 1, metric_tags={'status': SUCCESS_METRIC_STATUS}) + metrics.send( + "login", "counter", 1, metric_tags={"status": SUCCESS_METRIC_STATUS} + ) return dict(token=create_token(user)) - metrics.send('login', 'counter', 1, metric_tags={'status': FAILURE_METRIC_STATUS}) + metrics.send( + "login", "counter", 1, metric_tags={"status": FAILURE_METRIC_STATUS} + ) class Providers(Resource): @@ -467,47 +523,57 @@ class Providers(Resource): provider = provider.lower() if provider == "google": - active_providers.append({ - 'name': 'google', - 'clientId': current_app.config.get("GOOGLE_CLIENT_ID"), - 'url': api.url_for(Google) - }) + active_providers.append( + { + "name": "google", + "clientId": current_app.config.get("GOOGLE_CLIENT_ID"), + "url": api.url_for(Google), + } + ) elif provider == "ping": - active_providers.append({ - 'name': current_app.config.get("PING_NAME"), - 'url': current_app.config.get('PING_REDIRECT_URI'), - 'redirectUri': current_app.config.get("PING_REDIRECT_URI"), - 'clientId': current_app.config.get("PING_CLIENT_ID"), - 'responseType': 'code', - 'scope': ['openid', 'email', 'profile', 'address'], - 'scopeDelimiter': ' ', - 'authorizationEndpoint': current_app.config.get("PING_AUTH_ENDPOINT"), - 'requiredUrlParams': ['scope'], - 'type': '2.0' - }) + active_providers.append( + { + "name": current_app.config.get("PING_NAME"), + "url": current_app.config.get("PING_REDIRECT_URI"), + "redirectUri": current_app.config.get("PING_REDIRECT_URI"), + "clientId": current_app.config.get("PING_CLIENT_ID"), + "responseType": "code", + "scope": ["openid", "email", "profile", "address"], + "scopeDelimiter": " ", + "authorizationEndpoint": current_app.config.get( + "PING_AUTH_ENDPOINT" + ), + "requiredUrlParams": ["scope"], + "type": "2.0", + } + ) elif provider == "oauth2": - active_providers.append({ - 'name': current_app.config.get("OAUTH2_NAME"), - 'url': current_app.config.get('OAUTH2_REDIRECT_URI'), - 'redirectUri': current_app.config.get("OAUTH2_REDIRECT_URI"), - 'clientId': current_app.config.get("OAUTH2_CLIENT_ID"), - 'responseType': 'code', - 'scope': ['openid', 'email', 'profile', 'groups'], - 'scopeDelimiter': ' ', - 'authorizationEndpoint': current_app.config.get("OAUTH2_AUTH_ENDPOINT"), - 'requiredUrlParams': ['scope', 'state', 
'nonce'], - 'state': 'STATE', - 'nonce': get_psuedo_random_string(), - 'type': '2.0' - }) + active_providers.append( + { + "name": current_app.config.get("OAUTH2_NAME"), + "url": current_app.config.get("OAUTH2_REDIRECT_URI"), + "redirectUri": current_app.config.get("OAUTH2_REDIRECT_URI"), + "clientId": current_app.config.get("OAUTH2_CLIENT_ID"), + "responseType": "code", + "scope": ["openid", "email", "profile", "groups"], + "scopeDelimiter": " ", + "authorizationEndpoint": current_app.config.get( + "OAUTH2_AUTH_ENDPOINT" + ), + "requiredUrlParams": ["scope", "state", "nonce"], + "state": "STATE", + "nonce": get_psuedo_random_string(), + "type": "2.0", + } + ) return active_providers -api.add_resource(Login, '/auth/login', endpoint='login') -api.add_resource(Ping, '/auth/ping', endpoint='ping') -api.add_resource(Google, '/auth/google', endpoint='google') -api.add_resource(OAuth2, '/auth/oauth2', endpoint='oauth2') -api.add_resource(Providers, '/auth/providers', endpoint='providers') +api.add_resource(Login, "/auth/login", endpoint="login") +api.add_resource(Ping, "/auth/ping", endpoint="ping") +api.add_resource(Google, "/auth/google", endpoint="google") +api.add_resource(OAuth2, "/auth/oauth2", endpoint="oauth2") +api.add_resource(Providers, "/auth/providers", endpoint="providers") diff --git a/lemur/authorities/models.py b/lemur/authorities/models.py index 6c5f790b..ccd1fab8 100644 --- a/lemur/authorities/models.py +++ b/lemur/authorities/models.py @@ -7,7 +7,17 @@ .. moduleauthor:: Kevin Glisson """ from sqlalchemy.orm import relationship -from sqlalchemy import Column, Integer, String, Text, func, ForeignKey, DateTime, PassiveDefault, Boolean +from sqlalchemy import ( + Column, + Integer, + String, + Text, + func, + ForeignKey, + DateTime, + PassiveDefault, + Boolean, +) from sqlalchemy.dialects.postgresql import JSON from lemur.database import db @@ -16,7 +26,7 @@ from lemur.models import roles_authorities class Authority(db.Model): - __tablename__ = 'authorities' + __tablename__ = "authorities" id = Column(Integer, primary_key=True) owner = Column(String(128), nullable=False) name = Column(String(128), unique=True) @@ -27,22 +37,44 @@ class Authority(db.Model): description = Column(Text) options = Column(JSON) date_created = Column(DateTime, PassiveDefault(func.now()), nullable=False) - roles = relationship('Role', secondary=roles_authorities, passive_deletes=True, backref=db.backref('authority'), lazy='dynamic') - user_id = Column(Integer, ForeignKey('users.id')) - authority_certificate = relationship("Certificate", backref='root_authority', uselist=False, foreign_keys='Certificate.root_authority_id') - certificates = relationship("Certificate", backref='authority', foreign_keys='Certificate.authority_id') + roles = relationship( + "Role", + secondary=roles_authorities, + passive_deletes=True, + backref=db.backref("authority"), + lazy="dynamic", + ) + user_id = Column(Integer, ForeignKey("users.id")) + authority_certificate = relationship( + "Certificate", + backref="root_authority", + uselist=False, + foreign_keys="Certificate.root_authority_id", + ) + certificates = relationship( + "Certificate", backref="authority", foreign_keys="Certificate.authority_id" + ) - authority_pending_certificate = relationship("PendingCertificate", backref='root_authority', uselist=False, foreign_keys='PendingCertificate.root_authority_id') - pending_certificates = relationship('PendingCertificate', backref='authority', foreign_keys='PendingCertificate.authority_id') + authority_pending_certificate = 
relationship( + "PendingCertificate", + backref="root_authority", + uselist=False, + foreign_keys="PendingCertificate.root_authority_id", + ) + pending_certificates = relationship( + "PendingCertificate", + backref="authority", + foreign_keys="PendingCertificate.authority_id", + ) def __init__(self, **kwargs): - self.owner = kwargs['owner'] - self.roles = kwargs.get('roles', []) - self.name = kwargs.get('name') - self.description = kwargs.get('description') - self.authority_certificate = kwargs['authority_certificate'] - self.plugin_name = kwargs['plugin']['slug'] - self.options = kwargs.get('options') + self.owner = kwargs["owner"] + self.roles = kwargs.get("roles", []) + self.name = kwargs.get("name") + self.description = kwargs.get("description") + self.authority_certificate = kwargs["authority_certificate"] + self.plugin_name = kwargs["plugin"]["slug"] + self.options = kwargs.get("options") @property def plugin(self): diff --git a/lemur/authorities/schemas.py b/lemur/authorities/schemas.py index d1f0adfc..c78aec94 100644 --- a/lemur/authorities/schemas.py +++ b/lemur/authorities/schemas.py @@ -11,7 +11,13 @@ from marshmallow import fields, validates_schema, pre_load from marshmallow import validate from marshmallow.exceptions import ValidationError -from lemur.schemas import PluginInputSchema, PluginOutputSchema, ExtensionSchema, AssociatedAuthoritySchema, AssociatedRoleSchema +from lemur.schemas import ( + PluginInputSchema, + PluginOutputSchema, + ExtensionSchema, + AssociatedAuthoritySchema, + AssociatedRoleSchema, +) from lemur.users.schemas import UserNestedOutputSchema from lemur.common.schema import LemurInputSchema, LemurOutputSchema from lemur.common import validators, missing @@ -30,21 +36,36 @@ class AuthorityInputSchema(LemurInputSchema): validity_years = fields.Integer() # certificate body fields - organizational_unit = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_ORGANIZATIONAL_UNIT')) - organization = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_ORGANIZATION')) - location = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_LOCATION')) - country = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_COUNTRY')) - state = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_STATE')) + organizational_unit = fields.String( + missing=lambda: current_app.config.get("LEMUR_DEFAULT_ORGANIZATIONAL_UNIT") + ) + organization = fields.String( + missing=lambda: current_app.config.get("LEMUR_DEFAULT_ORGANIZATION") + ) + location = fields.String( + missing=lambda: current_app.config.get("LEMUR_DEFAULT_LOCATION") + ) + country = fields.String( + missing=lambda: current_app.config.get("LEMUR_DEFAULT_COUNTRY") + ) + state = fields.String(missing=lambda: current_app.config.get("LEMUR_DEFAULT_STATE")) plugin = fields.Nested(PluginInputSchema) # signing related options - type = fields.String(validate=validate.OneOf(['root', 'subca']), missing='root') + type = fields.String(validate=validate.OneOf(["root", "subca"]), missing="root") parent = fields.Nested(AssociatedAuthoritySchema) - signing_algorithm = fields.String(validate=validate.OneOf(['sha256WithRSA', 'sha1WithRSA']), missing='sha256WithRSA') - key_type = fields.String(validate=validate.OneOf(['RSA2048', 'RSA4096']), missing='RSA2048') + signing_algorithm = fields.String( + validate=validate.OneOf(["sha256WithRSA", "sha1WithRSA"]), + missing="sha256WithRSA", + ) + key_type = fields.String( + validate=validate.OneOf(["RSA2048", "RSA4096"]), 
missing="RSA2048" + ) key_name = fields.String() - sensitivity = fields.String(validate=validate.OneOf(['medium', 'high']), missing='medium') + sensitivity = fields.String( + validate=validate.OneOf(["medium", "high"]), missing="medium" + ) serial_number = fields.Integer() first_serial = fields.Integer(missing=1) @@ -58,9 +79,11 @@ class AuthorityInputSchema(LemurInputSchema): @validates_schema def validate_subca(self, data): - if data['type'] == 'subca': - if not data.get('parent'): - raise ValidationError("If generating a subca, parent 'authority' must be specified.") + if data["type"] == "subca": + if not data.get("parent"): + raise ValidationError( + "If generating a subca, parent 'authority' must be specified." + ) @pre_load def ensure_dates(self, data): diff --git a/lemur/authorities/service.py b/lemur/authorities/service.py index 41c381e3..c70c6fc5 100644 --- a/lemur/authorities/service.py +++ b/lemur/authorities/service.py @@ -43,7 +43,7 @@ def mint(**kwargs): """ Creates the authority based on the plugin provided. """ - issuer = kwargs['plugin']['plugin_object'] + issuer = kwargs["plugin"]["plugin_object"] values = issuer.create_authority(kwargs) # support older plugins @@ -53,7 +53,12 @@ def mint(**kwargs): elif len(values) == 4: body, private_key, chain, roles = values - roles = create_authority_roles(roles, kwargs['owner'], kwargs['plugin']['plugin_object'].title, kwargs['creator']) + roles = create_authority_roles( + roles, + kwargs["owner"], + kwargs["plugin"]["plugin_object"].title, + kwargs["creator"], + ) return body, private_key, chain, roles @@ -66,16 +71,17 @@ def create_authority_roles(roles, owner, plugin_title, creator): """ role_objs = [] for r in roles: - role = role_service.get_by_name(r['name']) + role = role_service.get_by_name(r["name"]) if not role: role = role_service.create( - r['name'], - password=r['password'], + r["name"], + password=r["password"], description="Auto generated role for {0}".format(plugin_title), - username=r['username']) + username=r["username"], + ) # the user creating the authority should be able to administer it - if role.username == 'admin': + if role.username == "admin": creator.roles.append(role) role_objs.append(role) @@ -84,8 +90,7 @@ def create_authority_roles(roles, owner, plugin_title, creator): owner_role = role_service.get_by_name(owner) if not owner_role: owner_role = role_service.create( - owner, - description="Auto generated role based on owner: {0}".format(owner) + owner, description="Auto generated role based on owner: {0}".format(owner) ) role_objs.append(owner_role) @@ -98,27 +103,29 @@ def create(**kwargs): """ body, private_key, chain, roles = mint(**kwargs) - kwargs['creator'].roles = list(set(list(kwargs['creator'].roles) + roles)) + kwargs["creator"].roles = list(set(list(kwargs["creator"].roles) + roles)) - kwargs['body'] = body - kwargs['private_key'] = private_key - kwargs['chain'] = chain + kwargs["body"] = body + kwargs["private_key"] = private_key + kwargs["chain"] = chain - if kwargs.get('roles'): - kwargs['roles'] += roles + if kwargs.get("roles"): + kwargs["roles"] += roles else: - kwargs['roles'] = roles + kwargs["roles"] = roles cert = upload(**kwargs) - kwargs['authority_certificate'] = cert - if kwargs.get('plugin', {}).get('plugin_options', []): - kwargs['options'] = json.dumps(kwargs['plugin']['plugin_options']) + kwargs["authority_certificate"] = cert + if kwargs.get("plugin", {}).get("plugin_options", []): + kwargs["options"] = json.dumps(kwargs["plugin"]["plugin_options"]) authority = 
Authority(**kwargs) authority = database.create(authority) - kwargs['creator'].authorities.append(authority) + kwargs["creator"].authorities.append(authority) - metrics.send('authority_created', 'counter', 1, metric_tags=dict(owner=authority.owner)) + metrics.send( + "authority_created", "counter", 1, metric_tags=dict(owner=authority.owner) + ) return authority @@ -150,7 +157,7 @@ def get_by_name(authority_name): :param authority_name: :return: """ - return database.get(Authority, authority_name, field='name') + return database.get(Authority, authority_name, field="name") def get_authority_role(ca_name, creator=None): @@ -173,29 +180,31 @@ def render(args): :return: """ query = database.session_query(Authority) - filt = args.pop('filter') + filt = args.pop("filter") if filt: - terms = filt.split(';') - if 'active' in filt: + terms = filt.split(";") + if "active" in filt: query = query.filter(Authority.active == truthiness(terms[1])) - elif 'cn' in filt: - term = '%{0}%'.format(terms[1]) - sub_query = database.session_query(Certificate.root_authority_id) \ - .filter(Certificate.cn.ilike(term)) \ + elif "cn" in filt: + term = "%{0}%".format(terms[1]) + sub_query = ( + database.session_query(Certificate.root_authority_id) + .filter(Certificate.cn.ilike(term)) .subquery() + ) query = query.filter(Authority.id.in_(sub_query)) else: query = database.filter(query, Authority, terms) # we make sure that a user can only use an authority they either own are a member of - admins can see all - if not args['user'].is_admin: + if not args["user"].is_admin: authority_ids = [] - for authority in args['user'].authorities: + for authority in args["user"].authorities: authority_ids.append(authority.id) - for role in args['user'].roles: + for role in args["user"].roles: for authority in role.authorities: authority_ids.append(authority.id) query = query.filter(Authority.id.in_(authority_ids)) diff --git a/lemur/authorities/views.py b/lemur/authorities/views.py index b85c9b70..49bce63e 100644 --- a/lemur/authorities/views.py +++ b/lemur/authorities/views.py @@ -16,15 +16,21 @@ from lemur.auth.permissions import AuthorityPermission from lemur.certificates import service as certificate_service from lemur.authorities import service -from lemur.authorities.schemas import authority_input_schema, authority_output_schema, authorities_output_schema, authority_update_schema +from lemur.authorities.schemas import ( + authority_input_schema, + authority_output_schema, + authorities_output_schema, + authority_update_schema, +) -mod = Blueprint('authorities', __name__) +mod = Blueprint("authorities", __name__) api = Api(mod) class AuthoritiesList(AuthenticatedResource): """ Defines the 'authorities' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(AuthoritiesList, self).__init__() @@ -107,7 +113,7 @@ class AuthoritiesList(AuthenticatedResource): """ parser = paginated_parser.copy() args = parser.parse_args() - args['user'] = g.current_user + args["user"] = g.current_user return service.render(args) @validate_schema(authority_input_schema, authority_output_schema) @@ -220,7 +226,7 @@ class AuthoritiesList(AuthenticatedResource): :statuscode 403: unauthenticated :statuscode 200: no error """ - data['creator'] = g.current_user + data["creator"] = g.current_user return service.create(**data) @@ -388,7 +394,7 @@ class Authorities(AuthenticatedResource): authority = service.get(authority_id) if not authority: - return dict(message='Not Found'), 404 + return dict(message="Not Found"), 404 # all the 
authority role members should be allowed roles = [x.name for x in authority.roles] @@ -397,10 +403,10 @@ class Authorities(AuthenticatedResource): if permission.can(): return service.update( authority_id, - owner=data['owner'], - description=data['description'], - active=data['active'], - roles=data['roles'] + owner=data["owner"], + description=data["description"], + active=data["active"], + roles=data["roles"], ) return dict(message="You are not authorized to update this authority."), 403 @@ -505,10 +511,21 @@ class AuthorityVisualizations(AuthenticatedResource): ]} """ authority = service.get(authority_id) - return dict(name=authority.name, children=[{"name": c.name} for c in authority.certificates]) + return dict( + name=authority.name, + children=[{"name": c.name} for c in authority.certificates], + ) -api.add_resource(AuthoritiesList, '/authorities', endpoint='authorities') -api.add_resource(Authorities, '/authorities/', endpoint='authority') -api.add_resource(AuthorityVisualizations, '/authorities//visualize', endpoint='authority_visualizations') -api.add_resource(CertificateAuthority, '/certificates//authority', endpoint='certificateAuthority') +api.add_resource(AuthoritiesList, "/authorities", endpoint="authorities") +api.add_resource(Authorities, "/authorities/", endpoint="authority") +api.add_resource( + AuthorityVisualizations, + "/authorities//visualize", + endpoint="authority_visualizations", +) +api.add_resource( + CertificateAuthority, + "/certificates//authority", + endpoint="certificateAuthority", +) diff --git a/lemur/authorizations/models.py b/lemur/authorizations/models.py index d30de7ed..04ac0508 100644 --- a/lemur/authorizations/models.py +++ b/lemur/authorizations/models.py @@ -13,7 +13,7 @@ from lemur.plugins.base import plugins class Authorization(db.Model): - __tablename__ = 'pending_dns_authorizations' + __tablename__ = "pending_dns_authorizations" id = Column(Integer, primary_key=True, autoincrement=True) account_number = Column(String(128)) domains = Column(JSONType) diff --git a/lemur/certificates/cli.py b/lemur/certificates/cli.py index 04b8ec9a..b57ff175 100644 --- a/lemur/certificates/cli.py +++ b/lemur/certificates/cli.py @@ -34,7 +34,7 @@ from lemur.certificates.service import ( get_all_pending_reissue, get_by_name, get_all_certs, - get + get, ) from lemur.certificates.verify import verify_string @@ -56,11 +56,14 @@ def print_certificate_details(details): "\t[+] Authority: {authority_name}\n" "\t[+] Validity Start: {validity_start}\n" "\t[+] Validity End: {validity_end}\n".format( - common_name=details['commonName'], - sans=",".join(x['value'] for x in details['extensions']['subAltNames']['names']) or None, - authority_name=details['authority']['name'], - validity_start=details['validityStart'], - validity_end=details['validityEnd'] + common_name=details["commonName"], + sans=",".join( + x["value"] for x in details["extensions"]["subAltNames"]["names"] + ) + or None, + authority_name=details["authority"]["name"], + validity_start=details["validityStart"], + validity_end=details["validityEnd"], ) ) @@ -120,13 +123,11 @@ def request_rotation(endpoint, certificate, message, commit): except Exception as e: print( "[!] 
Failed to rotate endpoint {0} to certificate {1} reason: {2}".format( - endpoint.name, - certificate.name, - e + endpoint.name, certificate.name, e ) ) - metrics.send('endpoint_rotation', 'counter', 1, metric_tags={'status': status}) + metrics.send("endpoint_rotation", "counter", 1, metric_tags={"status": status}) def request_reissue(certificate, commit): @@ -154,17 +155,52 @@ def request_reissue(certificate, commit): except Exception as e: sentry.captureException(extra={"certificate_name": str(certificate.name)}) - current_app.logger.exception(f"Error reissuing certificate: {certificate.name}", exc_info=True) + current_app.logger.exception( + f"Error reissuing certificate: {certificate.name}", exc_info=True + ) print(f"[!] Failed to reissue certificate: {certificate.name}. Reason: {e}") - metrics.send('certificate_reissue', 'counter', 1, metric_tags={'status': status, 'certificate': certificate.name}) + metrics.send( + "certificate_reissue", + "counter", + 1, + metric_tags={"status": status, "certificate": certificate.name}, + ) -@manager.option('-e', '--endpoint', dest='endpoint_name', help='Name of the endpoint you wish to rotate.') -@manager.option('-n', '--new-certificate', dest='new_certificate_name', help='Name of the certificate you wish to rotate to.') -@manager.option('-o', '--old-certificate', dest='old_certificate_name', help='Name of the certificate you wish to rotate.') -@manager.option('-a', '--notify', dest='message', action='store_true', help='Send a rotation notification to the certificates owner.') -@manager.option('-c', '--commit', dest='commit', action='store_true', default=False, help='Persist changes.') +@manager.option( + "-e", + "--endpoint", + dest="endpoint_name", + help="Name of the endpoint you wish to rotate.", +) +@manager.option( + "-n", + "--new-certificate", + dest="new_certificate_name", + help="Name of the certificate you wish to rotate to.", +) +@manager.option( + "-o", + "--old-certificate", + dest="old_certificate_name", + help="Name of the certificate you wish to rotate.", +) +@manager.option( + "-a", + "--notify", + dest="message", + action="store_true", + help="Send a rotation notification to the certificates owner.", +) +@manager.option( + "-c", + "--commit", + dest="commit", + action="store_true", + default=False, + help="Persist changes.", +) def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, commit): """ Rotates an endpoint and reissues it if it has not already been replaced. 
If it has @@ -183,7 +219,9 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c endpoint = validate_endpoint(endpoint_name) if endpoint and new_cert: - print(f"[+] Rotating endpoint: {endpoint.name} to certificate {new_cert.name}") + print( + f"[+] Rotating endpoint: {endpoint.name} to certificate {new_cert.name}" + ) request_rotation(endpoint, new_cert, message, commit) elif old_cert and new_cert: @@ -197,16 +235,27 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c print("[+] Rotating all endpoints that have new certificates available") for endpoint in endpoint_service.get_all_pending_rotation(): if len(endpoint.certificate.replaced) == 1: - print(f"[+] Rotating {endpoint.name} to {endpoint.certificate.replaced[0].name}") - request_rotation(endpoint, endpoint.certificate.replaced[0], message, commit) + print( + f"[+] Rotating {endpoint.name} to {endpoint.certificate.replaced[0].name}" + ) + request_rotation( + endpoint, endpoint.certificate.replaced[0], message, commit + ) else: - metrics.send('endpoint_rotation', 'counter', 1, metric_tags={ - 'status': FAILURE_METRIC_STATUS, - "old_certificate_name": str(old_cert), - "new_certificate_name": str(endpoint.certificate.replaced[0].name), - "endpoint_name": str(endpoint.name), - "message": str(message), - }) + metrics.send( + "endpoint_rotation", + "counter", + 1, + metric_tags={ + "status": FAILURE_METRIC_STATUS, + "old_certificate_name": str(old_cert), + "new_certificate_name": str( + endpoint.certificate.replaced[0].name + ), + "endpoint_name": str(endpoint.name), + "message": str(message), + }, + ) print( f"[!] Failed to rotate endpoint {endpoint.name} reason: " "Multiple replacement certificates found." @@ -222,20 +271,38 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c "new_certificate_name": str(new_certificate_name), "endpoint_name": str(endpoint_name), "message": str(message), - }) + } + ) - metrics.send('endpoint_rotation_job', 'counter', 1, metric_tags={ - "status": status, - "old_certificate_name": str(old_certificate_name), - "new_certificate_name": str(new_certificate_name), - "endpoint_name": str(endpoint_name), - "message": str(message), - "endpoint": str(globals().get("endpoint")) - }) + metrics.send( + "endpoint_rotation_job", + "counter", + 1, + metric_tags={ + "status": status, + "old_certificate_name": str(old_certificate_name), + "new_certificate_name": str(new_certificate_name), + "endpoint_name": str(endpoint_name), + "message": str(message), + "endpoint": str(globals().get("endpoint")), + }, + ) -@manager.option('-o', '--old-certificate', dest='old_certificate_name', help='Name of the certificate you wish to reissue.') -@manager.option('-c', '--commit', dest='commit', action='store_true', default=False, help='Persist changes.') +@manager.option( + "-o", + "--old-certificate", + dest="old_certificate_name", + help="Name of the certificate you wish to reissue.", +) +@manager.option( + "-c", + "--commit", + dest="commit", + action="store_true", + default=False, + help="Persist changes.", +) def reissue(old_certificate_name, commit): """ Reissues certificate with the same parameters as it was originally issued with. @@ -263,76 +330,94 @@ def reissue(old_certificate_name, commit): except Exception as e: sentry.captureException() current_app.logger.exception("Error reissuing certificate.", exc_info=True) - print( - "[!] Failed to reissue certificates. Reason: {}".format( - e - ) - ) + print("[!] 
Failed to reissue certificates. Reason: {}".format(e)) - metrics.send('certificate_reissue_job', 'counter', 1, metric_tags={'status': status}) + metrics.send( + "certificate_reissue_job", "counter", 1, metric_tags={"status": status} + ) -@manager.option('-f', '--fqdns', dest='fqdns', help='FQDNs to query. Multiple fqdns specified via comma.') -@manager.option('-i', '--issuer', dest='issuer', help='Issuer to query for.') -@manager.option('-o', '--owner', dest='owner', help='Owner to query for.') -@manager.option('-e', '--expired', dest='expired', type=bool, default=False, help='Include expired certificates.') +@manager.option( + "-f", + "--fqdns", + dest="fqdns", + help="FQDNs to query. Multiple fqdns specified via comma.", +) +@manager.option("-i", "--issuer", dest="issuer", help="Issuer to query for.") +@manager.option("-o", "--owner", dest="owner", help="Owner to query for.") +@manager.option( + "-e", + "--expired", + dest="expired", + type=bool, + default=False, + help="Include expired certificates.", +) def query(fqdns, issuer, owner, expired): """Prints certificates that match the query params.""" table = [] q = database.session_query(Certificate) if issuer: - sub_query = database.session_query(Authority.id) \ - .filter(Authority.name.ilike('%{0}%'.format(issuer))) \ + sub_query = ( + database.session_query(Authority.id) + .filter(Authority.name.ilike("%{0}%".format(issuer))) .subquery() + ) q = q.filter( or_( - Certificate.issuer.ilike('%{0}%'.format(issuer)), - Certificate.authority_id.in_(sub_query) + Certificate.issuer.ilike("%{0}%".format(issuer)), + Certificate.authority_id.in_(sub_query), ) ) if owner: - q = q.filter(Certificate.owner.ilike('%{0}%'.format(owner))) + q = q.filter(Certificate.owner.ilike("%{0}%".format(owner))) if not expired: q = q.filter(Certificate.expired == False) # noqa if fqdns: - for f in fqdns.split(','): + for f in fqdns.split(","): q = q.filter( or_( - Certificate.cn.ilike('%{0}%'.format(f)), - Certificate.domains.any(Domain.name.ilike('%{0}%'.format(f))) + Certificate.cn.ilike("%{0}%".format(f)), + Certificate.domains.any(Domain.name.ilike("%{0}%".format(f))), ) ) for c in q.all(): table.append([c.id, c.name, c.owner, c.issuer]) - print(tabulate(table, headers=['Id', 'Name', 'Owner', 'Issuer'], tablefmt='csv')) + print(tabulate(table, headers=["Id", "Name", "Owner", "Issuer"], tablefmt="csv")) def worker(data, commit, reason): - parts = [x for x in data.split(' ') if x] + parts = [x for x in data.split(" ") if x] try: cert = get(int(parts[0].strip())) plugin = plugins.get(cert.authority.plugin_name) - print('[+] Revoking certificate. Id: {0} Name: {1}'.format(cert.id, cert.name)) + print("[+] Revoking certificate. Id: {0} Name: {1}".format(cert.id, cert.name)) if commit: plugin.revoke_certificate(cert, reason) - metrics.send('certificate_revoke', 'counter', 1, metric_tags={'status': SUCCESS_METRIC_STATUS}) + metrics.send( + "certificate_revoke", + "counter", + 1, + metric_tags={"status": SUCCESS_METRIC_STATUS}, + ) except Exception as e: sentry.captureException() - metrics.send('certificate_revoke', 'counter', 1, metric_tags={'status': FAILURE_METRIC_STATUS}) - print( - "[!] Failed to revoke certificates. Reason: {}".format( - e - ) + metrics.send( + "certificate_revoke", + "counter", + 1, + metric_tags={"status": FAILURE_METRIC_STATUS}, ) + print("[!] Failed to revoke certificates. Reason: {}".format(e)) @manager.command @@ -341,13 +426,22 @@ def clear_pending(): Function clears all pending certificates. 
:return: """ - v = plugins.get('verisign-issuer') + v = plugins.get("verisign-issuer") v.clear_pending_certificates() -@manager.option('-p', '--path', dest='path', help='Absolute file path to a Lemur query csv.') -@manager.option('-r', '--reason', dest='reason', help='Reason to revoke certificate.') -@manager.option('-c', '--commit', dest='commit', action='store_true', default=False, help='Persist changes.') +@manager.option( + "-p", "--path", dest="path", help="Absolute file path to a Lemur query csv." +) +@manager.option("-r", "--reason", dest="reason", help="Reason to revoke certificate.") +@manager.option( + "-c", + "--commit", + dest="commit", + action="store_true", + default=False, + help="Persist changes.", +) def revoke(path, reason, commit): """ Revokes given certificate. @@ -357,7 +451,7 @@ def revoke(path, reason, commit): print("[+] Starting certificate revocation.") - with open(path, 'r') as f: + with open(path, "r") as f: args = [[x, commit, reason] for x in f.readlines()[2:]] with multiprocessing.Pool(processes=3) as pool: @@ -380,11 +474,11 @@ def check_revoked(): else: status = verify_string(cert.body, "") - cert.status = 'valid' if status else 'revoked' + cert.status = "valid" if status else "revoked" except Exception as e: sentry.captureException() current_app.logger.exception(e) - cert.status = 'unknown' + cert.status = "unknown" database.update(cert) diff --git a/lemur/certificates/hooks.py b/lemur/certificates/hooks.py index 16f6c3b0..93409bb4 100644 --- a/lemur/certificates/hooks.py +++ b/lemur/certificates/hooks.py @@ -12,21 +12,30 @@ import subprocess from flask import current_app -from lemur.certificates.service import csr_created, csr_imported, certificate_issued, certificate_imported +from lemur.certificates.service import ( + csr_created, + csr_imported, + certificate_issued, + certificate_imported, +) def csr_dump_handler(sender, csr, **kwargs): try: - subprocess.run(['openssl', 'req', '-text', '-noout', '-reqopt', 'no_sigdump,no_pubkey'], - input=csr.encode('utf8')) + subprocess.run( + ["openssl", "req", "-text", "-noout", "-reqopt", "no_sigdump,no_pubkey"], + input=csr.encode("utf8"), + ) except Exception as err: current_app.logger.warning("Error inspecting CSR: %s", err) def cert_dump_handler(sender, certificate, **kwargs): try: - subprocess.run(['openssl', 'x509', '-text', '-noout', '-certopt', 'no_sigdump,no_pubkey'], - input=certificate.body.encode('utf8')) + subprocess.run( + ["openssl", "x509", "-text", "-noout", "-certopt", "no_sigdump,no_pubkey"], + input=certificate.body.encode("utf8"), + ) except Exception as err: current_app.logger.warning("Error inspecting certificate: %s", err) diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index bd6e8b5e..965f79d1 100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -12,7 +12,18 @@ from cryptography import x509 from cryptography.hazmat.primitives.asymmetric import rsa from flask import current_app from idna.core import InvalidCodepoint -from sqlalchemy import event, Integer, ForeignKey, String, PassiveDefault, func, Column, Text, Boolean, Index +from sqlalchemy import ( + event, + Integer, + ForeignKey, + String, + PassiveDefault, + func, + Column, + Text, + Boolean, + Index, +) from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import relationship from sqlalchemy.sql.expression import case, extract @@ -25,19 +36,25 @@ from lemur.database import db from lemur.domains.models import Domain from lemur.extensions import metrics from 
lemur.extensions import sentry -from lemur.models import certificate_associations, certificate_source_associations, \ - certificate_destination_associations, certificate_notification_associations, \ - certificate_replacement_associations, roles_certificates, pending_cert_replacement_associations +from lemur.models import ( + certificate_associations, + certificate_source_associations, + certificate_destination_associations, + certificate_notification_associations, + certificate_replacement_associations, + roles_certificates, + pending_cert_replacement_associations, +) from lemur.plugins.base import plugins from lemur.policies.models import RotationPolicy from lemur.utils import Vault def get_sequence(name): - if '-' not in name: + if "-" not in name: return name, None - parts = name.split('-') + parts = name.split("-") # see if we have an int at the end of our name try: @@ -49,18 +66,22 @@ def get_sequence(name): if len(parts[-1]) == 8: return name, None - root = '-'.join(parts[:-1]) + root = "-".join(parts[:-1]) return root, seq def get_or_increase_name(name, serial): - certificates = Certificate.query.filter(Certificate.name.ilike('{0}%'.format(name))).all() + certificates = Certificate.query.filter( + Certificate.name.ilike("{0}%".format(name)) + ).all() if not certificates: return name - serial_name = '{0}-{1}'.format(name, hex(int(serial))[2:].upper()) - certificates = Certificate.query.filter(Certificate.name.ilike('{0}%'.format(serial_name))).all() + serial_name = "{0}-{1}".format(name, hex(int(serial))[2:].upper()) + certificates = Certificate.query.filter( + Certificate.name.ilike("{0}%".format(serial_name)) + ).all() if not certificates: return serial_name @@ -72,21 +93,29 @@ def get_or_increase_name(name, serial): if end: ends.append(end) - return '{0}-{1}'.format(root, max(ends) + 1) + return "{0}-{1}".format(root, max(ends) + 1) class Certificate(db.Model): - __tablename__ = 'certificates' + __tablename__ = "certificates" __table_args__ = ( - Index('ix_certificates_cn', "cn", - postgresql_ops={"cn": "gin_trgm_ops"}, - postgresql_using='gin'), - Index('ix_certificates_name', "name", - postgresql_ops={"name": "gin_trgm_ops"}, - postgresql_using='gin'), + Index( + "ix_certificates_cn", + "cn", + postgresql_ops={"cn": "gin_trgm_ops"}, + postgresql_using="gin", + ), + Index( + "ix_certificates_name", + "name", + postgresql_ops={"name": "gin_trgm_ops"}, + postgresql_using="gin", + ), ) id = Column(Integer, primary_key=True) - ix = Index('ix_certificates_id_desc', id.desc(), postgresql_using='btree', unique=True) + ix = Index( + "ix_certificates_id_desc", id.desc(), postgresql_using="btree", unique=True + ) external_id = Column(String(128)) owner = Column(String(128), nullable=False) name = Column(String(256), unique=True) @@ -102,7 +131,9 @@ class Certificate(db.Model): serial = Column(String(128)) cn = Column(String(128)) deleted = Column(Boolean, index=True, default=False) - dns_provider_id = Column(Integer(), ForeignKey('dns_providers.id', ondelete='CASCADE'), nullable=True) + dns_provider_id = Column( + Integer(), ForeignKey("dns_providers.id", ondelete="CASCADE"), nullable=True + ) not_before = Column(ArrowType) not_after = Column(ArrowType) @@ -114,34 +145,53 @@ class Certificate(db.Model): san = Column(String(1024)) # TODO this should be migrated to boolean rotation = Column(Boolean, default=False) - user_id = Column(Integer, ForeignKey('users.id')) - authority_id = Column(Integer, ForeignKey('authorities.id', ondelete="CASCADE")) - root_authority_id = Column(Integer, 
ForeignKey('authorities.id', ondelete="CASCADE")) - rotation_policy_id = Column(Integer, ForeignKey('rotation_policies.id')) + user_id = Column(Integer, ForeignKey("users.id")) + authority_id = Column(Integer, ForeignKey("authorities.id", ondelete="CASCADE")) + root_authority_id = Column( + Integer, ForeignKey("authorities.id", ondelete="CASCADE") + ) + rotation_policy_id = Column(Integer, ForeignKey("rotation_policies.id")) - notifications = relationship('Notification', secondary=certificate_notification_associations, backref='certificate') - destinations = relationship('Destination', secondary=certificate_destination_associations, backref='certificate') - sources = relationship('Source', secondary=certificate_source_associations, backref='certificate') - domains = relationship('Domain', secondary=certificate_associations, backref='certificate') - roles = relationship('Role', secondary=roles_certificates, backref='certificate') - replaces = relationship('Certificate', - secondary=certificate_replacement_associations, - primaryjoin=id == certificate_replacement_associations.c.certificate_id, # noqa - secondaryjoin=id == certificate_replacement_associations.c.replaced_certificate_id, # noqa - backref='replaced') + notifications = relationship( + "Notification", + secondary=certificate_notification_associations, + backref="certificate", + ) + destinations = relationship( + "Destination", + secondary=certificate_destination_associations, + backref="certificate", + ) + sources = relationship( + "Source", secondary=certificate_source_associations, backref="certificate" + ) + domains = relationship( + "Domain", secondary=certificate_associations, backref="certificate" + ) + roles = relationship("Role", secondary=roles_certificates, backref="certificate") + replaces = relationship( + "Certificate", + secondary=certificate_replacement_associations, + primaryjoin=id == certificate_replacement_associations.c.certificate_id, # noqa + secondaryjoin=id + == certificate_replacement_associations.c.replaced_certificate_id, # noqa + backref="replaced", + ) - replaced_by_pending = relationship('PendingCertificate', - secondary=pending_cert_replacement_associations, - backref='pending_replace', - viewonly=True) + replaced_by_pending = relationship( + "PendingCertificate", + secondary=pending_cert_replacement_associations, + backref="pending_replace", + viewonly=True, + ) - logs = relationship('Log', backref='certificate') - endpoints = relationship('Endpoint', backref='certificate') + logs = relationship("Log", backref="certificate") + endpoints = relationship("Endpoint", backref="certificate") rotation_policy = relationship("RotationPolicy") - sensitive_fields = ('private_key',) + sensitive_fields = ("private_key",) def __init__(self, **kwargs): - self.body = kwargs['body'].strip() + self.body = kwargs["body"].strip() cert = self.parsed_cert self.issuer = defaults.issuer(cert) @@ -152,36 +202,42 @@ class Certificate(db.Model): self.serial = defaults.serial(cert) # when destinations are appended they require a valid name. 
- if kwargs.get('name'): - self.name = get_or_increase_name(defaults.text_to_slug(kwargs['name']), self.serial) + if kwargs.get("name"): + self.name = get_or_increase_name( + defaults.text_to_slug(kwargs["name"]), self.serial + ) else: self.name = get_or_increase_name( - defaults.certificate_name(self.cn, self.issuer, self.not_before, self.not_after, self.san), self.serial) + defaults.certificate_name( + self.cn, self.issuer, self.not_before, self.not_after, self.san + ), + self.serial, + ) - self.owner = kwargs['owner'] + self.owner = kwargs["owner"] - if kwargs.get('private_key'): - self.private_key = kwargs['private_key'].strip() + if kwargs.get("private_key"): + self.private_key = kwargs["private_key"].strip() - if kwargs.get('chain'): - self.chain = kwargs['chain'].strip() + if kwargs.get("chain"): + self.chain = kwargs["chain"].strip() - if kwargs.get('csr'): - self.csr = kwargs['csr'].strip() + if kwargs.get("csr"): + self.csr = kwargs["csr"].strip() - self.notify = kwargs.get('notify', True) - self.destinations = kwargs.get('destinations', []) - self.notifications = kwargs.get('notifications', []) - self.description = kwargs.get('description') - self.roles = list(set(kwargs.get('roles', []))) - self.replaces = kwargs.get('replaces', []) - self.rotation = kwargs.get('rotation') - self.rotation_policy = kwargs.get('rotation_policy') + self.notify = kwargs.get("notify", True) + self.destinations = kwargs.get("destinations", []) + self.notifications = kwargs.get("notifications", []) + self.description = kwargs.get("description") + self.roles = list(set(kwargs.get("roles", []))) + self.replaces = kwargs.get("replaces", []) + self.rotation = kwargs.get("rotation") + self.rotation_policy = kwargs.get("rotation_policy") self.signing_algorithm = defaults.signing_algorithm(cert) self.bits = defaults.bitstrength(cert) - self.external_id = kwargs.get('external_id') - self.authority_id = kwargs.get('authority_id') - self.dns_provider_id = kwargs.get('dns_provider_id') + self.external_id = kwargs.get("external_id") + self.authority_id = kwargs.get("authority_id") + self.dns_provider_id = kwargs.get("dns_provider_id") for domain in defaults.domains(cert): self.domains.append(Domain(name=domain)) @@ -195,8 +251,11 @@ class Certificate(db.Model): Integrity checks: Does the cert have a valid chain and matching private key? 
""" if self.private_key: - validators.verify_private_key_match(utils.parse_private_key(self.private_key), self.parsed_cert, - error_class=AssertionError) + validators.verify_private_key_match( + utils.parse_private_key(self.private_key), + self.parsed_cert, + error_class=AssertionError, + ) if self.chain: chain = [self.parsed_cert] + utils.parse_cert_chain(self.chain) @@ -238,7 +297,9 @@ class Certificate(db.Model): @property def key_type(self): if isinstance(self.parsed_cert.public_key(), rsa.RSAPublicKey): - return 'RSA{key_size}'.format(key_size=self.parsed_cert.public_key().key_size) + return "RSA{key_size}".format( + key_size=self.parsed_cert.public_key().key_size + ) @property def validity_remaining(self): @@ -263,26 +324,16 @@ class Certificate(db.Model): @expired.expression def expired(cls): - return case( - [ - (cls.not_after <= arrow.utcnow(), True) - ], - else_=False - ) + return case([(cls.not_after <= arrow.utcnow(), True)], else_=False) @hybrid_property def revoked(self): - if 'revoked' == self.status: + if "revoked" == self.status: return True @revoked.expression def revoked(cls): - return case( - [ - (cls.status == 'revoked', True) - ], - else_=False - ) + return case([(cls.status == "revoked", True)], else_=False) @hybrid_property def in_rotation_window(self): @@ -305,66 +356,65 @@ class Certificate(db.Model): :return: """ return case( - [ - (extract('day', cls.not_after - func.now()) <= RotationPolicy.days, True) - ], - else_=False + [(extract("day", cls.not_after - func.now()) <= RotationPolicy.days, True)], + else_=False, ) @property def extensions(self): # setup default values - return_extensions = { - 'sub_alt_names': {'names': []} - } + return_extensions = {"sub_alt_names": {"names": []}} try: for extension in self.parsed_cert.extensions: value = extension.value if isinstance(value, x509.BasicConstraints): - return_extensions['basic_constraints'] = value + return_extensions["basic_constraints"] = value elif isinstance(value, x509.SubjectAlternativeName): - return_extensions['sub_alt_names']['names'] = value + return_extensions["sub_alt_names"]["names"] = value elif isinstance(value, x509.ExtendedKeyUsage): - return_extensions['extended_key_usage'] = value + return_extensions["extended_key_usage"] = value elif isinstance(value, x509.KeyUsage): - return_extensions['key_usage'] = value + return_extensions["key_usage"] = value elif isinstance(value, x509.SubjectKeyIdentifier): - return_extensions['subject_key_identifier'] = {'include_ski': True} + return_extensions["subject_key_identifier"] = {"include_ski": True} elif isinstance(value, x509.AuthorityInformationAccess): - return_extensions['certificate_info_access'] = {'include_aia': True} + return_extensions["certificate_info_access"] = {"include_aia": True} elif isinstance(value, x509.AuthorityKeyIdentifier): - aki = { - 'use_key_identifier': False, - 'use_authority_cert': False - } + aki = {"use_key_identifier": False, "use_authority_cert": False} if value.key_identifier: - aki['use_key_identifier'] = True + aki["use_key_identifier"] = True if value.authority_cert_issuer: - aki['use_authority_cert'] = True + aki["use_authority_cert"] = True - return_extensions['authority_key_identifier'] = aki + return_extensions["authority_key_identifier"] = aki elif isinstance(value, x509.CRLDistributionPoints): - return_extensions['crl_distribution_points'] = {'include_crl_dp': value} + return_extensions["crl_distribution_points"] = { + "include_crl_dp": value + } # TODO: Not supporting custom OIDs yet. 
https://github.com/Netflix/lemur/issues/665 else: - current_app.logger.warning('Custom OIDs not yet supported for clone operation.') + current_app.logger.warning( + "Custom OIDs not yet supported for clone operation." + ) except InvalidCodepoint as e: sentry.captureException() - current_app.logger.warning('Unable to parse extensions due to underscore in dns name') + current_app.logger.warning( + "Unable to parse extensions due to underscore in dns name" + ) except ValueError as e: sentry.captureException() - current_app.logger.warning('Unable to parse') + current_app.logger.warning("Unable to parse") current_app.logger.exception(e) return return_extensions @@ -373,7 +423,7 @@ class Certificate(db.Model): return "Certificate(name={name})".format(name=self.name) -@event.listens_for(Certificate.destinations, 'append') +@event.listens_for(Certificate.destinations, "append") def update_destinations(target, value, initiator): """ Attempt to upload certificate to the new destination @@ -387,17 +437,31 @@ def update_destinations(target, value, initiator): status = FAILURE_METRIC_STATUS try: if target.private_key or not destination_plugin.requires_key: - destination_plugin.upload(target.name, target.body, target.private_key, target.chain, value.options) + destination_plugin.upload( + target.name, + target.body, + target.private_key, + target.chain, + value.options, + ) status = SUCCESS_METRIC_STATUS except Exception as e: sentry.captureException() raise - metrics.send('destination_upload', 'counter', 1, - metric_tags={'status': status, 'certificate': target.name, 'destination': value.label}) + metrics.send( + "destination_upload", + "counter", + 1, + metric_tags={ + "status": status, + "certificate": target.name, + "destination": value.label, + }, + ) -@event.listens_for(Certificate.replaces, 'append') +@event.listens_for(Certificate.replaces, "append") def update_replacement(target, value, initiator): """ When a certificate is marked as 'replaced' we should not notify. 
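The `update_destinations` listener above is what gives Lemur its push-on-associate behavior: the SQLAlchemy `"append"` collection event fires the moment a Destination is attached to a Certificate, so the destination plugin's `upload` is attempted immediately rather than at commit time, and a failure raises (after sending the metric) before the association is ever flushed. Below is a minimal, self-contained sketch of that event pattern; the `Widget`/`Tag` models and the print-based "upload" are placeholders for illustration only, not Lemur's real models or plugin API.

from sqlalchemy import Column, ForeignKey, Integer, String, Table, create_engine, event
from sqlalchemy.orm import Session, declarative_base, relationship

Base = declarative_base()

# Hypothetical association table and models, standing in for
# certificate_destination_associations / Certificate / Destination.
widget_tags = Table(
    "widget_tags",
    Base.metadata,
    Column("widget_id", ForeignKey("widgets.id")),
    Column("tag_id", ForeignKey("tags.id")),
)


class Widget(Base):
    __tablename__ = "widgets"
    id = Column(Integer, primary_key=True)
    name = Column(String(64))
    tags = relationship("Tag", secondary=widget_tags, backref="widgets")


class Tag(Base):
    __tablename__ = "tags"
    id = Column(Integer, primary_key=True)
    label = Column(String(64))


@event.listens_for(Widget.tags, "append")
def on_tag_append(target, value, initiator):
    # Fires synchronously when `widget.tags.append(tag)` runs -- the same hook
    # Lemur uses to push a certificate to a newly attached destination.
    print("would upload {0} because {1} was attached".format(target.name, value.label))


if __name__ == "__main__":
    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    with Session(engine) as session:
        widget = Widget(name="example")
        widget.tags.append(Tag(label="prod"))  # listener fires here, before commit
        session.add(widget)
        session.commit()

Because the hook runs on append rather than on flush, a failed upload in the real listener surfaces at the call site that attached the destination, which is why `update_destinations` re-raises after recording the failure metric.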
diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index f4a6fa9a..bf950e70 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -39,22 +39,26 @@ from lemur.users.schemas import UserNestedOutputSchema class CertificateSchema(LemurInputSchema): owner = fields.Email(required=True) - description = fields.String(missing='', allow_none=True) + description = fields.String(missing="", allow_none=True) class CertificateCreationSchema(CertificateSchema): @post_load def default_notification(self, data): - if not data['notifications']: - data['notifications'] += notification_service.create_default_expiration_notifications( - "DEFAULT_{0}".format(data['owner'].split('@')[0].upper()), - [data['owner']], + if not data["notifications"]: + data[ + "notifications" + ] += notification_service.create_default_expiration_notifications( + "DEFAULT_{0}".format(data["owner"].split("@")[0].upper()), + [data["owner"]], ) - data['notifications'] += notification_service.create_default_expiration_notifications( - 'DEFAULT_SECURITY', - current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL'), - current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL_INTERVALS', None) + data[ + "notifications" + ] += notification_service.create_default_expiration_notifications( + "DEFAULT_SECURITY", + current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL"), + current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL_INTERVALS", None), ) return data @@ -71,37 +75,53 @@ class CertificateInputSchema(CertificateCreationSchema): destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True) notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True) replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True) - replacements = fields.Nested(AssociatedCertificateSchema, missing=[], many=True) # deprecated + replacements = fields.Nested( + AssociatedCertificateSchema, missing=[], many=True + ) # deprecated roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True) - dns_provider = fields.Nested(AssociatedDnsProviderSchema, missing=None, allow_none=True, required=False) + dns_provider = fields.Nested( + AssociatedDnsProviderSchema, missing=None, allow_none=True, required=False + ) csr = fields.String(allow_none=True, validate=validators.csr) key_type = fields.String( - validate=validate.OneOf(CERTIFICATE_KEY_TYPES), - missing='RSA2048') + validate=validate.OneOf(CERTIFICATE_KEY_TYPES), missing="RSA2048" + ) notify = fields.Boolean(default=True) rotation = fields.Boolean() - rotation_policy = fields.Nested(AssociatedRotationPolicySchema, missing={'name': 'default'}, allow_none=True, - default={'name': 'default'}) + rotation_policy = fields.Nested( + AssociatedRotationPolicySchema, + missing={"name": "default"}, + allow_none=True, + default={"name": "default"}, + ) # certificate body fields - organizational_unit = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_ORGANIZATIONAL_UNIT')) - organization = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_ORGANIZATION')) - location = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_LOCATION')) - country = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_COUNTRY')) - state = fields.String(missing=lambda: current_app.config.get('LEMUR_DEFAULT_STATE')) + organizational_unit = fields.String( + missing=lambda: current_app.config.get("LEMUR_DEFAULT_ORGANIZATIONAL_UNIT") + ) + organization = fields.String( + missing=lambda: 
current_app.config.get("LEMUR_DEFAULT_ORGANIZATION") + ) + location = fields.String( + missing=lambda: current_app.config.get("LEMUR_DEFAULT_LOCATION") + ) + country = fields.String( + missing=lambda: current_app.config.get("LEMUR_DEFAULT_COUNTRY") + ) + state = fields.String(missing=lambda: current_app.config.get("LEMUR_DEFAULT_STATE")) extensions = fields.Nested(ExtensionSchema) @validates_schema def validate_authority(self, data): - if isinstance(data['authority'], str): + if isinstance(data["authority"], str): raise ValidationError("Authority not found.") - if not data['authority'].active: - raise ValidationError("The authority is inactive.", ['authority']) + if not data["authority"].active: + raise ValidationError("The authority is inactive.", ["authority"]) @validates_schema def validate_dates(self, data): @@ -109,23 +129,19 @@ class CertificateInputSchema(CertificateCreationSchema): @pre_load def load_data(self, data): - if data.get('replacements'): - data['replaces'] = data['replacements'] # TODO remove when field is deprecated - if data.get('csr'): - csr_sans = cert_utils.get_sans_from_csr(data['csr']) - if not data.get('extensions'): - data['extensions'] = { - 'subAltNames': { - 'names': [] - } - } - elif not data['extensions'].get('subAltNames'): - data['extensions']['subAltNames'] = { - 'names': [] - } - elif not data['extensions']['subAltNames'].get('names'): - data['extensions']['subAltNames']['names'] = [] - data['extensions']['subAltNames']['names'] += csr_sans + if data.get("replacements"): + data["replaces"] = data[ + "replacements" + ] # TODO remove when field is deprecated + if data.get("csr"): + csr_sans = cert_utils.get_sans_from_csr(data["csr"]) + if not data.get("extensions"): + data["extensions"] = {"subAltNames": {"names": []}} + elif not data["extensions"].get("subAltNames"): + data["extensions"]["subAltNames"] = {"names": []} + elif not data["extensions"]["subAltNames"].get("names"): + data["extensions"]["subAltNames"]["names"] = [] + data["extensions"]["subAltNames"]["names"] += csr_sans return missing.convert_validity_years(data) @@ -138,13 +154,17 @@ class CertificateEditInputSchema(CertificateSchema): destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True) notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True) replaces = fields.Nested(AssociatedCertificateSchema, missing=[], many=True) - replacements = fields.Nested(AssociatedCertificateSchema, missing=[], many=True) # deprecated + replacements = fields.Nested( + AssociatedCertificateSchema, missing=[], many=True + ) # deprecated roles = fields.Nested(AssociatedRoleSchema, missing=[], many=True) @pre_load def load_data(self, data): - if data.get('replacements'): - data['replaces'] = data['replacements'] # TODO remove when field is deprecated + if data.get("replacements"): + data["replaces"] = data[ + "replacements" + ] # TODO remove when field is deprecated return data @post_load @@ -155,10 +175,15 @@ class CertificateEditInputSchema(CertificateSchema): :param data: :return: """ - if data['owner']: - notification_name = "DEFAULT_{0}".format(data['owner'].split('@')[0].upper()) - data['notifications'] += notification_service.create_default_expiration_notifications(notification_name, - [data['owner']]) + if data["owner"]: + notification_name = "DEFAULT_{0}".format( + data["owner"].split("@")[0].upper() + ) + data[ + "notifications" + ] += notification_service.create_default_expiration_notifications( + notification_name, [data["owner"]] + ) return data @@ 
-184,13 +209,13 @@ class CertificateNestedOutputSchema(LemurOutputSchema): # Note aliasing is the first step in deprecating these fields. cn = fields.String() # deprecated - common_name = fields.String(attribute='cn') + common_name = fields.String(attribute="cn") not_after = fields.DateTime() # deprecated - validity_end = ArrowDateTime(attribute='not_after') + validity_end = ArrowDateTime(attribute="not_after") not_before = fields.DateTime() # deprecated - validity_start = ArrowDateTime(attribute='not_before') + validity_start = ArrowDateTime(attribute="not_before") issuer = fields.Nested(AuthorityNestedOutputSchema) @@ -221,22 +246,22 @@ class CertificateOutputSchema(LemurOutputSchema): # Note aliasing is the first step in deprecating these fields. notify = fields.Boolean() - active = fields.Boolean(attribute='notify') + active = fields.Boolean(attribute="notify") cn = fields.String() - common_name = fields.String(attribute='cn') + common_name = fields.String(attribute="cn") distinguished_name = fields.String() not_after = fields.DateTime() - validity_end = ArrowDateTime(attribute='not_after') + validity_end = ArrowDateTime(attribute="not_after") not_before = fields.DateTime() - validity_start = ArrowDateTime(attribute='not_before') + validity_start = ArrowDateTime(attribute="not_before") owner = fields.Email() san = fields.Boolean() serial = fields.String() - serial_hex = Hex(attribute='serial') + serial_hex = Hex(attribute="serial") signing_algorithm = fields.String() status = fields.String() @@ -253,7 +278,9 @@ class CertificateOutputSchema(LemurOutputSchema): dns_provider = fields.Nested(DnsProvidersNestedOutputSchema) roles = fields.Nested(RoleNestedOutputSchema, many=True) endpoints = fields.Nested(EndpointNestedOutputSchema, many=True, missing=[]) - replaced_by = fields.Nested(CertificateNestedOutputSchema, many=True, attribute='replaced') + replaced_by = fields.Nested( + CertificateNestedOutputSchema, many=True, attribute="replaced" + ) rotation_policy = fields.Nested(RotationPolicyNestedOutputSchema) @@ -274,35 +301,41 @@ class CertificateUploadInputSchema(CertificateCreationSchema): @validates_schema def keys(self, data): - if data.get('destinations'): - if not data.get('private_key'): - raise ValidationError('Destinations require private key.') + if data.get("destinations"): + if not data.get("private_key"): + raise ValidationError("Destinations require private key.") @validates_schema def validate_cert_private_key_chain(self, data): cert = None key = None - if data.get('body'): + if data.get("body"): try: - cert = utils.parse_certificate(data['body']) + cert = utils.parse_certificate(data["body"]) except ValueError: - raise ValidationError("Public certificate presented is not valid.", field_names=['body']) + raise ValidationError( + "Public certificate presented is not valid.", field_names=["body"] + ) - if data.get('private_key'): + if data.get("private_key"): try: - key = utils.parse_private_key(data['private_key']) + key = utils.parse_private_key(data["private_key"]) except ValueError: - raise ValidationError("Private key presented is not valid.", field_names=['private_key']) + raise ValidationError( + "Private key presented is not valid.", field_names=["private_key"] + ) if cert and key: # Throws ValidationError validators.verify_private_key_match(key, cert) - if data.get('chain'): + if data.get("chain"): try: - chain = utils.parse_cert_chain(data['chain']) + chain = utils.parse_cert_chain(data["chain"]) except ValueError: - raise ValidationError("Invalid certificate in 
certificate chain.", field_names=['chain']) + raise ValidationError( + "Invalid certificate in certificate chain.", field_names=["chain"] + ) # Throws ValidationError validators.verify_cert_chain([cert] + chain) @@ -318,8 +351,10 @@ class CertificateNotificationOutputSchema(LemurOutputSchema): name = fields.String() owner = fields.Email() user = fields.Nested(UserNestedOutputSchema) - validity_end = ArrowDateTime(attribute='not_after') - replaced_by = fields.Nested(CertificateNestedOutputSchema, many=True, attribute='replaced') + validity_end = ArrowDateTime(attribute="not_after") + replaced_by = fields.Nested( + CertificateNestedOutputSchema, many=True, attribute="replaced" + ) endpoints = fields.Nested(EndpointNestedOutputSchema, many=True, missing=[]) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 815349ff..51fede4f 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -26,10 +26,14 @@ from lemur.plugins.base import plugins from lemur.roles import service as role_service from lemur.roles.models import Role -csr_created = signals.signal('csr_created', "CSR generated") -csr_imported = signals.signal('csr_imported', "CSR imported from external source") -certificate_issued = signals.signal('certificate_issued', "Authority issued a certificate") -certificate_imported = signals.signal('certificate_imported', "Certificate imported from external source") +csr_created = signals.signal("csr_created", "CSR generated") +csr_imported = signals.signal("csr_imported", "CSR imported from external source") +certificate_issued = signals.signal( + "certificate_issued", "Authority issued a certificate" +) +certificate_imported = signals.signal( + "certificate_imported", "Certificate imported from external source" +) def get(cert_id): @@ -49,7 +53,7 @@ def get_by_name(name): :param name: :return: """ - return database.get(Certificate, name, field='name') + return database.get(Certificate, name, field="name") def get_by_serial(serial): @@ -105,8 +109,12 @@ def get_all_pending_cleaning(source): :param source: :return: """ - return Certificate.query.filter(Certificate.sources.any(id=source.id)) \ - .filter(not_(Certificate.endpoints.any())).filter(Certificate.expired).all() + return ( + Certificate.query.filter(Certificate.sources.any(id=source.id)) + .filter(not_(Certificate.endpoints.any())) + .filter(Certificate.expired) + .all() + ) def get_all_pending_reissue(): @@ -119,9 +127,12 @@ def get_all_pending_reissue(): :return: """ - return Certificate.query.filter(Certificate.rotation == True) \ - .filter(not_(Certificate.replaced.any())) \ - .filter(Certificate.in_rotation_window == True).all() # noqa + return ( + Certificate.query.filter(Certificate.rotation == True) + .filter(not_(Certificate.replaced.any())) + .filter(Certificate.in_rotation_window == True) + .all() + ) # noqa def find_duplicates(cert): @@ -133,10 +144,12 @@ def find_duplicates(cert): :param cert: :return: """ - if cert['chain']: - return Certificate.query.filter_by(body=cert['body'].strip(), chain=cert['chain'].strip()).all() + if cert["chain"]: + return Certificate.query.filter_by( + body=cert["body"].strip(), chain=cert["chain"].strip() + ).all() else: - return Certificate.query.filter_by(body=cert['body'].strip(), chain=None).all() + return Certificate.query.filter_by(body=cert["body"].strip(), chain=None).all() def export(cert, export_plugin): @@ -148,8 +161,10 @@ def export(cert, export_plugin): :param cert: :return: """ - plugin = plugins.get(export_plugin['slug']) - 
return plugin.export(cert.body, cert.chain, cert.private_key, export_plugin['pluginOptions']) + plugin = plugins.get(export_plugin["slug"]) + return plugin.export( + cert.body, cert.chain, cert.private_key, export_plugin["pluginOptions"] + ) def update(cert_id, **kwargs): @@ -168,17 +183,19 @@ def update(cert_id, **kwargs): def create_certificate_roles(**kwargs): # create an role for the owner and assign it - owner_role = role_service.get_by_name(kwargs['owner']) + owner_role = role_service.get_by_name(kwargs["owner"]) if not owner_role: owner_role = role_service.create( - kwargs['owner'], - description="Auto generated role based on owner: {0}".format(kwargs['owner']) + kwargs["owner"], + description="Auto generated role based on owner: {0}".format( + kwargs["owner"] + ), ) # ensure that the authority's owner is also associated with the certificate - if kwargs.get('authority'): - authority_owner_role = role_service.get_by_name(kwargs['authority'].owner) + if kwargs.get("authority"): + authority_owner_role = role_service.get_by_name(kwargs["authority"].owner) return [owner_role, authority_owner_role] return [owner_role] @@ -190,16 +207,16 @@ def mint(**kwargs): Support for multiple authorities is handled by individual plugins. """ - authority = kwargs['authority'] + authority = kwargs["authority"] issuer = plugins.get(authority.plugin_name) # allow the CSR to be specified by the user - if not kwargs.get('csr'): + if not kwargs.get("csr"): csr, private_key = create_csr(**kwargs) csr_created.send(authority=authority, csr=csr) else: - csr = str(kwargs.get('csr')) + csr = str(kwargs.get("csr")) private_key = None csr_imported.send(authority=authority, csr=csr) @@ -220,8 +237,8 @@ def import_certificate(**kwargs): :param kwargs: """ - if not kwargs.get('owner'): - kwargs['owner'] = current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL')[0] + if not kwargs.get("owner"): + kwargs["owner"] = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL")[0] return upload(**kwargs) @@ -232,16 +249,16 @@ def upload(**kwargs): """ roles = create_certificate_roles(**kwargs) - if kwargs.get('roles'): - kwargs['roles'] += roles + if kwargs.get("roles"): + kwargs["roles"] += roles else: - kwargs['roles'] = roles + kwargs["roles"] = roles cert = Certificate(**kwargs) - cert.authority = kwargs.get('authority') + cert.authority = kwargs.get("authority") cert = database.create(cert) - kwargs['creator'].certificates.append(cert) + kwargs["creator"].certificates.append(cert) cert = database.update(cert) certificate_imported.send(certificate=cert, authority=cert.authority) @@ -258,39 +275,45 @@ def create(**kwargs): current_app.logger.error("Exception minting certificate", exc_info=True) sentry.captureException() raise - kwargs['body'] = cert_body - kwargs['private_key'] = private_key - kwargs['chain'] = cert_chain - kwargs['external_id'] = external_id - kwargs['csr'] = csr + kwargs["body"] = cert_body + kwargs["private_key"] = private_key + kwargs["chain"] = cert_chain + kwargs["external_id"] = external_id + kwargs["csr"] = csr roles = create_certificate_roles(**kwargs) - if kwargs.get('roles'): - kwargs['roles'] += roles + if kwargs.get("roles"): + kwargs["roles"] += roles else: - kwargs['roles'] = roles + kwargs["roles"] = roles if cert_body: cert = Certificate(**kwargs) - kwargs['creator'].certificates.append(cert) + kwargs["creator"].certificates.append(cert) else: cert = PendingCertificate(**kwargs) - kwargs['creator'].pending_certificates.append(cert) + kwargs["creator"].pending_certificates.append(cert) - cert.authority 
= kwargs['authority'] + cert.authority = kwargs["authority"] database.commit() if isinstance(cert, Certificate): certificate_issued.send(certificate=cert, authority=cert.authority) - metrics.send('certificate_issued', 'counter', 1, metric_tags=dict(owner=cert.owner, issuer=cert.issuer)) + metrics.send( + "certificate_issued", + "counter", + 1, + metric_tags=dict(owner=cert.owner, issuer=cert.issuer), + ) if isinstance(cert, PendingCertificate): # We need to refresh the pending certificate to avoid "Instance is not bound to a Session; " # "attribute refresh operation cannot proceed" pending_cert = database.session_query(PendingCertificate).get(cert.id) from lemur.common.celery import fetch_acme_cert + if not current_app.config.get("ACME_DISABLE_AUTORESOLVE", False): fetch_acme_cert.apply_async((pending_cert.id,), countdown=5) @@ -306,51 +329,55 @@ def render(args): """ query = database.session_query(Certificate) - time_range = args.pop('time_range') - destination_id = args.pop('destination_id') - notification_id = args.pop('notification_id', None) - show = args.pop('show') + time_range = args.pop("time_range") + destination_id = args.pop("destination_id") + notification_id = args.pop("notification_id", None) + show = args.pop("show") # owner = args.pop('owner') # creator = args.pop('creator') # TODO we should enabling filtering by owner - filt = args.pop('filter') + filt = args.pop("filter") if filt: - terms = filt.split(';') - term = '%{0}%'.format(terms[1]) + terms = filt.split(";") + term = "%{0}%".format(terms[1]) # Exact matches for quotes. Only applies to name, issuer, and cn if terms[1].startswith('"') and terms[1].endswith('"'): term = terms[1][1:-1] - if 'issuer' in terms: + if "issuer" in terms: # we can't rely on issuer being correct in the cert directly so we combine queries - sub_query = database.session_query(Authority.id) \ - .filter(Authority.name.ilike(term)) \ + sub_query = ( + database.session_query(Authority.id) + .filter(Authority.name.ilike(term)) .subquery() + ) query = query.filter( or_( Certificate.issuer.ilike(term), - Certificate.authority_id.in_(sub_query) + Certificate.authority_id.in_(sub_query), ) ) - elif 'destination' in terms: - query = query.filter(Certificate.destinations.any(Destination.id == terms[1])) - elif 'notify' in filt: + elif "destination" in terms: + query = query.filter( + Certificate.destinations.any(Destination.id == terms[1]) + ) + elif "notify" in filt: query = query.filter(Certificate.notify == truthiness(terms[1])) - elif 'active' in filt: + elif "active" in filt: query = query.filter(Certificate.active == truthiness(terms[1])) - elif 'cn' in terms: + elif "cn" in terms: query = query.filter( or_( Certificate.cn.ilike(term), - Certificate.domains.any(Domain.name.ilike(term)) + Certificate.domains.any(Domain.name.ilike(term)), ) ) - elif 'id' in terms: + elif "id" in terms: query = query.filter(Certificate.id == cast(terms[1], Integer)) - elif 'name' in terms: + elif "name" in terms: query = query.filter( or_( Certificate.name.ilike(term), @@ -362,26 +389,35 @@ def render(args): query = database.filter(query, Certificate, terms) if show: - sub_query = database.session_query(Role.name).filter(Role.user_id == args['user'].id).subquery() + sub_query = ( + database.session_query(Role.name) + .filter(Role.user_id == args["user"].id) + .subquery() + ) query = query.filter( or_( - Certificate.user_id == args['user'].id, - Certificate.owner.in_(sub_query) + Certificate.user_id == args["user"].id, Certificate.owner.in_(sub_query) ) ) if 
destination_id: - query = query.filter(Certificate.destinations.any(Destination.id == destination_id)) + query = query.filter( + Certificate.destinations.any(Destination.id == destination_id) + ) if notification_id: - query = query.filter(Certificate.notifications.any(Notification.id == notification_id)) + query = query.filter( + Certificate.notifications.any(Notification.id == notification_id) + ) if time_range: - to = arrow.now().replace(weeks=+time_range).format('YYYY-MM-DD') - now = arrow.now().format('YYYY-MM-DD') - query = query.filter(Certificate.not_after <= to).filter(Certificate.not_after >= now) + to = arrow.now().replace(weeks=+time_range).format("YYYY-MM-DD") + now = arrow.now().format("YYYY-MM-DD") + query = query.filter(Certificate.not_after <= to).filter( + Certificate.not_after >= now + ) - if current_app.config.get('ALLOW_CERT_DELETION', False): + if current_app.config.get("ALLOW_CERT_DELETION", False): query = query.filter(Certificate.deleted == False) # noqa result = database.sort_and_page(query, Certificate, args) @@ -409,18 +445,20 @@ def query_common_name(common_name, args): :param args: :return: """ - owner = args.pop('owner') + owner = args.pop("owner") if not owner: - owner = '%' + owner = "%" # only not expired certificates current_time = arrow.utcnow() - result = Certificate.query.filter(Certificate.cn.ilike(common_name)) \ - .filter(Certificate.owner.ilike(owner))\ - .filter(Certificate.not_after >= current_time.format('YYYY-MM-DD')) \ - .filter(Certificate.rotation.is_(True))\ + result = ( + Certificate.query.filter(Certificate.cn.ilike(common_name)) + .filter(Certificate.owner.ilike(owner)) + .filter(Certificate.not_after >= current_time.format("YYYY-MM-DD")) + .filter(Certificate.rotation.is_(True)) .all() + ) return result @@ -432,62 +470,77 @@ def create_csr(**csr_config): :param csr_config: """ - private_key = generate_private_key(csr_config.get('key_type')) + private_key = generate_private_key(csr_config.get("key_type")) builder = x509.CertificateSigningRequestBuilder() - name_list = [x509.NameAttribute(x509.OID_COMMON_NAME, csr_config['common_name'])] - if current_app.config.get('LEMUR_OWNER_EMAIL_IN_SUBJECT', True): - name_list.append(x509.NameAttribute(x509.OID_EMAIL_ADDRESS, csr_config['owner'])) - if 'organization' in csr_config and csr_config['organization'].strip(): - name_list.append(x509.NameAttribute(x509.OID_ORGANIZATION_NAME, csr_config['organization'])) - if 'organizational_unit' in csr_config and csr_config['organizational_unit'].strip(): - name_list.append(x509.NameAttribute(x509.OID_ORGANIZATIONAL_UNIT_NAME, csr_config['organizational_unit'])) - if 'country' in csr_config and csr_config['country'].strip(): - name_list.append(x509.NameAttribute(x509.OID_COUNTRY_NAME, csr_config['country'])) - if 'state' in csr_config and csr_config['state'].strip(): - name_list.append(x509.NameAttribute(x509.OID_STATE_OR_PROVINCE_NAME, csr_config['state'])) - if 'location' in csr_config and csr_config['location'].strip(): - name_list.append(x509.NameAttribute(x509.OID_LOCALITY_NAME, csr_config['location'])) + name_list = [x509.NameAttribute(x509.OID_COMMON_NAME, csr_config["common_name"])] + if current_app.config.get("LEMUR_OWNER_EMAIL_IN_SUBJECT", True): + name_list.append( + x509.NameAttribute(x509.OID_EMAIL_ADDRESS, csr_config["owner"]) + ) + if "organization" in csr_config and csr_config["organization"].strip(): + name_list.append( + x509.NameAttribute(x509.OID_ORGANIZATION_NAME, csr_config["organization"]) + ) + if ( + "organizational_unit" in 
csr_config + and csr_config["organizational_unit"].strip() + ): + name_list.append( + x509.NameAttribute( + x509.OID_ORGANIZATIONAL_UNIT_NAME, csr_config["organizational_unit"] + ) + ) + if "country" in csr_config and csr_config["country"].strip(): + name_list.append( + x509.NameAttribute(x509.OID_COUNTRY_NAME, csr_config["country"]) + ) + if "state" in csr_config and csr_config["state"].strip(): + name_list.append( + x509.NameAttribute(x509.OID_STATE_OR_PROVINCE_NAME, csr_config["state"]) + ) + if "location" in csr_config and csr_config["location"].strip(): + name_list.append( + x509.NameAttribute(x509.OID_LOCALITY_NAME, csr_config["location"]) + ) builder = builder.subject_name(x509.Name(name_list)) - extensions = csr_config.get('extensions', {}) - critical_extensions = ['basic_constraints', 'sub_alt_names', 'key_usage'] - noncritical_extensions = ['extended_key_usage'] + extensions = csr_config.get("extensions", {}) + critical_extensions = ["basic_constraints", "sub_alt_names", "key_usage"] + noncritical_extensions = ["extended_key_usage"] for k, v in extensions.items(): if v: if k in critical_extensions: - current_app.logger.debug('Adding Critical Extension: {0} {1}'.format(k, v)) - if k == 'sub_alt_names': - if v['names']: - builder = builder.add_extension(v['names'], critical=True) + current_app.logger.debug( + "Adding Critical Extension: {0} {1}".format(k, v) + ) + if k == "sub_alt_names": + if v["names"]: + builder = builder.add_extension(v["names"], critical=True) else: builder = builder.add_extension(v, critical=True) if k in noncritical_extensions: - current_app.logger.debug('Adding Extension: {0} {1}'.format(k, v)) + current_app.logger.debug("Adding Extension: {0} {1}".format(k, v)) builder = builder.add_extension(v, critical=False) - ski = extensions.get('subject_key_identifier', {}) - if ski.get('include_ski', False): + ski = extensions.get("subject_key_identifier", {}) + if ski.get("include_ski", False): builder = builder.add_extension( x509.SubjectKeyIdentifier.from_public_key(private_key.public_key()), - critical=False + critical=False, ) - request = builder.sign( - private_key, hashes.SHA256(), default_backend() - ) + request = builder.sign(private_key, hashes.SHA256(), default_backend()) # serialize our private key and CSR private_key = private_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, # would like to use PKCS8 but AWS ELBs don't like it - encryption_algorithm=serialization.NoEncryption() - ).decode('utf-8') + encryption_algorithm=serialization.NoEncryption(), + ).decode("utf-8") - csr = request.public_bytes( - encoding=serialization.Encoding.PEM - ).decode('utf-8') + csr = request.public_bytes(encoding=serialization.Encoding.PEM).decode("utf-8") return csr, private_key @@ -499,16 +552,19 @@ def stats(**kwargs): :param kwargs: :return: """ - if kwargs.get('metric') == 'not_after': + if kwargs.get("metric") == "not_after": start = arrow.utcnow() end = start.replace(weeks=+32) - items = database.db.session.query(Certificate.issuer, func.count(Certificate.id)) \ - .group_by(Certificate.issuer) \ - .filter(Certificate.not_after <= end.format('YYYY-MM-DD')) \ - .filter(Certificate.not_after >= start.format('YYYY-MM-DD')).all() + items = ( + database.db.session.query(Certificate.issuer, func.count(Certificate.id)) + .group_by(Certificate.issuer) + .filter(Certificate.not_after <= end.format("YYYY-MM-DD")) + .filter(Certificate.not_after >= start.format("YYYY-MM-DD")) + .all() + ) else: - attr = 
getattr(Certificate, kwargs.get('metric')) + attr = getattr(Certificate, kwargs.get("metric")) query = database.db.session.query(attr, func.count(attr)) items = query.group_by(attr).all() @@ -519,7 +575,7 @@ def stats(**kwargs): keys.append(key) values.append(count) - return {'labels': keys, 'values': values} + return {"labels": keys, "values": values} def get_account_number(arn): @@ -566,22 +622,24 @@ def get_certificate_primitives(certificate): certificate via `create`. """ start, end = calculate_reissue_range(certificate.not_before, certificate.not_after) - ser = CertificateInputSchema().load(CertificateOutputSchema().dump(certificate).data) + ser = CertificateInputSchema().load( + CertificateOutputSchema().dump(certificate).data + ) assert not ser.errors, "Error re-serializing certificate: %s" % ser.errors data = ser.data # we can't quite tell if we are using a custom name, as this is an automated process (typically) # we will rely on the Lemur generated name - data.pop('name', None) + data.pop("name", None) # TODO this can be removed once we migrate away from cn - data['cn'] = data['common_name'] + data["cn"] = data["common_name"] # needed until we move off not_* - data['not_before'] = start - data['not_after'] = end - data['validity_start'] = start - data['validity_end'] = end + data["not_before"] = start + data["not_after"] = end + data["validity_start"] = start + data["validity_end"] = end return data @@ -599,13 +657,13 @@ def reissue_certificate(certificate, replace=None, user=None): # We do not want to re-use the CSR when creating a certificate because this defeats the purpose of rotation. del primitives["csr"] if not user: - primitives['creator'] = certificate.user + primitives["creator"] = certificate.user else: - primitives['creator'] = user + primitives["creator"] = user if replace: - primitives['replaces'] = [certificate] + primitives["replaces"] = [certificate] new_cert = create(**primitives) diff --git a/lemur/certificates/utils.py b/lemur/certificates/utils.py index 800e1201..4e6cc4f1 100644 --- a/lemur/certificates/utils.py +++ b/lemur/certificates/utils.py @@ -23,17 +23,18 @@ def get_sans_from_csr(data): """ sub_alt_names = [] try: - request = x509.load_pem_x509_csr(data.encode('utf-8'), default_backend()) + request = x509.load_pem_x509_csr(data.encode("utf-8"), default_backend()) except Exception: - raise ValidationError('CSR presented is not valid.') + raise ValidationError("CSR presented is not valid.") try: - alt_names = request.extensions.get_extension_for_class(x509.SubjectAlternativeName) + alt_names = request.extensions.get_extension_for_class( + x509.SubjectAlternativeName + ) for alt_name in alt_names.value: - sub_alt_names.append({ - 'nameType': type(alt_name).__name__, - 'value': alt_name.value - }) + sub_alt_names.append( + {"nameType": type(alt_name).__name__, "value": alt_name.value} + ) except x509.ExtensionNotFound: pass diff --git a/lemur/certificates/verify.py b/lemur/certificates/verify.py index d42e306c..76c6b521 100644 --- a/lemur/certificates/verify.py +++ b/lemur/certificates/verify.py @@ -29,31 +29,45 @@ def ocsp_verify(cert, cert_path, issuer_chain_path): :param issuer_chain_path: :return bool: True if certificate is valid, False otherwise """ - command = ['openssl', 'x509', '-noout', '-ocsp_uri', '-in', cert_path] + command = ["openssl", "x509", "-noout", "-ocsp_uri", "-in", cert_path] p1 = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) url, err = p1.communicate() if not url: - current_app.logger.debug("No OCSP URL 
in certificate {}".format(cert.serial_number)) + current_app.logger.debug( + "No OCSP URL in certificate {}".format(cert.serial_number) + ) return None - p2 = subprocess.Popen(['openssl', 'ocsp', '-issuer', issuer_chain_path, - '-cert', cert_path, "-url", url.strip()], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) + p2 = subprocess.Popen( + [ + "openssl", + "ocsp", + "-issuer", + issuer_chain_path, + "-cert", + cert_path, + "-url", + url.strip(), + ], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) message, err = p2.communicate() - p_message = message.decode('utf-8') + p_message = message.decode("utf-8") - if 'error' in p_message or 'Error' in p_message: + if "error" in p_message or "Error" in p_message: raise Exception("Got error when parsing OCSP url") - elif 'revoked' in p_message: - current_app.logger.debug("OCSP reports certificate revoked: {}".format(cert.serial_number)) + elif "revoked" in p_message: + current_app.logger.debug( + "OCSP reports certificate revoked: {}".format(cert.serial_number) + ) return False - elif 'good' not in p_message: + elif "good" not in p_message: raise Exception("Did not receive a valid response") return True @@ -73,7 +87,9 @@ def crl_verify(cert, cert_path): x509.OID_CRL_DISTRIBUTION_POINTS ).value except x509.ExtensionNotFound: - current_app.logger.debug("No CRLDP extension in certificate {}".format(cert.serial_number)) + current_app.logger.debug( + "No CRLDP extension in certificate {}".format(cert.serial_number) + ) return None for p in distribution_points: @@ -92,8 +108,9 @@ def crl_verify(cert, cert_path): except ConnectionError: raise Exception("Unable to retrieve CRL: {0}".format(point)) - crl_cache[point] = x509.load_der_x509_crl(response.content, - backend=default_backend()) + crl_cache[point] = x509.load_der_x509_crl( + response.content, backend=default_backend() + ) else: current_app.logger.debug("CRL point is cached {}".format(point)) @@ -110,8 +127,9 @@ def crl_verify(cert, cert_path): except x509.ExtensionNotFound: pass - current_app.logger.debug("CRL reports certificate " - "revoked: {}".format(cert.serial_number)) + current_app.logger.debug( + "CRL reports certificate " "revoked: {}".format(cert.serial_number) + ) return False return True @@ -125,7 +143,7 @@ def verify(cert_path, issuer_chain_path): :param issuer_chain_path: :return: True if valid, False otherwise """ - with open(cert_path, 'rt') as c: + with open(cert_path, "rt") as c: try: cert = parse_certificate(c.read()) except ValueError as e: @@ -154,10 +172,10 @@ def verify_string(cert_string, issuer_string): :return: True if valid, False otherwise """ with mktempfile() as cert_tmp: - with open(cert_tmp, 'w') as f: + with open(cert_tmp, "w") as f: f.write(cert_string) with mktempfile() as issuer_tmp: - with open(issuer_tmp, 'w') as f: + with open(issuer_tmp, "w") as f: f.write(issuer_string) status = verify(cert_tmp, issuer_tmp) return status diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index 48f6d672..61a74a59 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -26,14 +26,14 @@ from lemur.certificates.schemas import ( certificate_upload_input_schema, certificates_output_schema, certificate_export_input_schema, - certificate_edit_input_schema + certificate_edit_input_schema, ) from lemur.roles import service as role_service from lemur.logs import service as log_service -mod = Blueprint('certificates', __name__) +mod = Blueprint("certificates", __name__) api = Api(mod) @@ -128,8 +128,8 @@ class 
CertificatesListValid(AuthenticatedResource): """ parser = paginated_parser.copy() args = parser.parse_args() - args['user'] = g.user - common_name = args['filter'].split(';')[1] + args["user"] = g.user + common_name = args["filter"].split(";")[1] return service.query_common_name(common_name, args) @@ -228,16 +228,18 @@ class CertificatesNameQuery(AuthenticatedResource): """ parser = paginated_parser.copy() - parser.add_argument('timeRange', type=int, dest='time_range', location='args') - parser.add_argument('owner', type=inputs.boolean, location='args') - parser.add_argument('id', type=str, location='args') - parser.add_argument('active', type=inputs.boolean, location='args') - parser.add_argument('destinationId', type=int, dest="destination_id", location='args') - parser.add_argument('creator', type=str, location='args') - parser.add_argument('show', type=str, location='args') + parser.add_argument("timeRange", type=int, dest="time_range", location="args") + parser.add_argument("owner", type=inputs.boolean, location="args") + parser.add_argument("id", type=str, location="args") + parser.add_argument("active", type=inputs.boolean, location="args") + parser.add_argument( + "destinationId", type=int, dest="destination_id", location="args" + ) + parser.add_argument("creator", type=str, location="args") + parser.add_argument("show", type=str, location="args") args = parser.parse_args() - args['user'] = g.user + args["user"] = g.user return service.query_name(certificate_name, args) @@ -336,16 +338,18 @@ class CertificatesList(AuthenticatedResource): """ parser = paginated_parser.copy() - parser.add_argument('timeRange', type=int, dest='time_range', location='args') - parser.add_argument('owner', type=inputs.boolean, location='args') - parser.add_argument('id', type=str, location='args') - parser.add_argument('active', type=inputs.boolean, location='args') - parser.add_argument('destinationId', type=int, dest="destination_id", location='args') - parser.add_argument('creator', type=str, location='args') - parser.add_argument('show', type=str, location='args') + parser.add_argument("timeRange", type=int, dest="time_range", location="args") + parser.add_argument("owner", type=inputs.boolean, location="args") + parser.add_argument("id", type=str, location="args") + parser.add_argument("active", type=inputs.boolean, location="args") + parser.add_argument( + "destinationId", type=int, dest="destination_id", location="args" + ) + parser.add_argument("creator", type=str, location="args") + parser.add_argument("show", type=str, location="args") args = parser.parse_args() - args['user'] = g.user + args["user"] = g.user return service.render(args) @validate_schema(certificate_input_schema, certificate_output_schema) @@ -463,24 +467,31 @@ class CertificatesList(AuthenticatedResource): :statuscode 403: unauthenticated """ - role = role_service.get_by_name(data['authority'].owner) + role = role_service.get_by_name(data["authority"].owner) # all the authority role members should be allowed - roles = [x.name for x in data['authority'].roles] + roles = [x.name for x in data["authority"].roles] # allow "owner" roles by team DL roles.append(role) - authority_permission = AuthorityPermission(data['authority'].id, roles) + authority_permission = AuthorityPermission(data["authority"].id, roles) if authority_permission.can(): - data['creator'] = g.user + data["creator"] = g.user cert = service.create(**data) if isinstance(cert, Certificate): # only log if created, not pending - log_service.create(g.user, 
'create_cert', certificate=cert) + log_service.create(g.user, "create_cert", certificate=cert) return cert - return dict(message="You are not authorized to use the authority: {0}".format(data['authority'].name)), 403 + return ( + dict( + message="You are not authorized to use the authority: {0}".format( + data["authority"].name + ) + ), + 403, + ) class CertificatesUpload(AuthenticatedResource): @@ -583,12 +594,14 @@ class CertificatesUpload(AuthenticatedResource): :statuscode 200: no error """ - data['creator'] = g.user - if data.get('destinations'): - if data.get('private_key'): + data["creator"] = g.user + if data.get("destinations"): + if data.get("private_key"): return service.upload(**data) else: - raise Exception("Private key must be provided in order to upload certificate to AWS") + raise Exception( + "Private key must be provided in order to upload certificate to AWS" + ) return service.upload(**data) @@ -600,10 +613,12 @@ class CertificatesStats(AuthenticatedResource): super(CertificatesStats, self).__init__() def get(self): - self.reqparse.add_argument('metric', type=str, location='args') - self.reqparse.add_argument('range', default=32, type=int, location='args') - self.reqparse.add_argument('destinationId', dest='destination_id', location='args') - self.reqparse.add_argument('active', type=str, default='true', location='args') + self.reqparse.add_argument("metric", type=str, location="args") + self.reqparse.add_argument("range", default=32, type=int, location="args") + self.reqparse.add_argument( + "destinationId", dest="destination_id", location="args" + ) + self.reqparse.add_argument("active", type=str, default="true", location="args") args = self.reqparse.parse_args() @@ -655,12 +670,12 @@ class CertificatePrivateKey(AuthenticatedResource): permission = CertificatePermission(owner_role, [x.name for x in cert.roles]) if not permission.can(): - return dict(message='You are not authorized to view this key'), 403 + return dict(message="You are not authorized to view this key"), 403 - log_service.create(g.current_user, 'key_view', certificate=cert) + log_service.create(g.current_user, "key_view", certificate=cert) response = make_response(jsonify(key=cert.private_key), 200) - response.headers['cache-control'] = 'private, max-age=0, no-cache, no-store' - response.headers['pragma'] = 'no-cache' + response.headers["cache-control"] = "private, max-age=0, no-cache, no-store" + response.headers["pragma"] = "no-cache" return response @@ -850,19 +865,25 @@ class Certificates(AuthenticatedResource): permission = CertificatePermission(owner_role, [x.name for x in cert.roles]) if not permission.can(): - return dict(message='You are not authorized to update this certificate'), 403 + return ( + dict(message="You are not authorized to update this certificate"), + 403, + ) - for destination in data['destinations']: + for destination in data["destinations"]: if destination.plugin.requires_key: if not cert.private_key: - return dict( - message='Unable to add destination: {0}. Certificate does not have required private key.'.format( - destination.label - ) - ), 400 + return ( + dict( + message="Unable to add destination: {0}. 
Certificate does not have required private key.".format( + destination.label + ) + ), + 400, + ) cert = service.update(certificate_id, **data) - log_service.create(g.current_user, 'update_cert', certificate=cert) + log_service.create(g.current_user, "update_cert", certificate=cert) return cert def delete(self, certificate_id, data=None): @@ -891,7 +912,7 @@ class Certificates(AuthenticatedResource): :statuscode 405: certificate deletion is disabled """ - if not current_app.config.get('ALLOW_CERT_DELETION', False): + if not current_app.config.get("ALLOW_CERT_DELETION", False): return dict(message="Certificate deletion is disabled"), 405 cert = service.get(certificate_id) @@ -908,11 +929,14 @@ class Certificates(AuthenticatedResource): permission = CertificatePermission(owner_role, [x.name for x in cert.roles]) if not permission.can(): - return dict(message='You are not authorized to delete this certificate'), 403 + return ( + dict(message="You are not authorized to delete this certificate"), + 403, + ) service.update(certificate_id, deleted=True) - log_service.create(g.current_user, 'delete_cert', certificate=cert) - return 'Certificate deleted', 204 + log_service.create(g.current_user, "delete_cert", certificate=cert) + return "Certificate deleted", 204 class NotificationCertificatesList(AuthenticatedResource): @@ -1012,17 +1036,19 @@ class NotificationCertificatesList(AuthenticatedResource): """ parser = paginated_parser.copy() - parser.add_argument('timeRange', type=int, dest='time_range', location='args') - parser.add_argument('owner', type=inputs.boolean, location='args') - parser.add_argument('id', type=str, location='args') - parser.add_argument('active', type=inputs.boolean, location='args') - parser.add_argument('destinationId', type=int, dest="destination_id", location='args') - parser.add_argument('creator', type=str, location='args') - parser.add_argument('show', type=str, location='args') + parser.add_argument("timeRange", type=int, dest="time_range", location="args") + parser.add_argument("owner", type=inputs.boolean, location="args") + parser.add_argument("id", type=str, location="args") + parser.add_argument("active", type=inputs.boolean, location="args") + parser.add_argument( + "destinationId", type=int, dest="destination_id", location="args" + ) + parser.add_argument("creator", type=str, location="args") + parser.add_argument("show", type=str, location="args") args = parser.parse_args() - args['notification_id'] = notification_id - args['user'] = g.current_user + args["notification_id"] = notification_id + args["user"] = g.current_user return service.render(args) @@ -1195,30 +1221,48 @@ class CertificateExport(AuthenticatedResource): if not cert: return dict(message="Cannot find specified certificate"), 404 - plugin = data['plugin']['plugin_object'] + plugin = data["plugin"]["plugin_object"] if plugin.requires_key: if not cert.private_key: - return dict( - message='Unable to export certificate, plugin: {0} requires a private key but no key was found.'.format( - plugin.slug)), 400 + return ( + dict( + message="Unable to export certificate, plugin: {0} requires a private key but no key was found.".format( + plugin.slug + ) + ), + 400, + ) else: # allow creators if g.current_user != cert.user: owner_role = role_service.get_by_name(cert.owner) - permission = CertificatePermission(owner_role, [x.name for x in cert.roles]) + permission = CertificatePermission( + owner_role, [x.name for x in cert.roles] + ) if not permission.can(): - return dict(message='You are not authorized 
to export this certificate.'), 403 + return ( + dict( + message="You are not authorized to export this certificate." + ), + 403, + ) - options = data['plugin']['plugin_options'] + options = data["plugin"]["plugin_options"] - log_service.create(g.current_user, 'key_view', certificate=cert) - extension, passphrase, data = plugin.export(cert.body, cert.chain, cert.private_key, options) + log_service.create(g.current_user, "key_view", certificate=cert) + extension, passphrase, data = plugin.export( + cert.body, cert.chain, cert.private_key, options + ) # we take a hit in message size when b64 encoding - return dict(extension=extension, passphrase=passphrase, data=base64.b64encode(data).decode('utf-8')) + return dict( + extension=extension, + passphrase=passphrase, + data=base64.b64encode(data).decode("utf-8"), + ) class CertificateRevoke(AuthenticatedResource): @@ -1269,30 +1313,66 @@ class CertificateRevoke(AuthenticatedResource): permission = CertificatePermission(owner_role, [x.name for x in cert.roles]) if not permission.can(): - return dict(message='You are not authorized to revoke this certificate.'), 403 + return ( + dict(message="You are not authorized to revoke this certificate."), + 403, + ) if not cert.external_id: - return dict(message='Cannot revoke certificate. No external id found.'), 400 + return dict(message="Cannot revoke certificate. No external id found."), 400 if cert.endpoints: - return dict(message='Cannot revoke certificate. Endpoints are deployed with the given certificate.'), 403 + return ( + dict( + message="Cannot revoke certificate. Endpoints are deployed with the given certificate." + ), + 403, + ) plugin = plugins.get(cert.authority.plugin_name) plugin.revoke_certificate(cert, data) - log_service.create(g.current_user, 'revoke_cert', certificate=cert) + log_service.create(g.current_user, "revoke_cert", certificate=cert) return dict(id=cert.id) -api.add_resource(CertificateRevoke, '/certificates//revoke', endpoint='revokeCertificate') -api.add_resource(CertificatesNameQuery, '/certificates/name/', endpoint='certificatesNameQuery') -api.add_resource(CertificatesList, '/certificates', endpoint='certificates') -api.add_resource(CertificatesListValid, '/certificates/valid', endpoint='certificatesListValid') -api.add_resource(Certificates, '/certificates/', endpoint='certificate') -api.add_resource(CertificatesStats, '/certificates/stats', endpoint='certificateStats') -api.add_resource(CertificatesUpload, '/certificates/upload', endpoint='certificateUpload') -api.add_resource(CertificatePrivateKey, '/certificates//key', endpoint='privateKeyCertificates') -api.add_resource(CertificateExport, '/certificates//export', endpoint='exportCertificate') -api.add_resource(NotificationCertificatesList, '/notifications//certificates', - endpoint='notificationCertificates') -api.add_resource(CertificatesReplacementsList, '/certificates//replacements', - endpoint='replacements') +api.add_resource( + CertificateRevoke, + "/certificates//revoke", + endpoint="revokeCertificate", +) +api.add_resource( + CertificatesNameQuery, + "/certificates/name/", + endpoint="certificatesNameQuery", +) +api.add_resource(CertificatesList, "/certificates", endpoint="certificates") +api.add_resource( + CertificatesListValid, "/certificates/valid", endpoint="certificatesListValid" +) +api.add_resource( + Certificates, "/certificates/", endpoint="certificate" +) +api.add_resource(CertificatesStats, "/certificates/stats", endpoint="certificateStats") +api.add_resource( + CertificatesUpload, 
"/certificates/upload", endpoint="certificateUpload" +) +api.add_resource( + CertificatePrivateKey, + "/certificates//key", + endpoint="privateKeyCertificates", +) +api.add_resource( + CertificateExport, + "/certificates//export", + endpoint="exportCertificate", +) +api.add_resource( + NotificationCertificatesList, + "/notifications//certificates", + endpoint="notificationCertificates", +) +api.add_resource( + CertificatesReplacementsList, + "/certificates//replacements", + endpoint="replacements", +) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 23eabddb..7eb1bb0d 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -32,8 +32,11 @@ else: def make_celery(app): - celery = Celery(app.import_name, backend=app.config.get('CELERY_RESULT_BACKEND'), - broker=app.config.get('CELERY_BROKER_URL')) + celery = Celery( + app.import_name, + backend=app.config.get("CELERY_RESULT_BACKEND"), + broker=app.config.get("CELERY_BROKER_URL"), + ) celery.conf.update(app.config) TaskBase = celery.Task @@ -53,6 +56,7 @@ celery = make_celery(flask_app) def is_task_active(fun, task_id, args): from celery.task.control import inspect + i = inspect() active_tasks = i.active() for _, tasks in active_tasks.items(): @@ -99,7 +103,7 @@ def fetch_acme_cert(id): # We only care about certs using the acme-issuer plugin for cert in pending_certs: cert_authority = get_authority(cert.authority_id) - if cert_authority.plugin_name == 'acme-issuer': + if cert_authority.plugin_name == "acme-issuer": acme_certs.append(cert) else: wrong_issuer += 1 @@ -112,20 +116,22 @@ def fetch_acme_cert(id): # It's necessary to reload the pending cert due to detached instance: http://sqlalche.me/e/bhk3 pending_cert = pending_certificate_service.get(cert.get("pending_cert").id) if not pending_cert: - log_data["message"] = "Pending certificate doesn't exist anymore. Was it resolved by another process?" + log_data[ + "message" + ] = "Pending certificate doesn't exist anymore. Was it resolved by another process?" 
current_app.logger.error(log_data) continue if real_cert: # If a real certificate was returned from issuer, then create it in Lemur and mark # the pending certificate as resolved - final_cert = pending_certificate_service.create_certificate(pending_cert, real_cert, pending_cert.user) - pending_certificate_service.update( - cert.get("pending_cert").id, - resolved_cert_id=final_cert.id + final_cert = pending_certificate_service.create_certificate( + pending_cert, real_cert, pending_cert.user ) pending_certificate_service.update( - cert.get("pending_cert").id, - resolved=True + cert.get("pending_cert").id, resolved_cert_id=final_cert.id + ) + pending_certificate_service.update( + cert.get("pending_cert").id, resolved=True ) # add metrics to metrics extension new += 1 @@ -139,17 +145,17 @@ def fetch_acme_cert(id): if pending_cert.number_attempts > 4: error_log["message"] = "Deleting pending certificate" - send_pending_failure_notification(pending_cert, notify_owner=pending_cert.notify) + send_pending_failure_notification( + pending_cert, notify_owner=pending_cert.notify + ) # Mark the pending cert as resolved pending_certificate_service.update( - cert.get("pending_cert").id, - resolved=True + cert.get("pending_cert").id, resolved=True ) else: pending_certificate_service.increment_attempt(pending_cert) pending_certificate_service.update( - cert.get("pending_cert").id, - status=str(cert.get("last_error")) + cert.get("pending_cert").id, status=str(cert.get("last_error")) ) # Add failed pending cert task back to queue fetch_acme_cert.delay(id) @@ -161,9 +167,7 @@ def fetch_acme_cert(id): current_app.logger.debug(log_data) print( "[+] Certificates: New: {new} Failed: {failed} Not using ACME: {wrong_issuer}".format( - new=new, - failed=failed, - wrong_issuer=wrong_issuer + new=new, failed=failed, wrong_issuer=wrong_issuer ) ) @@ -175,7 +179,7 @@ def fetch_all_pending_acme_certs(): log_data = { "function": "{}.{}".format(__name__, sys._getframe().f_code.co_name), - "message": "Starting job." 
+ "message": "Starting job.", } current_app.logger.debug(log_data) @@ -183,7 +187,7 @@ def fetch_all_pending_acme_certs(): # We only care about certs using the acme-issuer plugin for cert in pending_certs: cert_authority = get_authority(cert.authority_id) - if cert_authority.plugin_name == 'acme-issuer': + if cert_authority.plugin_name == "acme-issuer": if datetime.now(timezone.utc) - cert.last_updated > timedelta(minutes=5): log_data["message"] = "Triggering job for cert {}".format(cert.name) log_data["cert_name"] = cert.name @@ -195,17 +199,15 @@ def fetch_all_pending_acme_certs(): @celery.task() def remove_old_acme_certs(): """Prune old pending acme certificates from the database""" - log_data = { - "function": "{}.{}".format(__name__, sys._getframe().f_code.co_name) - } - pending_certs = pending_certificate_service.get_pending_certs('all') + log_data = {"function": "{}.{}".format(__name__, sys._getframe().f_code.co_name)} + pending_certs = pending_certificate_service.get_pending_certs("all") # Delete pending certs more than a week old for cert in pending_certs: if datetime.now(timezone.utc) - cert.last_updated > timedelta(days=7): - log_data['pending_cert_id'] = cert.id - log_data['pending_cert_name'] = cert.name - log_data['message'] = "Deleting pending certificate" + log_data["pending_cert_id"] = cert.id + log_data["pending_cert_name"] = cert.name + log_data["message"] = "Deleting pending certificate" current_app.logger.debug(log_data) pending_certificate_service.delete(cert) @@ -218,7 +220,9 @@ def clean_all_sources(): """ sources = validate_sources("all") for source in sources: - current_app.logger.debug("Creating celery task to clean source {}".format(source.label)) + current_app.logger.debug( + "Creating celery task to clean source {}".format(source.label) + ) clean_source.delay(source.label) @@ -242,7 +246,9 @@ def sync_all_sources(): """ sources = validate_sources("all") for source in sources: - current_app.logger.debug("Creating celery task to sync source {}".format(source.label)) + current_app.logger.debug( + "Creating celery task to sync source {}".format(source.label) + ) sync_source.delay(source.label) @@ -277,7 +283,9 @@ def sync_source(source): log_data["message"] = "Error syncing source: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() - metrics.send('sync_source_timeout', 'counter', 1, metric_tags={'source': source}) + metrics.send( + "sync_source_timeout", "counter", 1, metric_tags={"source": source} + ) return log_data["message"] = "Done syncing source" diff --git a/lemur/common/defaults.py b/lemur/common/defaults.py index 6b259f6b..d563dbd0 100644 --- a/lemur/common/defaults.py +++ b/lemur/common/defaults.py @@ -9,18 +9,20 @@ from lemur.extensions import sentry from lemur.constants import SAN_NAMING_TEMPLATE, DEFAULT_NAMING_TEMPLATE -def text_to_slug(value, joiner='-'): +def text_to_slug(value, joiner="-"): """ Normalize a string to a "slug" value, stripping character accents and removing non-alphanum characters. A series of non-alphanumeric characters is replaced with the joiner character. """ # Strip all character accents: decompose Unicode characters and then drop combining chars. - value = ''.join(c for c in unicodedata.normalize('NFKD', value) if not unicodedata.combining(c)) + value = "".join( + c for c in unicodedata.normalize("NFKD", value) if not unicodedata.combining(c) + ) # Replace all remaining non-alphanumeric characters with joiner string. Multiple characters get collapsed into a # single joiner. 
Except, keep 'xn--' used in IDNA domain names as is. - value = re.sub(r'[^A-Za-z0-9.]+(?' + return "" # Try Common Name or fall back to Organization name - attrs = (cert.issuer.get_attributes_for_oid(x509.OID_COMMON_NAME) or - cert.issuer.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME)) + attrs = cert.issuer.get_attributes_for_oid( + x509.OID_COMMON_NAME + ) or cert.issuer.get_attributes_for_oid(x509.OID_ORGANIZATION_NAME) if not attrs: - current_app.logger.error("Unable to get issuer! Cert serial {:x}".format(cert.serial_number)) - return '' + current_app.logger.error( + "Unable to get issuer! Cert serial {:x}".format(cert.serial_number) + ) + return "" - return text_to_slug(attrs[0].value, '') + return text_to_slug(attrs[0].value, "") def not_before(cert): diff --git a/lemur/common/fields.py b/lemur/common/fields.py index 5ab0c6f0..15631832 100644 --- a/lemur/common/fields.py +++ b/lemur/common/fields.py @@ -25,6 +25,7 @@ class Hex(Field): """ A hex formatted string. """ + def _serialize(self, value, attr, obj): if value: value = hex(int(value))[2:].upper() @@ -48,25 +49,25 @@ class ArrowDateTime(Field): """ DATEFORMAT_SERIALIZATION_FUNCS = { - 'iso': utils.isoformat, - 'iso8601': utils.isoformat, - 'rfc': utils.rfcformat, - 'rfc822': utils.rfcformat, + "iso": utils.isoformat, + "iso8601": utils.isoformat, + "rfc": utils.rfcformat, + "rfc822": utils.rfcformat, } DATEFORMAT_DESERIALIZATION_FUNCS = { - 'iso': utils.from_iso, - 'iso8601': utils.from_iso, - 'rfc': utils.from_rfc, - 'rfc822': utils.from_rfc, + "iso": utils.from_iso, + "iso8601": utils.from_iso, + "rfc": utils.from_rfc, + "rfc822": utils.from_rfc, } - DEFAULT_FORMAT = 'iso' + DEFAULT_FORMAT = "iso" localtime = False default_error_messages = { - 'invalid': 'Not a valid datetime.', - 'format': '"{input}" cannot be formatted as a datetime.', + "invalid": "Not a valid datetime.", + "format": '"{input}" cannot be formatted as a datetime.', } def __init__(self, format=None, **kwargs): @@ -89,34 +90,36 @@ class ArrowDateTime(Field): try: return format_func(value, localtime=self.localtime) except (AttributeError, ValueError) as err: - self.fail('format', input=value) + self.fail("format", input=value) else: return value.strftime(self.dateformat) def _deserialize(self, value, attr, data): if not value: # Falsy values, e.g. '', None, [] are not valid - raise self.fail('invalid') + raise self.fail("invalid") self.dateformat = self.dateformat or self.DEFAULT_FORMAT func = self.DATEFORMAT_DESERIALIZATION_FUNCS.get(self.dateformat) if func: try: return arrow.get(func(value)) except (TypeError, AttributeError, ValueError): - raise self.fail('invalid') + raise self.fail("invalid") elif self.dateformat: try: return dt.datetime.strptime(value, self.dateformat) except (TypeError, AttributeError, ValueError): - raise self.fail('invalid') + raise self.fail("invalid") elif utils.dateutil_available: try: return arrow.get(utils.from_datestring(value)) except TypeError: - raise self.fail('invalid') + raise self.fail("invalid") else: - warnings.warn('It is recommended that you install python-dateutil ' - 'for improved datetime deserialization.') - raise self.fail('invalid') + warnings.warn( + "It is recommended that you install python-dateutil " + "for improved datetime deserialization." 
+ ) + raise self.fail("invalid") class KeyUsageExtension(Field): @@ -131,73 +134,75 @@ class KeyUsageExtension(Field): def _serialize(self, value, attr, obj): return { - 'useDigitalSignature': value.digital_signature, - 'useNonRepudiation': value.content_commitment, - 'useKeyEncipherment': value.key_encipherment, - 'useDataEncipherment': value.data_encipherment, - 'useKeyAgreement': value.key_agreement, - 'useKeyCertSign': value.key_cert_sign, - 'useCRLSign': value.crl_sign, - 'useEncipherOnly': value._encipher_only, - 'useDecipherOnly': value._decipher_only + "useDigitalSignature": value.digital_signature, + "useNonRepudiation": value.content_commitment, + "useKeyEncipherment": value.key_encipherment, + "useDataEncipherment": value.data_encipherment, + "useKeyAgreement": value.key_agreement, + "useKeyCertSign": value.key_cert_sign, + "useCRLSign": value.crl_sign, + "useEncipherOnly": value._encipher_only, + "useDecipherOnly": value._decipher_only, } def _deserialize(self, value, attr, data): keyusages = { - 'digital_signature': False, - 'content_commitment': False, - 'key_encipherment': False, - 'data_encipherment': False, - 'key_agreement': False, - 'key_cert_sign': False, - 'crl_sign': False, - 'encipher_only': False, - 'decipher_only': False + "digital_signature": False, + "content_commitment": False, + "key_encipherment": False, + "data_encipherment": False, + "key_agreement": False, + "key_cert_sign": False, + "crl_sign": False, + "encipher_only": False, + "decipher_only": False, } for k, v in value.items(): - if k == 'useDigitalSignature': - keyusages['digital_signature'] = v + if k == "useDigitalSignature": + keyusages["digital_signature"] = v - elif k == 'useNonRepudiation': - keyusages['content_commitment'] = v + elif k == "useNonRepudiation": + keyusages["content_commitment"] = v - elif k == 'useKeyEncipherment': - keyusages['key_encipherment'] = v + elif k == "useKeyEncipherment": + keyusages["key_encipherment"] = v - elif k == 'useDataEncipherment': - keyusages['data_encipherment'] = v + elif k == "useDataEncipherment": + keyusages["data_encipherment"] = v - elif k == 'useKeyCertSign': - keyusages['key_cert_sign'] = v + elif k == "useKeyCertSign": + keyusages["key_cert_sign"] = v - elif k == 'useCRLSign': - keyusages['crl_sign'] = v + elif k == "useCRLSign": + keyusages["crl_sign"] = v - elif k == 'useKeyAgreement': - keyusages['key_agreement'] = v + elif k == "useKeyAgreement": + keyusages["key_agreement"] = v - elif k == 'useEncipherOnly' and v: - keyusages['encipher_only'] = True - keyusages['key_agreement'] = True + elif k == "useEncipherOnly" and v: + keyusages["encipher_only"] = True + keyusages["key_agreement"] = True - elif k == 'useDecipherOnly' and v: - keyusages['decipher_only'] = True - keyusages['key_agreement'] = True + elif k == "useDecipherOnly" and v: + keyusages["decipher_only"] = True + keyusages["key_agreement"] = True - if keyusages['encipher_only'] and keyusages['decipher_only']: - raise ValidationError('A certificate cannot have both Encipher Only and Decipher Only Extended Key Usages.') + if keyusages["encipher_only"] and keyusages["decipher_only"]: + raise ValidationError( + "A certificate cannot have both Encipher Only and Decipher Only Extended Key Usages." 
+ ) return x509.KeyUsage( - digital_signature=keyusages['digital_signature'], - content_commitment=keyusages['content_commitment'], - key_encipherment=keyusages['key_encipherment'], - data_encipherment=keyusages['data_encipherment'], - key_agreement=keyusages['key_agreement'], - key_cert_sign=keyusages['key_cert_sign'], - crl_sign=keyusages['crl_sign'], - encipher_only=keyusages['encipher_only'], - decipher_only=keyusages['decipher_only'] + digital_signature=keyusages["digital_signature"], + content_commitment=keyusages["content_commitment"], + key_encipherment=keyusages["key_encipherment"], + data_encipherment=keyusages["data_encipherment"], + key_agreement=keyusages["key_agreement"], + key_cert_sign=keyusages["key_cert_sign"], + crl_sign=keyusages["crl_sign"], + encipher_only=keyusages["encipher_only"], + decipher_only=keyusages["decipher_only"], ) @@ -216,69 +221,77 @@ class ExtendedKeyUsageExtension(Field): usage_list = {} for usage in usages: if usage == x509.oid.ExtendedKeyUsageOID.CLIENT_AUTH: - usage_list['useClientAuthentication'] = True + usage_list["useClientAuthentication"] = True elif usage == x509.oid.ExtendedKeyUsageOID.SERVER_AUTH: - usage_list['useServerAuthentication'] = True + usage_list["useServerAuthentication"] = True elif usage == x509.oid.ExtendedKeyUsageOID.CODE_SIGNING: - usage_list['useCodeSigning'] = True + usage_list["useCodeSigning"] = True elif usage == x509.oid.ExtendedKeyUsageOID.EMAIL_PROTECTION: - usage_list['useEmailProtection'] = True + usage_list["useEmailProtection"] = True elif usage == x509.oid.ExtendedKeyUsageOID.TIME_STAMPING: - usage_list['useTimestamping'] = True + usage_list["useTimestamping"] = True elif usage == x509.oid.ExtendedKeyUsageOID.OCSP_SIGNING: - usage_list['useOCSPSigning'] = True + usage_list["useOCSPSigning"] = True - elif usage.dotted_string == '1.3.6.1.5.5.7.3.14': - usage_list['useEapOverLAN'] = True + elif usage.dotted_string == "1.3.6.1.5.5.7.3.14": + usage_list["useEapOverLAN"] = True - elif usage.dotted_string == '1.3.6.1.5.5.7.3.13': - usage_list['useEapOverPPP'] = True + elif usage.dotted_string == "1.3.6.1.5.5.7.3.13": + usage_list["useEapOverPPP"] = True - elif usage.dotted_string == '1.3.6.1.4.1.311.20.2.2': - usage_list['useSmartCardLogon'] = True + elif usage.dotted_string == "1.3.6.1.4.1.311.20.2.2": + usage_list["useSmartCardLogon"] = True else: - current_app.logger.warning('Unable to serialize ExtendedKeyUsage with OID: {usage}'.format(usage=usage.dotted_string)) + current_app.logger.warning( + "Unable to serialize ExtendedKeyUsage with OID: {usage}".format( + usage=usage.dotted_string + ) + ) return usage_list def _deserialize(self, value, attr, data): usage_oids = [] for k, v in value.items(): - if k == 'useClientAuthentication' and v: + if k == "useClientAuthentication" and v: usage_oids.append(x509.oid.ExtendedKeyUsageOID.CLIENT_AUTH) - elif k == 'useServerAuthentication' and v: + elif k == "useServerAuthentication" and v: usage_oids.append(x509.oid.ExtendedKeyUsageOID.SERVER_AUTH) - elif k == 'useCodeSigning' and v: + elif k == "useCodeSigning" and v: usage_oids.append(x509.oid.ExtendedKeyUsageOID.CODE_SIGNING) - elif k == 'useEmailProtection' and v: + elif k == "useEmailProtection" and v: usage_oids.append(x509.oid.ExtendedKeyUsageOID.EMAIL_PROTECTION) - elif k == 'useTimestamping' and v: + elif k == "useTimestamping" and v: usage_oids.append(x509.oid.ExtendedKeyUsageOID.TIME_STAMPING) - elif k == 'useOCSPSigning' and v: + elif k == "useOCSPSigning" and v: 
usage_oids.append(x509.oid.ExtendedKeyUsageOID.OCSP_SIGNING) - elif k == 'useEapOverLAN' and v: + elif k == "useEapOverLAN" and v: usage_oids.append(x509.oid.ObjectIdentifier("1.3.6.1.5.5.7.3.14")) - elif k == 'useEapOverPPP' and v: + elif k == "useEapOverPPP" and v: usage_oids.append(x509.oid.ObjectIdentifier("1.3.6.1.5.5.7.3.13")) - elif k == 'useSmartCardLogon' and v: + elif k == "useSmartCardLogon" and v: usage_oids.append(x509.oid.ObjectIdentifier("1.3.6.1.4.1.311.20.2.2")) else: - current_app.logger.warning('Unable to deserialize ExtendedKeyUsage with name: {key}'.format(key=k)) + current_app.logger.warning( + "Unable to deserialize ExtendedKeyUsage with name: {key}".format( + key=k + ) + ) return x509.ExtendedKeyUsage(usage_oids) @@ -294,15 +307,17 @@ class BasicConstraintsExtension(Field): """ def _serialize(self, value, attr, obj): - return {'ca': value.ca, 'path_length': value.path_length} + return {"ca": value.ca, "path_length": value.path_length} def _deserialize(self, value, attr, data): - ca = value.get('ca', False) - path_length = value.get('path_length', None) + ca = value.get("ca", False) + path_length = value.get("path_length", None) if ca: if not isinstance(path_length, (type(None), int)): - raise ValidationError('A CA certificate path_length (for BasicConstraints) must be None or an integer.') + raise ValidationError( + "A CA certificate path_length (for BasicConstraints) must be None or an integer." + ) return x509.BasicConstraints(ca=True, path_length=path_length) else: return x509.BasicConstraints(ca=False, path_length=None) @@ -317,6 +332,7 @@ class SubjectAlternativeNameExtension(Field): :param kwargs: The same keyword arguments that :class:`Field` receives. """ + def _serialize(self, value, attr, obj): general_names = [] name_type = None @@ -326,53 +342,59 @@ class SubjectAlternativeNameExtension(Field): value = name.value if isinstance(name, x509.DNSName): - name_type = 'DNSName' + name_type = "DNSName" elif isinstance(name, x509.IPAddress): if isinstance(value, ipaddress.IPv4Network): - name_type = 'IPNetwork' + name_type = "IPNetwork" else: - name_type = 'IPAddress' + name_type = "IPAddress" value = str(value) elif isinstance(name, x509.UniformResourceIdentifier): - name_type = 'uniformResourceIdentifier' + name_type = "uniformResourceIdentifier" elif isinstance(name, x509.DirectoryName): - name_type = 'directoryName' + name_type = "directoryName" elif isinstance(name, x509.RFC822Name): - name_type = 'rfc822Name' + name_type = "rfc822Name" elif isinstance(name, x509.RegisteredID): - name_type = 'registeredID' + name_type = "registeredID" value = value.dotted_string else: - current_app.logger.warning('Unknown SubAltName type: {name}'.format(name=name)) + current_app.logger.warning( + "Unknown SubAltName type: {name}".format(name=name) + ) continue - general_names.append({'nameType': name_type, 'value': value}) + general_names.append({"nameType": name_type, "value": value}) return general_names def _deserialize(self, value, attr, data): general_names = [] for name in value: - if name['nameType'] == 'DNSName': - validators.sensitive_domain(name['value']) - general_names.append(x509.DNSName(name['value'])) + if name["nameType"] == "DNSName": + validators.sensitive_domain(name["value"]) + general_names.append(x509.DNSName(name["value"])) - elif name['nameType'] == 'IPAddress': - general_names.append(x509.IPAddress(ipaddress.ip_address(name['value']))) + elif name["nameType"] == "IPAddress": + general_names.append( + 
x509.IPAddress(ipaddress.ip_address(name["value"])) + ) - elif name['nameType'] == 'IPNetwork': - general_names.append(x509.IPAddress(ipaddress.ip_network(name['value']))) + elif name["nameType"] == "IPNetwork": + general_names.append( + x509.IPAddress(ipaddress.ip_network(name["value"])) + ) - elif name['nameType'] == 'uniformResourceIdentifier': - general_names.append(x509.UniformResourceIdentifier(name['value'])) + elif name["nameType"] == "uniformResourceIdentifier": + general_names.append(x509.UniformResourceIdentifier(name["value"])) - elif name['nameType'] == 'directoryName': + elif name["nameType"] == "directoryName": # TODO: Need to parse a string in name['value'] like: # 'CN=Common Name, O=Org Name, OU=OrgUnit Name, C=US, ST=ST, L=City/emailAddress=person@example.com' # or @@ -390,26 +412,32 @@ class SubjectAlternativeNameExtension(Field): # general_names.append(x509.DirectoryName(x509.Name(BLAH)))) pass - elif name['nameType'] == 'rfc822Name': - general_names.append(x509.RFC822Name(name['value'])) + elif name["nameType"] == "rfc822Name": + general_names.append(x509.RFC822Name(name["value"])) - elif name['nameType'] == 'registeredID': - general_names.append(x509.RegisteredID(x509.ObjectIdentifier(name['value']))) + elif name["nameType"] == "registeredID": + general_names.append( + x509.RegisteredID(x509.ObjectIdentifier(name["value"])) + ) - elif name['nameType'] == 'otherName': + elif name["nameType"] == "otherName": # This has two inputs (type and value), so it doesn't fit the mold of the rest of these GeneralName entities. # general_names.append(x509.OtherName(name['type'], bytes(name['value']), 'utf-8')) pass - elif name['nameType'] == 'x400Address': + elif name["nameType"] == "x400Address": # The Python Cryptography library doesn't support x400Address types (yet?) pass - elif name['nameType'] == 'EDIPartyName': + elif name["nameType"] == "EDIPartyName": # The Python Cryptography library doesn't support EDIPartyName types (yet?) 
pass else: - current_app.logger.warning('Unable to deserialize SubAltName with type: {name_type}'.format(name_type=name['nameType'])) + current_app.logger.warning( + "Unable to deserialize SubAltName with type: {name_type}".format( + name_type=name["nameType"] + ) + ) return x509.SubjectAlternativeName(general_names) diff --git a/lemur/common/health.py b/lemur/common/health.py index 69df3f0c..7e0a17ff 100644 --- a/lemur/common/health.py +++ b/lemur/common/health.py @@ -10,20 +10,20 @@ from flask import Blueprint from lemur.database import db from lemur.extensions import sentry -mod = Blueprint('healthCheck', __name__) +mod = Blueprint("healthCheck", __name__) -@mod.route('/healthcheck') +@mod.route("/healthcheck") def health(): try: if healthcheck(db): - return 'ok' + return "ok" except Exception: sentry.captureException() - return 'db check failed' + return "db check failed" def healthcheck(db): with db.engine.connect() as connection: - connection.execute('SELECT 1;') + connection.execute("SELECT 1;") return True diff --git a/lemur/common/managers.py b/lemur/common/managers.py index 9f30f216..6ce2608f 100644 --- a/lemur/common/managers.py +++ b/lemur/common/managers.py @@ -52,7 +52,7 @@ class InstanceManager(object): results = [] for cls_path in class_list: - module_name, class_name = cls_path.rsplit('.', 1) + module_name, class_name = cls_path.rsplit(".", 1) try: module = __import__(module_name, {}, {}, class_name) cls = getattr(module, class_name) @@ -62,10 +62,14 @@ class InstanceManager(object): results.append(cls) except InvalidConfiguration as e: - current_app.logger.warning("Plugin '{0}' may not work correctly. {1}".format(class_name, e)) + current_app.logger.warning( + "Plugin '{0}' may not work correctly. {1}".format(class_name, e) + ) except Exception as e: - current_app.logger.exception("Unable to import {0}. Reason: {1}".format(cls_path, e)) + current_app.logger.exception( + "Unable to import {0}. 
Reason: {1}".format(cls_path, e) + ) continue self.cache = results diff --git a/lemur/common/missing.py b/lemur/common/missing.py index 5c7dffac..2f5156df 100644 --- a/lemur/common/missing.py +++ b/lemur/common/missing.py @@ -11,15 +11,15 @@ def convert_validity_years(data): :param data: :return: """ - if data.get('validity_years'): + if data.get("validity_years"): now = arrow.utcnow() - data['validity_start'] = now.isoformat() + data["validity_start"] = now.isoformat() - end = now.replace(years=+int(data['validity_years'])) + end = now.replace(years=+int(data["validity_years"])) - if not current_app.config.get('LEMUR_ALLOW_WEEKEND_EXPIRATION', True): + if not current_app.config.get("LEMUR_ALLOW_WEEKEND_EXPIRATION", True): if is_weekend(end): end = end.replace(days=-2) - data['validity_end'] = end.isoformat() + data["validity_end"] = end.isoformat() return data diff --git a/lemur/common/schema.py b/lemur/common/schema.py index ee765dc4..bfa0a091 100644 --- a/lemur/common/schema.py +++ b/lemur/common/schema.py @@ -22,27 +22,26 @@ class LemurSchema(Schema): """ Base schema from which all grouper schema's inherit """ + __envelope__ = True def under(self, data, many=None): items = [] if many: for i in data: - items.append( - {underscore(key): value for key, value in i.items()} - ) + items.append({underscore(key): value for key, value in i.items()}) return items - return { - underscore(key): value - for key, value in data.items() - } + return {underscore(key): value for key, value in data.items()} def camel(self, data, many=None): items = [] if many: for i in data: items.append( - {camelize(key, uppercase_first_letter=False): value for key, value in i.items()} + { + camelize(key, uppercase_first_letter=False): value + for key, value in i.items() + } ) return items return { @@ -52,16 +51,16 @@ class LemurSchema(Schema): def wrap_with_envelope(self, data, many): if many: - if 'total' in self.context.keys(): - return dict(total=self.context['total'], items=data) + if "total" in self.context.keys(): + return dict(total=self.context["total"], items=data) return data class LemurInputSchema(LemurSchema): @pre_load(pass_many=True) def preprocess(self, data, many): - if isinstance(data, dict) and data.get('owner'): - data['owner'] = data['owner'].lower() + if isinstance(data, dict) and data.get("owner"): + data["owner"] = data["owner"].lower() return self.under(data, many=many) @@ -74,17 +73,17 @@ class LemurOutputSchema(LemurSchema): def unwrap_envelope(self, data, many): if many: - if data['items']: + if data["items"]: if isinstance(data, InstrumentedList) or isinstance(data, list): - self.context['total'] = len(data) + self.context["total"] = len(data) return data else: - self.context['total'] = data['total'] + self.context["total"] = data["total"] else: - self.context['total'] = 0 - data = {'items': []} + self.context["total"] = 0 + data = {"items": []} - return data['items'] + return data["items"] return data @@ -110,11 +109,11 @@ def format_errors(messages): def wrap_errors(messages): - errors = dict(message='Validation Error.') - if messages.get('_schema'): - errors['reasons'] = {'Schema': {'rule': messages['_schema']}} + errors = dict(message="Validation Error.") + if messages.get("_schema"): + errors["reasons"] = {"Schema": {"rule": messages["_schema"]}} else: - errors['reasons'] = format_errors(messages) + errors["reasons"] = format_errors(messages) return errors @@ -123,19 +122,19 @@ def unwrap_pagination(data, output_schema): return data if isinstance(data, dict): - if 'total' in 
data.keys(): - if data.get('total') == 0: + if "total" in data.keys(): + if data.get("total") == 0: return data - marshaled_data = {'total': data['total']} - marshaled_data['items'] = output_schema.dump(data['items'], many=True).data + marshaled_data = {"total": data["total"]} + marshaled_data["items"] = output_schema.dump(data["items"], many=True).data return marshaled_data return output_schema.dump(data).data elif isinstance(data, list): - marshaled_data = {'total': len(data)} - marshaled_data['items'] = output_schema.dump(data, many=True).data + marshaled_data = {"total": len(data)} + marshaled_data["items"] = output_schema.dump(data, many=True).data return marshaled_data return output_schema.dump(data).data @@ -155,7 +154,7 @@ def validate_schema(input_schema, output_schema): if errors: return wrap_errors(errors), 400 - kwargs['data'] = data + kwargs["data"] = data try: resp = f(*args, **kwargs) @@ -173,4 +172,5 @@ def validate_schema(input_schema, output_schema): return unwrap_pagination(resp, output_schema), 200 return decorated_function + return decorator diff --git a/lemur/common/utils.py b/lemur/common/utils.py index 40f828f3..c33722b2 100644 --- a/lemur/common/utils.py +++ b/lemur/common/utils.py @@ -25,22 +25,22 @@ from lemur.exceptions import InvalidConfiguration paginated_parser = RequestParser() -paginated_parser.add_argument('count', type=int, default=10, location='args') -paginated_parser.add_argument('page', type=int, default=1, location='args') -paginated_parser.add_argument('sortDir', type=str, dest='sort_dir', location='args') -paginated_parser.add_argument('sortBy', type=str, dest='sort_by', location='args') -paginated_parser.add_argument('filter', type=str, location='args') -paginated_parser.add_argument('owner', type=str, location='args') +paginated_parser.add_argument("count", type=int, default=10, location="args") +paginated_parser.add_argument("page", type=int, default=1, location="args") +paginated_parser.add_argument("sortDir", type=str, dest="sort_dir", location="args") +paginated_parser.add_argument("sortBy", type=str, dest="sort_by", location="args") +paginated_parser.add_argument("filter", type=str, location="args") +paginated_parser.add_argument("owner", type=str, location="args") def get_psuedo_random_string(): """ Create a random and strongish challenge. 
""" - challenge = ''.join(random.choice(string.ascii_uppercase) for x in range(6)) # noqa - challenge += ''.join(random.choice("~!@#$%^&*()_+") for x in range(6)) # noqa - challenge += ''.join(random.choice(string.ascii_lowercase) for x in range(6)) - challenge += ''.join(random.choice(string.digits) for x in range(6)) # noqa + challenge = "".join(random.choice(string.ascii_uppercase) for x in range(6)) # noqa + challenge += "".join(random.choice("~!@#$%^&*()_+") for x in range(6)) # noqa + challenge += "".join(random.choice(string.ascii_lowercase) for x in range(6)) + challenge += "".join(random.choice(string.digits) for x in range(6)) # noqa return challenge @@ -53,7 +53,7 @@ def parse_certificate(body): """ assert isinstance(body, str) - return x509.load_pem_x509_certificate(body.encode('utf-8'), default_backend()) + return x509.load_pem_x509_certificate(body.encode("utf-8"), default_backend()) def parse_private_key(private_key): @@ -66,7 +66,9 @@ def parse_private_key(private_key): """ assert isinstance(private_key, str) - return load_pem_private_key(private_key.encode('utf8'), password=None, backend=default_backend()) + return load_pem_private_key( + private_key.encode("utf8"), password=None, backend=default_backend() + ) def split_pem(data): @@ -100,14 +102,15 @@ def parse_csr(csr): """ assert isinstance(csr, str) - return x509.load_pem_x509_csr(csr.encode('utf-8'), default_backend()) + return x509.load_pem_x509_csr(csr.encode("utf-8"), default_backend()) def get_authority_key(body): """Returns the authority key for a given certificate in hex format""" parsed_cert = parse_certificate(body) authority_key = parsed_cert.extensions.get_extension_for_class( - x509.AuthorityKeyIdentifier).value.key_identifier + x509.AuthorityKeyIdentifier + ).value.key_identifier return authority_key.hex() @@ -127,20 +130,17 @@ def generate_private_key(key_type): _CURVE_TYPES = { "ECCPRIME192V1": ec.SECP192R1(), "ECCPRIME256V1": ec.SECP256R1(), - "ECCSECP192R1": ec.SECP192R1(), "ECCSECP224R1": ec.SECP224R1(), "ECCSECP256R1": ec.SECP256R1(), "ECCSECP384R1": ec.SECP384R1(), "ECCSECP521R1": ec.SECP521R1(), "ECCSECP256K1": ec.SECP256K1(), - "ECCSECT163K1": ec.SECT163K1(), "ECCSECT233K1": ec.SECT233K1(), "ECCSECT283K1": ec.SECT283K1(), "ECCSECT409K1": ec.SECT409K1(), "ECCSECT571K1": ec.SECT571K1(), - "ECCSECT163R2": ec.SECT163R2(), "ECCSECT233R1": ec.SECT233R1(), "ECCSECT283R1": ec.SECT283R1(), @@ -149,22 +149,20 @@ def generate_private_key(key_type): } if key_type not in CERTIFICATE_KEY_TYPES: - raise Exception("Invalid key type: {key_type}. Supported key types: {choices}".format( - key_type=key_type, - choices=",".join(CERTIFICATE_KEY_TYPES) - )) + raise Exception( + "Invalid key type: {key_type}. 
Supported key types: {choices}".format( + key_type=key_type, choices=",".join(CERTIFICATE_KEY_TYPES) + ) + ) - if 'RSA' in key_type: + if "RSA" in key_type: key_size = int(key_type[3:]) return rsa.generate_private_key( - public_exponent=65537, - key_size=key_size, - backend=default_backend() + public_exponent=65537, key_size=key_size, backend=default_backend() ) - elif 'ECC' in key_type: + elif "ECC" in key_type: return ec.generate_private_key( - curve=_CURVE_TYPES[key_type], - backend=default_backend() + curve=_CURVE_TYPES[key_type], backend=default_backend() ) @@ -184,11 +182,26 @@ def check_cert_signature(cert, issuer_public_key): raise UnsupportedAlgorithm("RSASSA-PSS not supported") else: padder = padding.PKCS1v15() - issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, padder, cert.signature_hash_algorithm) - elif isinstance(issuer_public_key, ec.EllipticCurvePublicKey) and isinstance(ec.ECDSA(cert.signature_hash_algorithm), ec.ECDSA): - issuer_public_key.verify(cert.signature, cert.tbs_certificate_bytes, ec.ECDSA(cert.signature_hash_algorithm)) + issuer_public_key.verify( + cert.signature, + cert.tbs_certificate_bytes, + padder, + cert.signature_hash_algorithm, + ) + elif isinstance(issuer_public_key, ec.EllipticCurvePublicKey) and isinstance( + ec.ECDSA(cert.signature_hash_algorithm), ec.ECDSA + ): + issuer_public_key.verify( + cert.signature, + cert.tbs_certificate_bytes, + ec.ECDSA(cert.signature_hash_algorithm), + ) else: - raise UnsupportedAlgorithm("Unsupported Algorithm '{var}'.".format(var=cert.signature_algorithm_oid._name)) + raise UnsupportedAlgorithm( + "Unsupported Algorithm '{var}'.".format( + var=cert.signature_algorithm_oid._name + ) + ) def is_selfsigned(cert): @@ -224,7 +237,9 @@ def validate_conf(app, required_vars): """ for var in required_vars: if var not in app.config: - raise InvalidConfiguration("Required variable '{var}' is not set in Lemur's conf.".format(var=var)) + raise InvalidConfiguration( + "Required variable '{var}' is not set in Lemur's conf.".format(var=var) + ) # https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/WindowedRangeQuery @@ -243,18 +258,15 @@ def column_windows(session, column, windowsize): be computed. 
""" + def int_for_range(start_id, end_id): if end_id: - return and_( - column >= start_id, - column < end_id - ) + return and_(column >= start_id, column < end_id) else: return column >= start_id q = session.query( - column, - func.row_number().over(order_by=column).label('rownum') + column, func.row_number().over(order_by=column).label("rownum") ).from_self(column) if windowsize > 1: @@ -274,9 +286,7 @@ def column_windows(session, column, windowsize): def windowed_query(q, column, windowsize): """"Break a Query into windows on a given column.""" - for whereclause in column_windows( - q.session, - column, windowsize): + for whereclause in column_windows(q.session, column, windowsize): for row in q.filter(whereclause).order_by(column): yield row @@ -284,7 +294,7 @@ def windowed_query(q, column, windowsize): def truthiness(s): """If input string resembles something truthy then return True, else False.""" - return s.lower() in ('true', 'yes', 'on', 't', '1') + return s.lower() in ("true", "yes", "on", "t", "1") def find_matching_certificates_by_hash(cert, matching_certs): @@ -292,6 +302,8 @@ def find_matching_certificates_by_hash(cert, matching_certs): determine if any of the certificate hashes match and return the matches.""" matching = [] for c in matching_certs: - if parse_certificate(c.body).fingerprint(hashes.SHA256()) == cert.fingerprint(hashes.SHA256()): + if parse_certificate(c.body).fingerprint(hashes.SHA256()) == cert.fingerprint( + hashes.SHA256() + ): matching.append(c) return matching diff --git a/lemur/common/validators.py b/lemur/common/validators.py index 91b831ba..3e6ebcf9 100644 --- a/lemur/common/validators.py +++ b/lemur/common/validators.py @@ -16,7 +16,7 @@ def common_name(value): # Common name could be a domain name, or a human-readable name of the subject (often used in CA names or client # certificates). As a simple heuristic, we assume that human-readable names always include a space. # However, to avoid confusion for humans, we also don't count spaces at the beginning or end of the string. - if ' ' not in value.strip(): + if " " not in value.strip(): return sensitive_domain(value) @@ -30,17 +30,21 @@ def sensitive_domain(domain): # User has permission, no need to check anything return - whitelist = current_app.config.get('LEMUR_WHITELISTED_DOMAINS', []) + whitelist = current_app.config.get("LEMUR_WHITELISTED_DOMAINS", []) if whitelist and not any(re.match(pattern, domain) for pattern in whitelist): - raise ValidationError('Domain {0} does not match whitelisted domain patterns. ' - 'Contact an administrator to issue the certificate.'.format(domain)) + raise ValidationError( + "Domain {0} does not match whitelisted domain patterns. " + "Contact an administrator to issue the certificate.".format(domain) + ) # Avoid circular import. from lemur.domains import service as domain_service if any(d.sensitive for d in domain_service.get_by_name(domain)): - raise ValidationError('Domain {0} has been marked as sensitive. ' - 'Contact an administrator to issue the certificate.'.format(domain)) + raise ValidationError( + "Domain {0} has been marked as sensitive. 
" + "Contact an administrator to issue the certificate.".format(domain) + ) def encoding(oid_encoding): @@ -49,9 +53,13 @@ def encoding(oid_encoding): :param oid_encoding: :return: """ - valid_types = ['b64asn1', 'string', 'ia5string'] + valid_types = ["b64asn1", "string", "ia5string"] if oid_encoding.lower() not in [o_type.lower() for o_type in valid_types]: - raise ValidationError('Invalid Oid Encoding: {0} choose from {1}'.format(oid_encoding, ",".join(valid_types))) + raise ValidationError( + "Invalid Oid Encoding: {0} choose from {1}".format( + oid_encoding, ",".join(valid_types) + ) + ) def sub_alt_type(alt_type): @@ -60,10 +68,23 @@ def sub_alt_type(alt_type): :param alt_type: :return: """ - valid_types = ['DNSName', 'IPAddress', 'uniFormResourceIdentifier', 'directoryName', 'rfc822Name', 'registrationID', - 'otherName', 'x400Address', 'EDIPartyName'] + valid_types = [ + "DNSName", + "IPAddress", + "uniFormResourceIdentifier", + "directoryName", + "rfc822Name", + "registrationID", + "otherName", + "x400Address", + "EDIPartyName", + ] if alt_type.lower() not in [a_type.lower() for a_type in valid_types]: - raise ValidationError('Invalid SubAltName Type: {0} choose from {1}'.format(type, ",".join(valid_types))) + raise ValidationError( + "Invalid SubAltName Type: {0} choose from {1}".format( + type, ",".join(valid_types) + ) + ) def csr(data): @@ -73,16 +94,18 @@ def csr(data): :return: """ try: - request = x509.load_pem_x509_csr(data.encode('utf-8'), default_backend()) + request = x509.load_pem_x509_csr(data.encode("utf-8"), default_backend()) except Exception: - raise ValidationError('CSR presented is not valid.') + raise ValidationError("CSR presented is not valid.") # Validate common name and SubjectAltNames for name in request.subject.get_attributes_for_oid(NameOID.COMMON_NAME): common_name(name.value) try: - alt_names = request.extensions.get_extension_for_class(x509.SubjectAlternativeName) + alt_names = request.extensions.get_extension_for_class( + x509.SubjectAlternativeName + ) for name in alt_names.value.get_values_for_type(x509.DNSName): sensitive_domain(name) @@ -91,26 +114,40 @@ def csr(data): def dates(data): - if not data.get('validity_start') and data.get('validity_end'): - raise ValidationError('If validity start is specified so must validity end.') + if not data.get("validity_start") and data.get("validity_end"): + raise ValidationError("If validity start is specified so must validity end.") - if not data.get('validity_end') and data.get('validity_start'): - raise ValidationError('If validity end is specified so must validity start.') + if not data.get("validity_end") and data.get("validity_start"): + raise ValidationError("If validity end is specified so must validity start.") - if data.get('validity_start') and data.get('validity_end'): - if not current_app.config.get('LEMUR_ALLOW_WEEKEND_EXPIRATION', True): - if is_weekend(data.get('validity_end')): - raise ValidationError('Validity end must not land on a weekend.') + if data.get("validity_start") and data.get("validity_end"): + if not current_app.config.get("LEMUR_ALLOW_WEEKEND_EXPIRATION", True): + if is_weekend(data.get("validity_end")): + raise ValidationError("Validity end must not land on a weekend.") - if not data['validity_start'] < data['validity_end']: - raise ValidationError('Validity start must be before validity end.') + if not data["validity_start"] < data["validity_end"]: + raise ValidationError("Validity start must be before validity end.") - if data.get('authority'): - if 
data.get('validity_start').date() < data['authority'].authority_certificate.not_before.date(): - raise ValidationError('Validity start must not be before {0}'.format(data['authority'].authority_certificate.not_before)) + if data.get("authority"): + if ( + data.get("validity_start").date() + < data["authority"].authority_certificate.not_before.date() + ): + raise ValidationError( + "Validity start must not be before {0}".format( + data["authority"].authority_certificate.not_before + ) + ) - if data.get('validity_end').date() > data['authority'].authority_certificate.not_after.date(): - raise ValidationError('Validity end must not be after {0}'.format(data['authority'].authority_certificate.not_after)) + if ( + data.get("validity_end").date() + > data["authority"].authority_certificate.not_after.date() + ): + raise ValidationError( + "Validity end must not be after {0}".format( + data["authority"].authority_certificate.not_after + ) + ) return data @@ -148,8 +185,13 @@ def verify_cert_chain(certs, error_class=ValidationError): # Avoid circular import. from lemur.common import defaults - raise error_class("Incorrect chain certificate(s) provided: '%s' is not signed by '%s'" - % (defaults.common_name(cert) or 'Unknown', defaults.common_name(issuer))) + raise error_class( + "Incorrect chain certificate(s) provided: '%s' is not signed by '%s'" + % ( + defaults.common_name(cert) or "Unknown", + defaults.common_name(issuer), + ) + ) except UnsupportedAlgorithm as err: current_app.logger.warning("Skipping chain validation: %s", err) diff --git a/lemur/constants.py b/lemur/constants.py index 060ecfed..cc1653cb 100644 --- a/lemur/constants.py +++ b/lemur/constants.py @@ -7,28 +7,28 @@ SAN_NAMING_TEMPLATE = "SAN-{subject}-{issuer}-{not_before}-{not_after}" DEFAULT_NAMING_TEMPLATE = "{subject}-{issuer}-{not_before}-{not_after}" NONSTANDARD_NAMING_TEMPLATE = "{issuer}-{not_before}-{not_after}" -SUCCESS_METRIC_STATUS = 'success' -FAILURE_METRIC_STATUS = 'failure' +SUCCESS_METRIC_STATUS = "success" +FAILURE_METRIC_STATUS = "failure" CERTIFICATE_KEY_TYPES = [ - 'RSA2048', - 'RSA4096', - 'ECCPRIME192V1', - 'ECCPRIME256V1', - 'ECCSECP192R1', - 'ECCSECP224R1', - 'ECCSECP256R1', - 'ECCSECP384R1', - 'ECCSECP521R1', - 'ECCSECP256K1', - 'ECCSECT163K1', - 'ECCSECT233K1', - 'ECCSECT283K1', - 'ECCSECT409K1', - 'ECCSECT571K1', - 'ECCSECT163R2', - 'ECCSECT233R1', - 'ECCSECT283R1', - 'ECCSECT409R1', - 'ECCSECT571R2' + "RSA2048", + "RSA4096", + "ECCPRIME192V1", + "ECCPRIME256V1", + "ECCSECP192R1", + "ECCSECP224R1", + "ECCSECP256R1", + "ECCSECP384R1", + "ECCSECP521R1", + "ECCSECP256K1", + "ECCSECT163K1", + "ECCSECT233K1", + "ECCSECT283K1", + "ECCSECT409K1", + "ECCSECT571K1", + "ECCSECT163R2", + "ECCSECT233R1", + "ECCSECT283R1", + "ECCSECT409R1", + "ECCSECT571R2", ] diff --git a/lemur/database.py b/lemur/database.py index 82fb0423..a9610325 100644 --- a/lemur/database.py +++ b/lemur/database.py @@ -43,7 +43,7 @@ def session_query(model): :param model: sqlalchemy model :return: query object for model """ - return model.query if hasattr(model, 'query') else db.session.query(model) + return model.query if hasattr(model, "query") else db.session.query(model) def create_query(model, kwargs): @@ -77,7 +77,7 @@ def add(model): def get_model_column(model, field): - if field in getattr(model, 'sensitive_fields', ()): + if field in getattr(model, "sensitive_fields", ()): raise AttrNotFound(field) column = model.__table__.columns._data.get(field, None) if column is None: @@ -100,7 +100,7 @@ def find_all(query, model, kwargs): kwargs 
= filter_none(kwargs) for attr, value in kwargs.items(): if not isinstance(value, list): - value = value.split(',') + value = value.split(",") conditions.append(get_model_column(model, attr).in_(value)) @@ -200,7 +200,7 @@ def filter(query, model, terms): :return: """ column = get_model_column(model, underscore(terms[0])) - return query.filter(column.ilike('%{}%'.format(terms[1]))) + return query.filter(column.ilike("%{}%".format(terms[1]))) def sort(query, model, field, direction): @@ -214,7 +214,7 @@ def sort(query, model, field, direction): :param direction: """ column = get_model_column(model, underscore(field)) - return query.order_by(column.desc() if direction == 'desc' else column.asc()) + return query.order_by(column.desc() if direction == "desc" else column.asc()) def paginate(query, page, count): @@ -247,10 +247,10 @@ def update_list(model, model_attr, item_model, items): for i in items: for item in getattr(model, model_attr): - if item.id == i['id']: + if item.id == i["id"]: break else: - getattr(model, model_attr).append(get(item_model, i['id'])) + getattr(model, model_attr).append(get(item_model, i["id"])) return model @@ -276,9 +276,9 @@ def get_count(q): disable_group_by = False if len(q._entities) > 1: # currently support only one entity - raise Exception('only one entity is supported for get_count, got: %s' % q) + raise Exception("only one entity is supported for get_count, got: %s" % q) entity = q._entities[0] - if hasattr(entity, 'column'): + if hasattr(entity, "column"): # _ColumnEntity has column attr - on case: query(Model.column)... col = entity.column if q._group_by and q._distinct: @@ -295,7 +295,11 @@ def get_count(q): count_func = func.count() if q._group_by and not disable_group_by: count_func = count_func.over(None) - count_q = q.options(lazyload('*')).statement.with_only_columns([count_func]).order_by(None) + count_q = ( + q.options(lazyload("*")) + .statement.with_only_columns([count_func]) + .order_by(None) + ) if disable_group_by: count_q = count_q.group_by(None) count = q.session.execute(count_q).scalar() @@ -311,13 +315,13 @@ def sort_and_page(query, model, args): :param args: :return: """ - sort_by = args.pop('sort_by') - sort_dir = args.pop('sort_dir') - page = args.pop('page') - count = args.pop('count') + sort_by = args.pop("sort_by") + sort_dir = args.pop("sort_dir") + page = args.pop("page") + count = args.pop("count") - if args.get('user'): - user = args.pop('user') + if args.get("user"): + user = args.pop("user") query = find_all(query, model, args) diff --git a/lemur/default.conf.py b/lemur/default.conf.py index 217d8371..bd67bf7a 100644 --- a/lemur/default.conf.py +++ b/lemur/default.conf.py @@ -1,6 +1,7 @@ # This is just Python which means you can inherit and tweak settings import os + _basedir = os.path.abspath(os.path.dirname(__file__)) THREADS_PER_PAGE = 8 diff --git a/lemur/defaults/views.py b/lemur/defaults/views.py index 5a573829..b3741b15 100644 --- a/lemur/defaults/views.py +++ b/lemur/defaults/views.py @@ -13,12 +13,13 @@ from lemur.auth.service import AuthenticatedResource from lemur.defaults.schemas import default_output_schema -mod = Blueprint('default', __name__) +mod = Blueprint("default", __name__) api = Api(mod) class LemurDefaults(AuthenticatedResource): """ Defines the 'defaults' endpoint """ + def __init__(self): super(LemurDefaults) @@ -59,17 +60,21 @@ class LemurDefaults(AuthenticatedResource): :statuscode 403: unauthenticated """ - default_authority = get_by_name(current_app.config.get('LEMUR_DEFAULT_AUTHORITY')) + 
default_authority = get_by_name( + current_app.config.get("LEMUR_DEFAULT_AUTHORITY") + ) return dict( - country=current_app.config.get('LEMUR_DEFAULT_COUNTRY'), - state=current_app.config.get('LEMUR_DEFAULT_STATE'), - location=current_app.config.get('LEMUR_DEFAULT_LOCATION'), - organization=current_app.config.get('LEMUR_DEFAULT_ORGANIZATION'), - organizational_unit=current_app.config.get('LEMUR_DEFAULT_ORGANIZATIONAL_UNIT'), - issuer_plugin=current_app.config.get('LEMUR_DEFAULT_ISSUER_PLUGIN'), + country=current_app.config.get("LEMUR_DEFAULT_COUNTRY"), + state=current_app.config.get("LEMUR_DEFAULT_STATE"), + location=current_app.config.get("LEMUR_DEFAULT_LOCATION"), + organization=current_app.config.get("LEMUR_DEFAULT_ORGANIZATION"), + organizational_unit=current_app.config.get( + "LEMUR_DEFAULT_ORGANIZATIONAL_UNIT" + ), + issuer_plugin=current_app.config.get("LEMUR_DEFAULT_ISSUER_PLUGIN"), authority=default_authority, ) -api.add_resource(LemurDefaults, '/defaults', endpoint='default') +api.add_resource(LemurDefaults, "/defaults", endpoint="default") diff --git a/lemur/destinations/models.py b/lemur/destinations/models.py index 192a5f5d..a2575378 100644 --- a/lemur/destinations/models.py +++ b/lemur/destinations/models.py @@ -13,7 +13,7 @@ from lemur.plugins.base import plugins class Destination(db.Model): - __tablename__ = 'destinations' + __tablename__ = "destinations" id = Column(Integer, primary_key=True) label = Column(String(32)) options = Column(JSONType) diff --git a/lemur/destinations/schemas.py b/lemur/destinations/schemas.py index 279889b4..cc46ecd4 100644 --- a/lemur/destinations/schemas.py +++ b/lemur/destinations/schemas.py @@ -30,7 +30,7 @@ class DestinationOutputSchema(LemurOutputSchema): @post_dump def fill_object(self, data): if data: - data['plugin']['pluginOptions'] = data['options'] + data["plugin"]["pluginOptions"] = data["options"] return data diff --git a/lemur/destinations/service.py b/lemur/destinations/service.py index 8e505fce..92162f4b 100644 --- a/lemur/destinations/service.py +++ b/lemur/destinations/service.py @@ -26,10 +26,12 @@ def create(label, plugin_name, options, description=None): """ # remove any sub-plugin objects before try to save the json options for option in options: - if 'plugin' in option['type']: - del option['value']['plugin_object'] + if "plugin" in option["type"]: + del option["value"]["plugin_object"] - destination = Destination(label=label, options=options, plugin_name=plugin_name, description=description) + destination = Destination( + label=label, options=options, plugin_name=plugin_name, description=description + ) current_app.logger.info("Destination: %s created", label) # add the destination as source, to avoid new destinations that are not in source, as long as an AWS destination @@ -85,7 +87,7 @@ def get_by_label(label): :param label: :return: """ - return database.get(Destination, label, field='label') + return database.get(Destination, label, field="label") def get_all(): @@ -99,17 +101,19 @@ def get_all(): def render(args): - filt = args.pop('filter') - certificate_id = args.pop('certificate_id', None) + filt = args.pop("filter") + certificate_id = args.pop("certificate_id", None) if certificate_id: - query = database.session_query(Destination).join(Certificate, Destination.certificate) + query = database.session_query(Destination).join( + Certificate, Destination.certificate + ) query = query.filter(Certificate.id == certificate_id) else: query = database.session_query(Destination) if filt: - terms = filt.split(';') + terms = 
filt.split(";") query = database.filter(query, Destination, terms) return database.sort_and_page(query, Destination, args) @@ -122,9 +126,15 @@ def stats(**kwargs): :param kwargs: :return: """ - items = database.db.session.query(Destination.label, func.count(certificate_destination_associations.c.certificate_id))\ - .join(certificate_destination_associations)\ - .group_by(Destination.label).all() + items = ( + database.db.session.query( + Destination.label, + func.count(certificate_destination_associations.c.certificate_id), + ) + .join(certificate_destination_associations) + .group_by(Destination.label) + .all() + ) keys = [] values = [] @@ -132,4 +142,4 @@ def stats(**kwargs): keys.append(key) values.append(count) - return {'labels': keys, 'values': values} + return {"labels": keys, "values": values} diff --git a/lemur/destinations/views.py b/lemur/destinations/views.py index 7084e8e9..0b0559fe 100644 --- a/lemur/destinations/views.py +++ b/lemur/destinations/views.py @@ -15,15 +15,20 @@ from lemur.auth.permissions import admin_permission from lemur.common.utils import paginated_parser from lemur.common.schema import validate_schema -from lemur.destinations.schemas import destinations_output_schema, destination_input_schema, destination_output_schema +from lemur.destinations.schemas import ( + destinations_output_schema, + destination_input_schema, + destination_output_schema, +) -mod = Blueprint('destinations', __name__) +mod = Blueprint("destinations", __name__) api = Api(mod) class DestinationsList(AuthenticatedResource): """ Defines the 'destinations' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(DestinationsList, self).__init__() @@ -176,7 +181,12 @@ class DestinationsList(AuthenticatedResource): :reqheader Authorization: OAuth token to authenticate :statuscode 200: no error """ - return service.create(data['label'], data['plugin']['slug'], data['plugin']['plugin_options'], data['description']) + return service.create( + data["label"], + data["plugin"]["slug"], + data["plugin"]["plugin_options"], + data["description"], + ) class Destinations(AuthenticatedResource): @@ -325,16 +335,22 @@ class Destinations(AuthenticatedResource): :reqheader Authorization: OAuth token to authenticate :statuscode 200: no error """ - return service.update(destination_id, data['label'], data['plugin']['plugin_options'], data['description']) + return service.update( + destination_id, + data["label"], + data["plugin"]["plugin_options"], + data["description"], + ) @admin_permission.require(http_exception=403) def delete(self, destination_id): service.delete(destination_id) - return {'result': True} + return {"result": True} class CertificateDestinations(AuthenticatedResource): """ Defines the 'certificate/', endpoint='destination') -api.add_resource(CertificateDestinations, '/certificates//destinations', - endpoint='certificateDestinations') -api.add_resource(DestinationsStats, '/destinations/stats', endpoint='destinationStats') +api.add_resource(DestinationsList, "/destinations", endpoint="destinations") +api.add_resource( + Destinations, "/destinations/", endpoint="destination" +) +api.add_resource( + CertificateDestinations, + "/certificates//destinations", + endpoint="certificateDestinations", +) +api.add_resource(DestinationsStats, "/destinations/stats", endpoint="destinationStats") diff --git a/lemur/dns_providers/cli.py b/lemur/dns_providers/cli.py index 159bdaa0..72f9c874 100644 --- a/lemur/dns_providers/cli.py +++ b/lemur/dns_providers/cli.py @@ -5,7 +5,9 @@ from 
lemur.dns_providers.service import get_all_dns_providers, set_domains from lemur.extensions import metrics from lemur.plugins.base import plugins -manager = Manager(usage="Iterates through all DNS providers and sets DNS zones in the database.") +manager = Manager( + usage="Iterates through all DNS providers and sets DNS zones in the database." +) @manager.command @@ -27,5 +29,5 @@ def get_all_zones(): status = SUCCESS_METRIC_STATUS - metrics.send('get_all_zones', 'counter', 1, metric_tags={'status': status}) + metrics.send("get_all_zones", "counter", 1, metric_tags={"status": status}) print("[+] Done with dns provider zone lookup and configuration.") diff --git a/lemur/dns_providers/models.py b/lemur/dns_providers/models.py index 435a2398..eb8cdff9 100644 --- a/lemur/dns_providers/models.py +++ b/lemur/dns_providers/models.py @@ -9,22 +9,23 @@ from lemur.utils import Vault class DnsProvider(db.Model): - __tablename__ = 'dns_providers' - id = Column( - Integer(), - primary_key=True, - ) + __tablename__ = "dns_providers" + id = Column(Integer(), primary_key=True) name = Column(String(length=256), unique=True, nullable=True) description = Column(Text(), nullable=True) provider_type = Column(String(length=256), nullable=True) credentials = Column(Vault, nullable=True) api_endpoint = Column(String(length=256), nullable=True) - date_created = Column(ArrowType(), server_default=text('now()'), nullable=False) + date_created = Column(ArrowType(), server_default=text("now()"), nullable=False) status = Column(String(length=128), nullable=True) options = Column(JSON, nullable=True) domains = Column(JSON, nullable=True) - certificates = relationship("Certificate", backref='dns_provider', foreign_keys='Certificate.dns_provider_id', - lazy='dynamic') + certificates = relationship( + "Certificate", + backref="dns_provider", + foreign_keys="Certificate.dns_provider_id", + lazy="dynamic", + ) def __init__(self, name, description, provider_type, credentials): self.name = name diff --git a/lemur/dns_providers/service.py b/lemur/dns_providers/service.py index bf50bba1..ec9fa0de 100644 --- a/lemur/dns_providers/service.py +++ b/lemur/dns_providers/service.py @@ -49,7 +49,9 @@ def get_friendly(dns_provider_id): } if dns_provider.provider_type == "route53": - dns_provider_friendly["account_id"] = json.loads(dns_provider.credentials).get("account_id") + dns_provider_friendly["account_id"] = json.loads(dns_provider.credentials).get( + "account_id" + ) return dns_provider_friendly @@ -64,40 +66,40 @@ def delete(dns_provider_id): def get_types(): provider_config = current_app.config.get( - 'ACME_DNS_PROVIDER_TYPES', - {"items": [ - { - 'name': 'route53', - 'requirements': [ - { - 'name': 'account_id', - 'type': 'int', - 'required': True, - 'helpMessage': 'AWS Account number' - }, - ] - }, - { - 'name': 'cloudflare', - 'requirements': [ - { - 'name': 'email', - 'type': 'str', - 'required': True, - 'helpMessage': 'Cloudflare Email' - }, - { - 'name': 'key', - 'type': 'str', - 'required': True, - 'helpMessage': 'Cloudflare Key' - }, - ] - }, - { - 'name': 'dyn', - }, - ]} + "ACME_DNS_PROVIDER_TYPES", + { + "items": [ + { + "name": "route53", + "requirements": [ + { + "name": "account_id", + "type": "int", + "required": True, + "helpMessage": "AWS Account number", + } + ], + }, + { + "name": "cloudflare", + "requirements": [ + { + "name": "email", + "type": "str", + "required": True, + "helpMessage": "Cloudflare Email", + }, + { + "name": "key", + "type": "str", + "required": True, + "helpMessage": "Cloudflare Key", + }, 
+ ], + }, + {"name": "dyn"}, + ] + }, ) if not provider_config: raise Exception("No DNS Provider configuration specified.") diff --git a/lemur/dns_providers/views.py b/lemur/dns_providers/views.py index 1f5b3164..d470aa2f 100644 --- a/lemur/dns_providers/views.py +++ b/lemur/dns_providers/views.py @@ -13,9 +13,12 @@ from lemur.auth.service import AuthenticatedResource from lemur.common.schema import validate_schema from lemur.common.utils import paginated_parser from lemur.dns_providers import service -from lemur.dns_providers.schemas import dns_provider_output_schema, dns_provider_input_schema +from lemur.dns_providers.schemas import ( + dns_provider_output_schema, + dns_provider_input_schema, +) -mod = Blueprint('dns_providers', __name__) +mod = Blueprint("dns_providers", __name__) api = Api(mod) @@ -71,12 +74,12 @@ class DnsProvidersList(AuthenticatedResource): """ parser = paginated_parser.copy() - parser.add_argument('dns_provider_id', type=int, location='args') - parser.add_argument('name', type=str, location='args') - parser.add_argument('type', type=str, location='args') + parser.add_argument("dns_provider_id", type=int, location="args") + parser.add_argument("name", type=str, location="args") + parser.add_argument("type", type=str, location="args") args = parser.parse_args() - args['user'] = g.user + args["user"] = g.user return service.render(args) @validate_schema(dns_provider_input_schema, None) @@ -152,7 +155,7 @@ class DnsProviders(AuthenticatedResource): @admin_permission.require(http_exception=403) def delete(self, dns_provider_id): service.delete(dns_provider_id) - return {'result': True} + return {"result": True} class DnsProviderOptions(AuthenticatedResource): @@ -166,6 +169,10 @@ class DnsProviderOptions(AuthenticatedResource): return service.get_types() -api.add_resource(DnsProvidersList, '/dns_providers', endpoint='dns_providers') -api.add_resource(DnsProviders, '/dns_providers/', endpoint='dns_provider') -api.add_resource(DnsProviderOptions, '/dns_provider_options', endpoint='dns_provider_options') +api.add_resource(DnsProvidersList, "/dns_providers", endpoint="dns_providers") +api.add_resource( + DnsProviders, "/dns_providers/", endpoint="dns_provider" +) +api.add_resource( + DnsProviderOptions, "/dns_provider_options", endpoint="dns_provider_options" +) diff --git a/lemur/domains/models.py b/lemur/domains/models.py index 05fccd9c..791e74de 100644 --- a/lemur/domains/models.py +++ b/lemur/domains/models.py @@ -13,11 +13,14 @@ from lemur.database import db class Domain(db.Model): - __tablename__ = 'domains' + __tablename__ = "domains" __table_args__ = ( - Index('ix_domains_name_gin', "name", - postgresql_ops={"name": "gin_trgm_ops"}, - postgresql_using='gin'), + Index( + "ix_domains_name_gin", + "name", + postgresql_ops={"name": "gin_trgm_ops"}, + postgresql_using="gin", + ), ) id = Column(Integer, primary_key=True) name = Column(String(256), index=True) diff --git a/lemur/domains/service.py b/lemur/domains/service.py index c9b8f759..8a581bfd 100644 --- a/lemur/domains/service.py +++ b/lemur/domains/service.py @@ -77,11 +77,11 @@ def render(args): :return: """ query = database.session_query(Domain) - filt = args.pop('filter') - certificate_id = args.pop('certificate_id', None) + filt = args.pop("filter") + certificate_id = args.pop("certificate_id", None) if filt: - terms = filt.split(';') + terms = filt.split(";") query = database.filter(query, Domain, terms) if certificate_id: diff --git a/lemur/domains/views.py b/lemur/domains/views.py index db73f5cd..a3e0cdff 
100644 --- a/lemur/domains/views.py +++ b/lemur/domains/views.py @@ -17,14 +17,19 @@ from lemur.auth.permissions import SensitiveDomainPermission from lemur.common.schema import validate_schema from lemur.common.utils import paginated_parser -from lemur.domains.schemas import domain_input_schema, domain_output_schema, domains_output_schema +from lemur.domains.schemas import ( + domain_input_schema, + domain_output_schema, + domains_output_schema, +) -mod = Blueprint('domains', __name__) +mod = Blueprint("domains", __name__) api = Api(mod) class DomainsList(AuthenticatedResource): """ Defines the 'domains' endpoint """ + def __init__(self): super(DomainsList, self).__init__() @@ -123,7 +128,7 @@ class DomainsList(AuthenticatedResource): :statuscode 200: no error :statuscode 403: unauthenticated """ - return service.create(data['name'], data['sensitive']) + return service.create(data["name"], data["sensitive"]) class Domains(AuthenticatedResource): @@ -205,13 +210,14 @@ class Domains(AuthenticatedResource): :statuscode 403: unauthenticated """ if SensitiveDomainPermission().can(): - return service.update(domain_id, data['name'], data['sensitive']) + return service.update(domain_id, data["name"], data["sensitive"]) - return dict(message='You are not authorized to modify this domain'), 403 + return dict(message="You are not authorized to modify this domain"), 403 class CertificateDomains(AuthenticatedResource): """ Defines the 'domains' endpoint """ + def __init__(self): super(CertificateDomains, self).__init__() @@ -265,10 +271,14 @@ class CertificateDomains(AuthenticatedResource): """ parser = paginated_parser.copy() args = parser.parse_args() - args['certificate_id'] = certificate_id + args["certificate_id"] = certificate_id return service.render(args) -api.add_resource(DomainsList, '/domains', endpoint='domains') -api.add_resource(Domains, '/domains/', endpoint='domain') -api.add_resource(CertificateDomains, '/certificates//domains', endpoint='certificateDomains') +api.add_resource(DomainsList, "/domains", endpoint="domains") +api.add_resource(Domains, "/domains/", endpoint="domain") +api.add_resource( + CertificateDomains, + "/certificates//domains", + endpoint="certificateDomains", +) diff --git a/lemur/endpoints/cli.py b/lemur/endpoints/cli.py index 59496930..99f8c342 100644 --- a/lemur/endpoints/cli.py +++ b/lemur/endpoints/cli.py @@ -21,7 +21,14 @@ from lemur.endpoints.models import Endpoint manager = Manager(usage="Handles all endpoint related tasks.") -@manager.option('-ttl', '--time-to-live', type=int, dest='ttl', default=2, help='Time in hours, which endpoint has not been refreshed to remove the endpoint.') +@manager.option( + "-ttl", + "--time-to-live", + type=int, + dest="ttl", + default=2, + help="Time in hours, which endpoint has not been refreshed to remove the endpoint.", +) def expire(ttl): """ Removed all endpoints that have not been recently updated. @@ -31,12 +38,18 @@ def expire(ttl): try: now = arrow.utcnow() expiration = now - timedelta(hours=ttl) - endpoints = database.session_query(Endpoint).filter(cast(Endpoint.last_updated, ArrowType) <= expiration) + endpoints = database.session_query(Endpoint).filter( + cast(Endpoint.last_updated, ArrowType) <= expiration + ) for endpoint in endpoints: - print("[!] Expiring endpoint: {name} Last Updated: {last_updated}".format(name=endpoint.name, last_updated=endpoint.last_updated)) + print( + "[!] 
Expiring endpoint: {name} Last Updated: {last_updated}".format( + name=endpoint.name, last_updated=endpoint.last_updated + ) + ) database.delete(endpoint) - metrics.send('endpoint_expired', 'counter', 1) + metrics.send("endpoint_expired", "counter", 1) print("[+] Finished expiration.") except Exception as e: diff --git a/lemur/endpoints/models.py b/lemur/endpoints/models.py index b5823327..6e44fe71 100644 --- a/lemur/endpoints/models.py +++ b/lemur/endpoints/models.py @@ -20,15 +20,11 @@ from lemur.database import db from lemur.models import policies_ciphers -BAD_CIPHERS = [ - 'Protocol-SSLv3', - 'Protocol-SSLv2', - 'Protocol-TLSv1' -] +BAD_CIPHERS = ["Protocol-SSLv3", "Protocol-SSLv2", "Protocol-TLSv1"] class Cipher(db.Model): - __tablename__ = 'ciphers' + __tablename__ = "ciphers" id = Column(Integer, primary_key=True) name = Column(String(128), nullable=False) @@ -38,23 +34,18 @@ class Cipher(db.Model): @deprecated.expression def deprecated(cls): - return case( - [ - (cls.name in BAD_CIPHERS, True) - ], - else_=False - ) + return case([(cls.name in BAD_CIPHERS, True)], else_=False) class Policy(db.Model): - ___tablename__ = 'policies' + ___tablename__ = "policies" id = Column(Integer, primary_key=True) name = Column(String(128), nullable=True) - ciphers = relationship('Cipher', secondary=policies_ciphers, backref='policy') + ciphers = relationship("Cipher", secondary=policies_ciphers, backref="policy") class Endpoint(db.Model): - __tablename__ = 'endpoints' + __tablename__ = "endpoints" id = Column(Integer, primary_key=True) owner = Column(String(128)) name = Column(String(128)) @@ -62,16 +53,18 @@ class Endpoint(db.Model): type = Column(String(128)) active = Column(Boolean, default=True) port = Column(Integer) - policy_id = Column(Integer, ForeignKey('policy.id')) - policy = relationship('Policy', backref='endpoint') - certificate_id = Column(Integer, ForeignKey('certificates.id')) - source_id = Column(Integer, ForeignKey('sources.id')) + policy_id = Column(Integer, ForeignKey("policy.id")) + policy = relationship("Policy", backref="endpoint") + certificate_id = Column(Integer, ForeignKey("certificates.id")) + source_id = Column(Integer, ForeignKey("sources.id")) sensitive = Column(Boolean, default=False) - source = relationship('Source', back_populates='endpoints') + source = relationship("Source", back_populates="endpoints") last_updated = Column(ArrowType, default=arrow.utcnow, nullable=False) - date_created = Column(ArrowType, default=arrow.utcnow, onupdate=arrow.utcnow, nullable=False) + date_created = Column( + ArrowType, default=arrow.utcnow, onupdate=arrow.utcnow, nullable=False + ) - replaced = association_proxy('certificate', 'replaced') + replaced = association_proxy("certificate", "replaced") @property def issues(self): @@ -79,13 +72,30 @@ class Endpoint(db.Model): for cipher in self.policy.ciphers: if cipher.deprecated: - issues.append({'name': 'deprecated cipher', 'value': '{0} has been deprecated consider removing it.'.format(cipher.name)}) + issues.append( + { + "name": "deprecated cipher", + "value": "{0} has been deprecated consider removing it.".format( + cipher.name + ), + } + ) if self.certificate.expired: - issues.append({'name': 'expired certificate', 'value': 'There is an expired certificate attached to this endpoint consider replacing it.'}) + issues.append( + { + "name": "expired certificate", + "value": "There is an expired certificate attached to this endpoint consider replacing it.", + } + ) if self.certificate.revoked: - issues.append({'name': 'revoked', 
'value': 'There is a revoked certificate attached to this endpoint consider replacing it.'}) + issues.append( + { + "name": "revoked", + "value": "There is a revoked certificate attached to this endpoint consider replacing it.", + } + ) return issues diff --git a/lemur/endpoints/service.py b/lemur/endpoints/service.py index d14174df..2a737858 100644 --- a/lemur/endpoints/service.py +++ b/lemur/endpoints/service.py @@ -46,7 +46,7 @@ def get_by_name(name): :param name: :return: """ - return database.get(Endpoint, name, field='name') + return database.get(Endpoint, name, field="name") def get_by_dnsname(dnsname): @@ -56,7 +56,7 @@ def get_by_dnsname(dnsname): :param dnsname: :return: """ - return database.get(Endpoint, dnsname, field='dnsname') + return database.get(Endpoint, dnsname, field="dnsname") def get_by_dnsname_and_port(dnsname, port): @@ -66,7 +66,11 @@ def get_by_dnsname_and_port(dnsname, port): :param port: :return: """ - return Endpoint.query.filter(Endpoint.dnsname == dnsname).filter(Endpoint.port == port).scalar() + return ( + Endpoint.query.filter(Endpoint.dnsname == dnsname) + .filter(Endpoint.port == port) + .scalar() + ) def get_by_source(source_label): @@ -95,12 +99,14 @@ def create(**kwargs): """ endpoint = Endpoint(**kwargs) database.create(endpoint) - metrics.send('endpoint_added', 'counter', 1, metric_tags={'source': endpoint.source.label}) + metrics.send( + "endpoint_added", "counter", 1, metric_tags={"source": endpoint.source.label} + ) return endpoint def get_or_create_policy(**kwargs): - policy = database.get(Policy, kwargs['name'], field='name') + policy = database.get(Policy, kwargs["name"], field="name") if not policy: policy = Policy(**kwargs) @@ -110,7 +116,7 @@ def get_or_create_policy(**kwargs): def get_or_create_cipher(**kwargs): - cipher = database.get(Cipher, kwargs['name'], field='name') + cipher = database.get(Cipher, kwargs["name"], field="name") if not cipher: cipher = Cipher(**kwargs) @@ -122,11 +128,13 @@ def get_or_create_cipher(**kwargs): def update(endpoint_id, **kwargs): endpoint = database.get(Endpoint, endpoint_id) - endpoint.policy = kwargs['policy'] - endpoint.certificate = kwargs['certificate'] - endpoint.source = kwargs['source'] + endpoint.policy = kwargs["policy"] + endpoint.certificate = kwargs["certificate"] + endpoint.source = kwargs["source"] endpoint.last_updated = arrow.utcnow() - metrics.send('endpoint_updated', 'counter', 1, metric_tags={'source': endpoint.source.label}) + metrics.send( + "endpoint_updated", "counter", 1, metric_tags={"source": endpoint.source.label} + ) database.update(endpoint) return endpoint @@ -138,19 +146,17 @@ def render(args): :return: """ query = database.session_query(Endpoint) - filt = args.pop('filter') + filt = args.pop("filter") if filt: - terms = filt.split(';') - if 'active' in filt: # this is really weird but strcmp seems to not work here?? + terms = filt.split(";") + if "active" in filt: # this is really weird but strcmp seems to not work here?? 
query = query.filter(Endpoint.active == truthiness(terms[1])) - elif 'port' in filt: - if terms[1] != 'null': # ng-table adds 'null' if a number is removed + elif "port" in filt: + if terms[1] != "null": # ng-table adds 'null' if a number is removed query = query.filter(Endpoint.port == terms[1]) - elif 'ciphers' in filt: - query = query.filter( - Cipher.name == terms[1] - ) + elif "ciphers" in filt: + query = query.filter(Cipher.name == terms[1]) else: query = database.filter(query, Endpoint, terms) @@ -164,7 +170,7 @@ def stats(**kwargs): :param kwargs: :return: """ - attr = getattr(Endpoint, kwargs.get('metric')) + attr = getattr(Endpoint, kwargs.get("metric")) query = database.db.session.query(attr, func.count(attr)) items = query.group_by(attr).all() @@ -175,4 +181,4 @@ def stats(**kwargs): keys.append(key) values.append(count) - return {'labels': keys, 'values': values} + return {"labels": keys, "values": values} diff --git a/lemur/endpoints/views.py b/lemur/endpoints/views.py index 6509f056..9f469a6b 100644 --- a/lemur/endpoints/views.py +++ b/lemur/endpoints/views.py @@ -16,12 +16,13 @@ from lemur.endpoints import service from lemur.endpoints.schemas import endpoint_output_schema, endpoints_output_schema -mod = Blueprint('endpoints', __name__) +mod = Blueprint("endpoints", __name__) api = Api(mod) class EndpointsList(AuthenticatedResource): """ Defines the 'endpoints' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(EndpointsList, self).__init__() @@ -63,7 +64,7 @@ class EndpointsList(AuthenticatedResource): """ parser = paginated_parser.copy() args = parser.parse_args() - args['user'] = g.current_user + args["user"] = g.current_user return service.render(args) @@ -103,5 +104,5 @@ class Endpoints(AuthenticatedResource): return service.get(endpoint_id) -api.add_resource(EndpointsList, '/endpoints', endpoint='endpoints') -api.add_resource(Endpoints, '/endpoints/', endpoint='endpoint') +api.add_resource(EndpointsList, "/endpoints", endpoint="endpoints") +api.add_resource(Endpoints, "/endpoints/", endpoint="endpoint") diff --git a/lemur/exceptions.py b/lemur/exceptions.py index d392fe5d..98e216bb 100644 --- a/lemur/exceptions.py +++ b/lemur/exceptions.py @@ -21,7 +21,9 @@ class DuplicateError(LemurException): class InvalidListener(LemurException): def __str__(self): - return repr("Invalid listener, ensure you select a certificate if you are using a secure protocol") + return repr( + "Invalid listener, ensure you select a certificate if you are using a secure protocol" + ) class AttrNotFound(LemurException): diff --git a/lemur/extensions.py b/lemur/extensions.py index a54df6c7..24c4c814 100644 --- a/lemur/extensions.py +++ b/lemur/extensions.py @@ -15,25 +15,33 @@ class SQLAlchemy(SA): db = SQLAlchemy() from flask_migrate import Migrate + migrate = Migrate() from flask_bcrypt import Bcrypt + bcrypt = Bcrypt() from flask_principal import Principal + principal = Principal(use_sessions=False) from flask_mail import Mail + smtp_mail = Mail() from lemur.metrics import Metrics + metrics = Metrics() from raven.contrib.flask import Sentry + sentry = Sentry() from blinker import Namespace + signals = Namespace() from flask_cors import CORS + cors = CORS() diff --git a/lemur/factory.py b/lemur/factory.py index c2719e9b..b4066e78 100644 --- a/lemur/factory.py +++ b/lemur/factory.py @@ -24,9 +24,7 @@ from lemur.common.health import mod as health from lemur.extensions import db, migrate, principal, smtp_mail, metrics, sentry, cors -DEFAULT_BLUEPRINTS = ( - health, -) 
+DEFAULT_BLUEPRINTS = (health,) API_VERSION = 1 @@ -71,16 +69,20 @@ def from_file(file_path, silent=False): :param file_path: :param silent: """ - d = imp.new_module('config') + d = imp.new_module("config") d.__file__ = file_path try: with open(file_path) as config_file: - exec(compile(config_file.read(), # nosec: config file safe - file_path, 'exec'), d.__dict__) + exec( + compile( + config_file.read(), file_path, "exec" # nosec: config file safe + ), + d.__dict__, + ) except IOError as e: if silent and e.errno in (errno.ENOENT, errno.EISDIR): return False - e.strerror = 'Unable to load configuration file (%s)' % e.strerror + e.strerror = "Unable to load configuration file (%s)" % e.strerror raise return d @@ -94,8 +96,8 @@ def configure_app(app, config=None): :return: """ # respect the config first - if config and config != 'None': - app.config['CONFIG_PATH'] = config + if config and config != "None": + app.config["CONFIG_PATH"] = config app.config.from_object(from_file(config)) else: try: @@ -103,12 +105,21 @@ def configure_app(app, config=None): except RuntimeError: # look in default paths if os.path.isfile(os.path.expanduser("~/.lemur/lemur.conf.py")): - app.config.from_object(from_file(os.path.expanduser("~/.lemur/lemur.conf.py"))) + app.config.from_object( + from_file(os.path.expanduser("~/.lemur/lemur.conf.py")) + ) else: - app.config.from_object(from_file(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'default.conf.py'))) + app.config.from_object( + from_file( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), + "default.conf.py", + ) + ) + ) # we don't use this - app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False + app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False def configure_extensions(app): @@ -125,9 +136,15 @@ def configure_extensions(app): metrics.init_app(app) sentry.init_app(app) - if app.config['CORS']: - app.config['CORS_HEADERS'] = 'Content-Type' - cors.init_app(app, resources=r'/api/*', headers='Content-Type', origin='*', supports_credentials=True) + if app.config["CORS"]: + app.config["CORS_HEADERS"] = "Content-Type" + cors.init_app( + app, + resources=r"/api/*", + headers="Content-Type", + origin="*", + supports_credentials=True, + ) def configure_blueprints(app, blueprints): @@ -148,22 +165,25 @@ def configure_logging(app): :param app: """ - handler = RotatingFileHandler(app.config.get('LOG_FILE', 'lemur.log'), maxBytes=10000000, backupCount=100) + handler = RotatingFileHandler( + app.config.get("LOG_FILE", "lemur.log"), maxBytes=10000000, backupCount=100 + ) - handler.setFormatter(Formatter( - '%(asctime)s %(levelname)s: %(message)s ' - '[in %(pathname)s:%(lineno)d]' - )) + handler.setFormatter( + Formatter( + "%(asctime)s %(levelname)s: %(message)s " "[in %(pathname)s:%(lineno)d]" + ) + ) - handler.setLevel(app.config.get('LOG_LEVEL', 'DEBUG')) - app.logger.setLevel(app.config.get('LOG_LEVEL', 'DEBUG')) + handler.setLevel(app.config.get("LOG_LEVEL", "DEBUG")) + app.logger.setLevel(app.config.get("LOG_LEVEL", "DEBUG")) app.logger.addHandler(handler) stream_handler = StreamHandler() - stream_handler.setLevel(app.config.get('LOG_LEVEL', 'DEBUG')) + stream_handler.setLevel(app.config.get("LOG_LEVEL", "DEBUG")) app.logger.addHandler(stream_handler) - if app.config.get('DEBUG_DUMP', False): + if app.config.get("DEBUG_DUMP", False): activate_debug_dump() @@ -176,17 +196,21 @@ def install_plugins(app): """ from lemur.plugins import plugins from lemur.plugins.base import register + # entry_points={ # 'lemur.plugins': [ # 'verisign = 
lemur_verisign.plugin:VerisignPlugin' # ], # }, - for ep in pkg_resources.iter_entry_points('lemur.plugins'): + for ep in pkg_resources.iter_entry_points("lemur.plugins"): try: plugin = ep.load() except Exception: import traceback - app.logger.error("Failed to load plugin %r:\n%s\n" % (ep.name, traceback.format_exc())) + + app.logger.error( + "Failed to load plugin %r:\n%s\n" % (ep.name, traceback.format_exc()) + ) else: register(plugin) @@ -196,6 +220,9 @@ def install_plugins(app): try: plugins.get(slug) except KeyError: - raise Exception("Unable to location notification plugin: {slug}. Ensure that " - "LEMUR_DEFAULT_NOTIFICATION_PLUGIN is set to a valid and installed notification plugin." - .format(slug=slug)) + raise Exception( + "Unable to location notification plugin: {slug}. Ensure that " + "LEMUR_DEFAULT_NOTIFICATION_PLUGIN is set to a valid and installed notification plugin.".format( + slug=slug + ) + ) diff --git a/lemur/logs/models.py b/lemur/logs/models.py index 9f982c24..07a2ded3 100644 --- a/lemur/logs/models.py +++ b/lemur/logs/models.py @@ -15,9 +15,19 @@ from lemur.database import db class Log(db.Model): - __tablename__ = 'logs' + __tablename__ = "logs" id = Column(Integer, primary_key=True) - certificate_id = Column(Integer, ForeignKey('certificates.id')) - log_type = Column(Enum('key_view', 'create_cert', 'update_cert', 'revoke_cert', 'delete_cert', name='log_type'), nullable=False) + certificate_id = Column(Integer, ForeignKey("certificates.id")) + log_type = Column( + Enum( + "key_view", + "create_cert", + "update_cert", + "revoke_cert", + "delete_cert", + name="log_type", + ), + nullable=False, + ) logged_at = Column(ArrowType(), PassiveDefault(func.now()), nullable=False) - user_id = Column(Integer, ForeignKey('users.id'), nullable=False) + user_id = Column(Integer, ForeignKey("users.id"), nullable=False) diff --git a/lemur/logs/service.py b/lemur/logs/service.py index 04355938..f4949911 100644 --- a/lemur/logs/service.py +++ b/lemur/logs/service.py @@ -24,7 +24,11 @@ def create(user, type, certificate=None): :param certificate: :return: """ - current_app.logger.info("[lemur-audit] action: {0}, user: {1}, certificate: {2}.".format(type, user.email, certificate.name)) + current_app.logger.info( + "[lemur-audit] action: {0}, user: {1}, certificate: {2}.".format( + type, user.email, certificate.name + ) + ) view = Log(user_id=user.id, log_type=type, certificate_id=certificate.id) database.add(view) database.commit() @@ -50,20 +54,22 @@ def render(args): """ query = database.session_query(Log) - filt = args.pop('filter') + filt = args.pop("filter") if filt: - terms = filt.split(';') + terms = filt.split(";") - if 'certificate.name' in terms: - sub_query = database.session_query(Certificate.id)\ - .filter(Certificate.name.ilike('%{0}%'.format(terms[1]))) + if "certificate.name" in terms: + sub_query = database.session_query(Certificate.id).filter( + Certificate.name.ilike("%{0}%".format(terms[1])) + ) query = query.filter(Log.certificate_id.in_(sub_query)) - elif 'user.email' in terms: - sub_query = database.session_query(User.id)\ - .filter(User.email.ilike('%{0}%'.format(terms[1]))) + elif "user.email" in terms: + sub_query = database.session_query(User.id).filter( + User.email.ilike("%{0}%".format(terms[1])) + ) query = query.filter(Log.user_id.in_(sub_query)) diff --git a/lemur/logs/views.py b/lemur/logs/views.py index 1e0bd184..57c588ed 100644 --- a/lemur/logs/views.py +++ b/lemur/logs/views.py @@ -17,12 +17,13 @@ from lemur.logs.schemas import logs_output_schema from 
lemur.logs import service -mod = Blueprint('logs', __name__) +mod = Blueprint("logs", __name__) api = Api(mod) class LogsList(AuthenticatedResource): """ Defines the 'logs' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(LogsList, self).__init__() @@ -65,10 +66,10 @@ class LogsList(AuthenticatedResource): :statuscode 200: no error """ parser = paginated_parser.copy() - parser.add_argument('owner', type=str, location='args') - parser.add_argument('id', type=str, location='args') + parser.add_argument("owner", type=str, location="args") + parser.add_argument("id", type=str, location="args") args = parser.parse_args() return service.render(args) -api.add_resource(LogsList, '/logs', endpoint='logs') +api.add_resource(LogsList, "/logs", endpoint="logs") diff --git a/lemur/manage.py b/lemur/manage.py index c9ce4240..e6e85a9d 100755 --- a/lemur/manage.py +++ b/lemur/manage.py @@ -1,4 +1,4 @@ -from __future__ import unicode_literals # at top of module +from __future__ import unicode_literals # at top of module import os import sys @@ -52,24 +52,24 @@ from lemur.dns_providers.models import DnsProvider # noqa from sqlalchemy.sql import text manager = Manager(create_app) -manager.add_option('-c', '--config', dest='config_path', required=False) +manager.add_option("-c", "--config", dest="config_path", required=False) migrate = Migrate(create_app) REQUIRED_VARIABLES = [ - 'LEMUR_SECURITY_TEAM_EMAIL', - 'LEMUR_DEFAULT_ORGANIZATIONAL_UNIT', - 'LEMUR_DEFAULT_ORGANIZATION', - 'LEMUR_DEFAULT_LOCATION', - 'LEMUR_DEFAULT_COUNTRY', - 'LEMUR_DEFAULT_STATE', - 'SQLALCHEMY_DATABASE_URI' + "LEMUR_SECURITY_TEAM_EMAIL", + "LEMUR_DEFAULT_ORGANIZATIONAL_UNIT", + "LEMUR_DEFAULT_ORGANIZATION", + "LEMUR_DEFAULT_LOCATION", + "LEMUR_DEFAULT_COUNTRY", + "LEMUR_DEFAULT_STATE", + "SQLALCHEMY_DATABASE_URI", ] KEY_LENGTH = 40 -DEFAULT_CONFIG_PATH = '~/.lemur/lemur.conf.py' -DEFAULT_SETTINGS = 'lemur.conf.server' -SETTINGS_ENVVAR = 'LEMUR_CONF' +DEFAULT_CONFIG_PATH = "~/.lemur/lemur.conf.py" +DEFAULT_SETTINGS = "lemur.conf.server" +SETTINGS_ENVVAR = "LEMUR_CONF" CONFIG_TEMPLATE = """ # This is just Python which means you can inherit and tweak settings @@ -144,9 +144,9 @@ SQLALCHEMY_DATABASE_URI = 'postgresql://lemur:lemur@localhost:5432/lemur' @MigrateCommand.command def create(): - database.db.engine.execute(text('CREATE EXTENSION IF NOT EXISTS pg_trgm')) + database.db.engine.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm")) database.db.create_all() - stamp(revision='head') + stamp(revision="head") @MigrateCommand.command @@ -174,9 +174,9 @@ def generate_settings(): output = CONFIG_TEMPLATE.format( # we use Fernet.generate_key to make sure that the key length is # compatible with Fernet - encryption_key=Fernet.generate_key().decode('utf-8'), - secret_token=base64.b64encode(os.urandom(KEY_LENGTH)).decode('utf-8'), - flask_secret_key=base64.b64encode(os.urandom(KEY_LENGTH)).decode('utf-8'), + encryption_key=Fernet.generate_key().decode("utf-8"), + secret_token=base64.b64encode(os.urandom(KEY_LENGTH)).decode("utf-8"), + flask_secret_key=base64.b64encode(os.urandom(KEY_LENGTH)).decode("utf-8"), ) return output @@ -190,39 +190,44 @@ class InitializeApp(Command): Additionally a Lemur user will be created as a default user and be used when certificates are discovered by Lemur. 
""" - option_list = ( - Option('-p', '--password', dest='password'), - ) + + option_list = (Option("-p", "--password", dest="password"),) def run(self, password): create() user = user_service.get_by_username("lemur") - admin_role = role_service.get_by_name('admin') + admin_role = role_service.get_by_name("admin") if admin_role: sys.stdout.write("[-] Admin role already created, skipping...!\n") else: # we create an admin role - admin_role = role_service.create('admin', description='This is the Lemur administrator role.') + admin_role = role_service.create( + "admin", description="This is the Lemur administrator role." + ) sys.stdout.write("[+] Created 'admin' role\n") - operator_role = role_service.get_by_name('operator') + operator_role = role_service.get_by_name("operator") if operator_role: sys.stdout.write("[-] Operator role already created, skipping...!\n") else: # we create an operator role - operator_role = role_service.create('operator', description='This is the Lemur operator role.') + operator_role = role_service.create( + "operator", description="This is the Lemur operator role." + ) sys.stdout.write("[+] Created 'operator' role\n") - read_only_role = role_service.get_by_name('read-only') + read_only_role = role_service.get_by_name("read-only") if read_only_role: sys.stdout.write("[-] Read only role already created, skipping...!\n") else: # we create an read only role - read_only_role = role_service.create('read-only', description='This is the Lemur read only role.') + read_only_role = role_service.create( + "read-only", description="This is the Lemur read only role." + ) sys.stdout.write("[+] Created 'read-only' role\n") if not user: @@ -235,34 +240,54 @@ class InitializeApp(Command): sys.stderr.write("[!] Passwords do not match!\n") sys.exit(1) - user_service.create("lemur", password, 'lemur@nobody.com', True, None, [admin_role]) - sys.stdout.write("[+] Created the user 'lemur' and granted it the 'admin' role!\n") + user_service.create( + "lemur", password, "lemur@nobody.com", True, None, [admin_role] + ) + sys.stdout.write( + "[+] Created the user 'lemur' and granted it the 'admin' role!\n" + ) else: - sys.stdout.write("[-] Default user has already been created, skipping...!\n") + sys.stdout.write( + "[-] Default user has already been created, skipping...!\n" + ) - intervals = current_app.config.get("LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS", []) + intervals = current_app.config.get( + "LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS", [] + ) sys.stdout.write( "[!] Creating {num} notifications for {intervals} days as specified by LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS\n".format( - num=len(intervals), - intervals=",".join([str(x) for x in intervals]) + num=len(intervals), intervals=",".join([str(x) for x in intervals]) ) ) - recipients = current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL') + recipients = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL") sys.stdout.write("[+] Creating expiration email notifications!\n") - sys.stdout.write("[!] Using {0} as specified by LEMUR_SECURITY_TEAM_EMAIL for notifications\n".format(recipients)) - notification_service.create_default_expiration_notifications("DEFAULT_SECURITY", recipients=recipients) + sys.stdout.write( + "[!] 
Using {0} as specified by LEMUR_SECURITY_TEAM_EMAIL for notifications\n".format( + recipients + ) + ) + notification_service.create_default_expiration_notifications( + "DEFAULT_SECURITY", recipients=recipients + ) - _DEFAULT_ROTATION_INTERVAL = 'default' - default_rotation_interval = policy_service.get_by_name(_DEFAULT_ROTATION_INTERVAL) + _DEFAULT_ROTATION_INTERVAL = "default" + default_rotation_interval = policy_service.get_by_name( + _DEFAULT_ROTATION_INTERVAL + ) if default_rotation_interval: - sys.stdout.write("[-] Default rotation interval policy already created, skipping...!\n") + sys.stdout.write( + "[-] Default rotation interval policy already created, skipping...!\n" + ) else: days = current_app.config.get("LEMUR_DEFAULT_ROTATION_INTERVAL", 30) - sys.stdout.write("[+] Creating default certificate rotation policy of {days} days before issuance.\n".format( - days=days)) + sys.stdout.write( + "[+] Creating default certificate rotation policy of {days} days before issuance.\n".format( + days=days + ) + ) policy_service.create(days=days, name=_DEFAULT_ROTATION_INTERVAL) sys.stdout.write("[/] Done!\n") @@ -272,12 +297,13 @@ class CreateUser(Command): """ This command allows for the creation of a new user within Lemur. """ + option_list = ( - Option('-u', '--username', dest='username', required=True), - Option('-e', '--email', dest='email', required=True), - Option('-a', '--active', dest='active', default=True), - Option('-r', '--roles', dest='roles', action='append', default=[]), - Option('-p', '--password', dest='password', default=None) + Option("-u", "--username", dest="username", required=True), + Option("-e", "--email", dest="email", required=True), + Option("-a", "--active", dest="active", default=True), + Option("-r", "--roles", dest="roles", action="append", default=[]), + Option("-p", "--password", dest="password", default=None), ) def run(self, username, email, active, roles, password): @@ -307,9 +333,8 @@ class ResetPassword(Command): """ This command allows you to reset a user's password. 
""" - option_list = ( - Option('-u', '--username', dest='username', required=True), - ) + + option_list = (Option("-u", "--username", dest="username", required=True),) def run(self, username): user = user_service.get_by_username(username) @@ -335,10 +360,11 @@ class CreateRole(Command): """ This command allows for the creation of a new role within Lemur """ + option_list = ( - Option('-n', '--name', dest='name', required=True), - Option('-u', '--users', dest='users', default=[]), - Option('-d', '--description', dest='description', required=True) + Option("-n", "--name", dest="name", required=True), + Option("-u", "--users", dest="users", default=[]), + Option("-d", "--description", dest="description", required=True), ) def run(self, name, users, description): @@ -369,7 +395,8 @@ class LemurServer(Command): Will start gunicorn with 4 workers bound to 127.0.0.0:8002 """ - description = 'Run the app within Gunicorn' + + description = "Run the app within Gunicorn" def get_options(self): settings = make_settings() @@ -377,8 +404,10 @@ class LemurServer(Command): for setting, klass in settings.items(): if klass.cli: if klass.action: - if klass.action == 'store_const': - options.append(Option(*klass.cli, const=klass.const, action=klass.action)) + if klass.action == "store_const": + options.append( + Option(*klass.cli, const=klass.const, action=klass.action) + ) else: options.append(Option(*klass.cli, action=klass.action)) else: @@ -394,7 +423,9 @@ class LemurServer(Command): # run startup tasks on an app like object validate_conf(current_app, REQUIRED_VARIABLES) - app.app_uri = 'lemur:create_app(config_path="{0}")'.format(current_app.config.get('CONFIG_PATH')) + app.app_uri = 'lemur:create_app(config_path="{0}")'.format( + current_app.config.get("CONFIG_PATH") + ) return app.run() @@ -414,7 +445,7 @@ def create_config(config_path=None): os.makedirs(dir) config = generate_settings() - with open(config_path, 'w') as f: + with open(config_path, "w") as f: f.write(config) sys.stdout.write("[+] Created a new configuration file {0}\n".format(config_path)) @@ -436,7 +467,7 @@ def lock(path=None): :param: path """ if not path: - path = os.path.expanduser('~/.lemur/keys') + path = os.path.expanduser("~/.lemur/keys") dest_dir = os.path.join(path, "encrypted") sys.stdout.write("[!] Generating a new key...\n") @@ -447,15 +478,17 @@ def lock(path=None): sys.stdout.write("[+] Creating encryption directory: {0}\n".format(dest_dir)) os.makedirs(dest_dir) - for root, dirs, files in os.walk(os.path.join(path, 'decrypted')): + for root, dirs, files in os.walk(os.path.join(path, "decrypted")): for f in files: source = os.path.join(root, f) dest = os.path.join(dest_dir, f + ".enc") - with open(source, 'rb') as in_file, open(dest, 'wb') as out_file: + with open(source, "rb") as in_file, open(dest, "wb") as out_file: f = Fernet(key) data = f.encrypt(in_file.read()) out_file.write(data) - sys.stdout.write("[+] Writing file: {0} Source: {1}\n".format(dest, source)) + sys.stdout.write( + "[+] Writing file: {0} Source: {1}\n".format(dest, source) + ) sys.stdout.write("[+] Keys have been encrypted with key {0}\n".format(key)) @@ -475,7 +508,7 @@ def unlock(path=None): key = prompt_pass("[!] 
Please enter the encryption password") if not path: - path = os.path.expanduser('~/.lemur/keys') + path = os.path.expanduser("~/.lemur/keys") dest_dir = os.path.join(path, "decrypted") source_dir = os.path.join(path, "encrypted") @@ -488,11 +521,13 @@ def unlock(path=None): for f in files: source = os.path.join(source_dir, f) dest = os.path.join(dest_dir, ".".join(f.split(".")[:-1])) - with open(source, 'rb') as in_file, open(dest, 'wb') as out_file: + with open(source, "rb") as in_file, open(dest, "wb") as out_file: f = Fernet(key) data = f.decrypt(in_file.read()) out_file.write(data) - sys.stdout.write("[+] Writing file: {0} Source: {1}\n".format(dest, source)) + sys.stdout.write( + "[+] Writing file: {0} Source: {1}\n".format(dest, source) + ) sys.stdout.write("[+] Keys have been unencrypted!\n") @@ -505,15 +540,16 @@ def publish_verisign_units(): :return: """ from lemur.plugins import plugins - v = plugins.get('verisign-issuer') + + v = plugins.get("verisign-issuer") units = v.get_available_units() metrics = {} for item in units: - if item['@type'] in metrics.keys(): - metrics[item['@type']] += int(item['@remaining']) + if item["@type"] in metrics.keys(): + metrics[item["@type"]] += int(item["@remaining"]) else: - metrics.update({item['@type']: int(item['@remaining'])}) + metrics.update({item["@type"]: int(item["@remaining"])}) for name, value in metrics.items(): metric = [ @@ -522,16 +558,16 @@ def publish_verisign_units(): "type": "GAUGE", "name": "Symantec {0} Unit Count".format(name), "tags": {}, - "value": value + "value": value, } ] - requests.post('http://localhost:8078/metrics', data=json.dumps(metric)) + requests.post("http://localhost:8078/metrics", data=json.dumps(metric)) def main(): manager.add_command("start", LemurServer()) - manager.add_command("runserver", Server(host='127.0.0.1', threaded=True)) + manager.add_command("runserver", Server(host="127.0.0.1", threaded=True)) manager.add_command("clean", Clean()) manager.add_command("show_urls", ShowUrls()) manager.add_command("db", MigrateCommand) diff --git a/lemur/metrics.py b/lemur/metrics.py index 381dc605..52f8c25b 100644 --- a/lemur/metrics.py +++ b/lemur/metrics.py @@ -11,6 +11,7 @@ class Metrics(object): """ :param app: The Flask application object. Defaults to None. """ + _providers = [] def __init__(self, app=None): @@ -22,11 +23,14 @@ class Metrics(object): :param app: The Flask application object. 
""" - self._providers = app.config.get('METRIC_PROVIDERS', []) + self._providers = app.config.get("METRIC_PROVIDERS", []) def send(self, metric_name, metric_type, metric_value, *args, **kwargs): for provider in self._providers: current_app.logger.debug( - "Sending metric '{metric}' to the {provider} provider.".format(metric=metric_name, provider=provider)) + "Sending metric '{metric}' to the {provider} provider.".format( + metric=metric_name, provider=provider + ) + ) p = plugins.get(provider) p.submit(metric_name, metric_type, metric_value, *args, **kwargs) diff --git a/lemur/migrations/env.py b/lemur/migrations/env.py index 63425041..008a9952 100644 --- a/lemur/migrations/env.py +++ b/lemur/migrations/env.py @@ -19,8 +19,11 @@ fileConfig(config.config_file_name) # from myapp import mymodel # target_metadata = mymodel.Base.metadata from flask import current_app -config.set_main_option('sqlalchemy.url', current_app.config.get('SQLALCHEMY_DATABASE_URI')) -target_metadata = current_app.extensions['migrate'].db.metadata + +config.set_main_option( + "sqlalchemy.url", current_app.config.get("SQLALCHEMY_DATABASE_URI") +) +target_metadata = current_app.extensions["migrate"].db.metadata # other values from the config, defined by the needs of env.py, # can be acquired: @@ -54,14 +57,18 @@ def run_migrations_online(): and associate a connection with the context. """ - engine = engine_from_config(config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) + engine = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) connection = engine.connect() - context.configure(connection=connection, - target_metadata=target_metadata, - **current_app.extensions['migrate'].configure_args) + context.configure( + connection=connection, + target_metadata=target_metadata, + **current_app.extensions["migrate"].configure_args + ) try: with context.begin_transaction(): @@ -69,8 +76,8 @@ def run_migrations_online(): finally: connection.close() + if context.is_offline_mode(): run_migrations_offline() else: run_migrations_online() - diff --git a/lemur/migrations/versions/131ec6accff5_.py b/lemur/migrations/versions/131ec6accff5_.py index bddc5fe2..d5b42462 100644 --- a/lemur/migrations/versions/131ec6accff5_.py +++ b/lemur/migrations/versions/131ec6accff5_.py @@ -7,8 +7,8 @@ Create Date: 2016-12-07 17:29:42.049986 """ # revision identifiers, used by Alembic. -revision = '131ec6accff5' -down_revision = 'e3691fc396e9' +revision = "131ec6accff5" +down_revision = "e3691fc396e9" from alembic import op import sqlalchemy as sa @@ -16,13 +16,24 @@ import sqlalchemy as sa def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.add_column('certificates', sa.Column('rotation', sa.Boolean(), nullable=False, server_default=sa.false())) - op.add_column('endpoints', sa.Column('last_updated', sa.DateTime(), server_default=sa.text('now()'), nullable=False)) + op.add_column( + "certificates", + sa.Column("rotation", sa.Boolean(), nullable=False, server_default=sa.false()), + ) + op.add_column( + "endpoints", + sa.Column( + "last_updated", + sa.DateTime(), + server_default=sa.text("now()"), + nullable=False, + ), + ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_column('endpoints', 'last_updated') - op.drop_column('certificates', 'rotation') + op.drop_column("endpoints", "last_updated") + op.drop_column("certificates", "rotation") # ### end Alembic commands ### diff --git a/lemur/migrations/versions/1ae8e3104db8_.py b/lemur/migrations/versions/1ae8e3104db8_.py index 3cb3bb9e..9e19f0e7 100644 --- a/lemur/migrations/versions/1ae8e3104db8_.py +++ b/lemur/migrations/versions/1ae8e3104db8_.py @@ -7,15 +7,19 @@ Create Date: 2017-07-13 12:32:09.162800 """ # revision identifiers, used by Alembic. -revision = '1ae8e3104db8' -down_revision = 'a02a678ddc25' +revision = "1ae8e3104db8" +down_revision = "a02a678ddc25" from alembic import op def upgrade(): - op.sync_enum_values('public', 'log_type', ['key_view'], ['create_cert', 'key_view', 'update_cert']) + op.sync_enum_values( + "public", "log_type", ["key_view"], ["create_cert", "key_view", "update_cert"] + ) def downgrade(): - op.sync_enum_values('public', 'log_type', ['create_cert', 'key_view', 'update_cert'], ['key_view']) + op.sync_enum_values( + "public", "log_type", ["create_cert", "key_view", "update_cert"], ["key_view"] + ) diff --git a/lemur/migrations/versions/1db4f82bc780_.py b/lemur/migrations/versions/1db4f82bc780_.py index 2d917e2e..e6fb47f0 100644 --- a/lemur/migrations/versions/1db4f82bc780_.py +++ b/lemur/migrations/versions/1db4f82bc780_.py @@ -7,8 +7,8 @@ Create Date: 2018-08-03 12:56:44.565230 """ # revision identifiers, used by Alembic. -revision = '1db4f82bc780' -down_revision = '3adfdd6598df' +revision = "1db4f82bc780" +down_revision = "3adfdd6598df" import logging @@ -20,12 +20,14 @@ log = logging.getLogger(__name__) def upgrade(): connection = op.get_bind() - result = connection.execute("""\ + result = connection.execute( + """\ UPDATE certificates SET rotation_policy_id=(SELECT id FROM rotation_policies WHERE name='default') WHERE rotation_policy_id IS NULL RETURNING id - """) + """ + ) log.info("Filled rotation_policy for %d certificates" % result.rowcount) diff --git a/lemur/migrations/versions/29d8c8455c86_.py b/lemur/migrations/versions/29d8c8455c86_.py index f0b4749f..3a0e8717 100644 --- a/lemur/migrations/versions/29d8c8455c86_.py +++ b/lemur/migrations/versions/29d8c8455c86_.py @@ -7,8 +7,8 @@ Create Date: 2016-06-28 16:05:25.720213 """ # revision identifiers, used by Alembic. -revision = '29d8c8455c86' -down_revision = '3307381f3b88' +revision = "29d8c8455c86" +down_revision = "3307381f3b88" from alembic import op import sqlalchemy as sa @@ -17,46 +17,60 @@ from sqlalchemy.dialects import postgresql def upgrade(): ### commands auto generated by Alembic - please adjust! 
### - op.create_table('ciphers', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=128), nullable=False), - sa.PrimaryKeyConstraint('id') + op.create_table( + "ciphers", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=128), nullable=False), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('policy', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=128), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "policy", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=128), nullable=True), + sa.PrimaryKeyConstraint("id"), ) - op.create_table('policies_ciphers', - sa.Column('cipher_id', sa.Integer(), nullable=True), - sa.Column('policy_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['cipher_id'], ['ciphers.id'], ), - sa.ForeignKeyConstraint(['policy_id'], ['policy.id'], ) + op.create_table( + "policies_ciphers", + sa.Column("cipher_id", sa.Integer(), nullable=True), + sa.Column("policy_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["cipher_id"], ["ciphers.id"]), + sa.ForeignKeyConstraint(["policy_id"], ["policy.id"]), ) - op.create_index('policies_ciphers_ix', 'policies_ciphers', ['cipher_id', 'policy_id'], unique=False) - op.create_table('endpoints', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('owner', sa.String(length=128), nullable=True), - sa.Column('name', sa.String(length=128), nullable=True), - sa.Column('dnsname', sa.String(length=256), nullable=True), - sa.Column('type', sa.String(length=128), nullable=True), - sa.Column('active', sa.Boolean(), nullable=True), - sa.Column('port', sa.Integer(), nullable=True), - sa.Column('date_created', sa.DateTime(), server_default=sa.text(u'now()'), nullable=False), - sa.Column('policy_id', sa.Integer(), nullable=True), - sa.Column('certificate_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['certificate_id'], ['certificates.id'], ), - sa.ForeignKeyConstraint(['policy_id'], ['policy.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_index( + "policies_ciphers_ix", + "policies_ciphers", + ["cipher_id", "policy_id"], + unique=False, + ) + op.create_table( + "endpoints", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("owner", sa.String(length=128), nullable=True), + sa.Column("name", sa.String(length=128), nullable=True), + sa.Column("dnsname", sa.String(length=256), nullable=True), + sa.Column("type", sa.String(length=128), nullable=True), + sa.Column("active", sa.Boolean(), nullable=True), + sa.Column("port", sa.Integer(), nullable=True), + sa.Column( + "date_created", + sa.DateTime(), + server_default=sa.text(u"now()"), + nullable=False, + ), + sa.Column("policy_id", sa.Integer(), nullable=True), + sa.Column("certificate_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["certificate_id"], ["certificates.id"]), + sa.ForeignKeyConstraint(["policy_id"], ["policy.id"]), + sa.PrimaryKeyConstraint("id"), ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! 
### - op.drop_table('endpoints') - op.drop_index('policies_ciphers_ix', table_name='policies_ciphers') - op.drop_table('policies_ciphers') - op.drop_table('policy') - op.drop_table('ciphers') + op.drop_table("endpoints") + op.drop_index("policies_ciphers_ix", table_name="policies_ciphers") + op.drop_table("policies_ciphers") + op.drop_table("policy") + op.drop_table("ciphers") ### end Alembic commands ### diff --git a/lemur/migrations/versions/318b66568358_.py b/lemur/migrations/versions/318b66568358_.py index 9d4aa48d..8578cd78 100644 --- a/lemur/migrations/versions/318b66568358_.py +++ b/lemur/migrations/versions/318b66568358_.py @@ -7,8 +7,8 @@ Create Date: 2019-02-05 15:42:25.477587 """ # revision identifiers, used by Alembic. -revision = '318b66568358' -down_revision = '9f79024fe67b' +revision = "318b66568358" +down_revision = "9f79024fe67b" from alembic import op @@ -16,7 +16,7 @@ from alembic import op def upgrade(): connection = op.get_bind() # Delete duplicate entries - connection.execute('UPDATE certificates SET deleted = false WHERE deleted IS NULL') + connection.execute("UPDATE certificates SET deleted = false WHERE deleted IS NULL") def downgrade(): diff --git a/lemur/migrations/versions/3307381f3b88_.py b/lemur/migrations/versions/3307381f3b88_.py index e4da96a6..2af0448b 100644 --- a/lemur/migrations/versions/3307381f3b88_.py +++ b/lemur/migrations/versions/3307381f3b88_.py @@ -12,8 +12,8 @@ Create Date: 2016-05-20 17:33:04.360687 """ # revision identifiers, used by Alembic. -revision = '3307381f3b88' -down_revision = '412b22cb656a' +revision = "3307381f3b88" +down_revision = "412b22cb656a" from alembic import op import sqlalchemy as sa @@ -23,109 +23,165 @@ from sqlalchemy.dialects import postgresql def upgrade(): ### commands auto generated by Alembic - please adjust! 
### - op.alter_column('authorities', 'owner', - existing_type=sa.VARCHAR(length=128), - nullable=True) - op.drop_column('authorities', 'not_after') - op.drop_column('authorities', 'bits') - op.drop_column('authorities', 'cn') - op.drop_column('authorities', 'not_before') - op.add_column('certificates', sa.Column('root_authority_id', sa.Integer(), nullable=True)) - op.alter_column('certificates', 'body', - existing_type=sa.TEXT(), - nullable=False) - op.alter_column('certificates', 'owner', - existing_type=sa.VARCHAR(length=128), - nullable=True) - op.drop_constraint(u'certificates_authority_id_fkey', 'certificates', type_='foreignkey') - op.create_foreign_key(None, 'certificates', 'authorities', ['authority_id'], ['id'], ondelete='CASCADE') - op.create_foreign_key(None, 'certificates', 'authorities', ['root_authority_id'], ['id'], ondelete='CASCADE') + op.alter_column( + "authorities", "owner", existing_type=sa.VARCHAR(length=128), nullable=True + ) + op.drop_column("authorities", "not_after") + op.drop_column("authorities", "bits") + op.drop_column("authorities", "cn") + op.drop_column("authorities", "not_before") + op.add_column( + "certificates", sa.Column("root_authority_id", sa.Integer(), nullable=True) + ) + op.alter_column("certificates", "body", existing_type=sa.TEXT(), nullable=False) + op.alter_column( + "certificates", "owner", existing_type=sa.VARCHAR(length=128), nullable=True + ) + op.drop_constraint( + u"certificates_authority_id_fkey", "certificates", type_="foreignkey" + ) + op.create_foreign_key( + None, + "certificates", + "authorities", + ["authority_id"], + ["id"], + ondelete="CASCADE", + ) + op.create_foreign_key( + None, + "certificates", + "authorities", + ["root_authority_id"], + ["id"], + ondelete="CASCADE", + ) ### end Alembic commands ### # link existing certificate to their authority certificates conn = op.get_bind() - for id, body, owner in conn.execute(text('select id, body, owner from authorities')): + for id, body, owner in conn.execute( + text("select id, body, owner from authorities") + ): if not owner: owner = "lemur@nobody" # look up certificate by body, if duplications are found, pick one - stmt = text('select id from certificates where body=:body') + stmt = text("select id from certificates where body=:body") stmt = stmt.bindparams(body=body) root_certificate = conn.execute(stmt).fetchone() if root_certificate: - stmt = text('update certificates set root_authority_id=:root_authority_id where id=:id') + stmt = text( + "update certificates set root_authority_id=:root_authority_id where id=:id" + ) stmt = stmt.bindparams(root_authority_id=id, id=root_certificate[0]) op.execute(stmt) # link owner roles to their authorities - stmt = text('select id from roles where name=:name') + stmt = text("select id from roles where name=:name") stmt = stmt.bindparams(name=owner) owner_role = conn.execute(stmt).fetchone() if not owner_role: - stmt = text('insert into roles (name, description) values (:name, :description)') - stmt = stmt.bindparams(name=owner, description='Lemur generated role or existing owner.') + stmt = text( + "insert into roles (name, description) values (:name, :description)" + ) + stmt = stmt.bindparams( + name=owner, description="Lemur generated role or existing owner." 
+ ) op.execute(stmt) - stmt = text('select id from roles where name=:name') + stmt = text("select id from roles where name=:name") stmt = stmt.bindparams(name=owner) owner_role = conn.execute(stmt).fetchone() - stmt = text('select * from roles_authorities where role_id=:role_id and authority_id=:authority_id') + stmt = text( + "select * from roles_authorities where role_id=:role_id and authority_id=:authority_id" + ) stmt = stmt.bindparams(role_id=owner_role[0], authority_id=id) exists = conn.execute(stmt).fetchone() if not exists: - stmt = text('insert into roles_authorities (role_id, authority_id) values (:role_id, :authority_id)') + stmt = text( + "insert into roles_authorities (role_id, authority_id) values (:role_id, :authority_id)" + ) stmt = stmt.bindparams(role_id=owner_role[0], authority_id=id) op.execute(stmt) # link owner roles to their certificates - for id, owner in conn.execute(text('select id, owner from certificates')): + for id, owner in conn.execute(text("select id, owner from certificates")): if not owner: owner = "lemur@nobody" - stmt = text('select id from roles where name=:name') + stmt = text("select id from roles where name=:name") stmt = stmt.bindparams(name=owner) owner_role = conn.execute(stmt).fetchone() if not owner_role: - stmt = text('insert into roles (name, description) values (:name, :description)') - stmt = stmt.bindparams(name=owner, description='Lemur generated role or existing owner.') + stmt = text( + "insert into roles (name, description) values (:name, :description)" + ) + stmt = stmt.bindparams( + name=owner, description="Lemur generated role or existing owner." + ) op.execute(stmt) # link owner roles to their authorities - stmt = text('select id from roles where name=:name') + stmt = text("select id from roles where name=:name") stmt = stmt.bindparams(name=owner) owner_role = conn.execute(stmt).fetchone() - stmt = text('select * from roles_certificates where role_id=:role_id and certificate_id=:certificate_id') + stmt = text( + "select * from roles_certificates where role_id=:role_id and certificate_id=:certificate_id" + ) stmt = stmt.bindparams(role_id=owner_role[0], certificate_id=id) exists = conn.execute(stmt).fetchone() if not exists: - stmt = text('insert into roles_certificates (role_id, certificate_id) values (:role_id, :certificate_id)') + stmt = text( + "insert into roles_certificates (role_id, certificate_id) values (:role_id, :certificate_id)" + ) stmt = stmt.bindparams(role_id=owner_role[0], certificate_id=id) op.execute(stmt) def downgrade(): ### commands auto generated by Alembic - please adjust! 
### - op.drop_constraint(None, 'certificates', type_='foreignkey') - op.drop_constraint(None, 'certificates', type_='foreignkey') - op.create_foreign_key(u'certificates_authority_id_fkey', 'certificates', 'authorities', ['authority_id'], ['id']) - op.alter_column('certificates', 'owner', - existing_type=sa.VARCHAR(length=128), - nullable=True) - op.alter_column('certificates', 'body', - existing_type=sa.TEXT(), - nullable=True) - op.drop_column('certificates', 'root_authority_id') - op.add_column('authorities', sa.Column('not_before', postgresql.TIMESTAMP(), autoincrement=False, nullable=True)) - op.add_column('authorities', sa.Column('cn', sa.VARCHAR(length=128), autoincrement=False, nullable=True)) - op.add_column('authorities', sa.Column('bits', sa.INTEGER(), autoincrement=False, nullable=True)) - op.add_column('authorities', sa.Column('not_after', postgresql.TIMESTAMP(), autoincrement=False, nullable=True)) - op.alter_column('authorities', 'owner', - existing_type=sa.VARCHAR(length=128), - nullable=True) + op.drop_constraint(None, "certificates", type_="foreignkey") + op.drop_constraint(None, "certificates", type_="foreignkey") + op.create_foreign_key( + u"certificates_authority_id_fkey", + "certificates", + "authorities", + ["authority_id"], + ["id"], + ) + op.alter_column( + "certificates", "owner", existing_type=sa.VARCHAR(length=128), nullable=True + ) + op.alter_column("certificates", "body", existing_type=sa.TEXT(), nullable=True) + op.drop_column("certificates", "root_authority_id") + op.add_column( + "authorities", + sa.Column( + "not_before", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + ) + op.add_column( + "authorities", + sa.Column("cn", sa.VARCHAR(length=128), autoincrement=False, nullable=True), + ) + op.add_column( + "authorities", + sa.Column("bits", sa.INTEGER(), autoincrement=False, nullable=True), + ) + op.add_column( + "authorities", + sa.Column( + "not_after", postgresql.TIMESTAMP(), autoincrement=False, nullable=True + ), + ) + op.alter_column( + "authorities", "owner", existing_type=sa.VARCHAR(length=128), nullable=True + ) ### end Alembic commands ### diff --git a/lemur/migrations/versions/33de094da890_.py b/lemur/migrations/versions/33de094da890_.py index 76624e96..718e908f 100644 --- a/lemur/migrations/versions/33de094da890_.py +++ b/lemur/migrations/versions/33de094da890_.py @@ -7,25 +7,31 @@ Create Date: 2015-11-30 15:40:19.827272 """ # revision identifiers, used by Alembic. -revision = '33de094da890' +revision = "33de094da890" down_revision = None from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql + def upgrade(): ### commands auto generated by Alembic - please adjust! 
### - op.create_table('certificate_replacement_associations', - sa.Column('replaced_certificate_id', sa.Integer(), nullable=True), - sa.Column('certificate_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['certificate_id'], ['certificates.id'], ondelete='cascade'), - sa.ForeignKeyConstraint(['replaced_certificate_id'], ['certificates.id'], ondelete='cascade') + op.create_table( + "certificate_replacement_associations", + sa.Column("replaced_certificate_id", sa.Integer(), nullable=True), + sa.Column("certificate_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["certificate_id"], ["certificates.id"], ondelete="cascade" + ), + sa.ForeignKeyConstraint( + ["replaced_certificate_id"], ["certificates.id"], ondelete="cascade" + ), ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_table('certificate_replacement_associations') + op.drop_table("certificate_replacement_associations") ### end Alembic commands ### diff --git a/lemur/migrations/versions/3adfdd6598df_.py b/lemur/migrations/versions/3adfdd6598df_.py index 1f290153..7f587f49 100644 --- a/lemur/migrations/versions/3adfdd6598df_.py +++ b/lemur/migrations/versions/3adfdd6598df_.py @@ -7,8 +7,8 @@ Create Date: 2018-04-10 13:25:47.007556 """ # revision identifiers, used by Alembic. -revision = '3adfdd6598df' -down_revision = '556ceb3e3c3e' +revision = "3adfdd6598df" +down_revision = "556ceb3e3c3e" import sqlalchemy as sa from alembic import op @@ -22,84 +22,90 @@ def upgrade(): # create provider table print("Creating dns_providers table") op.create_table( - 'dns_providers', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=256), nullable=True), - sa.Column('description', sa.String(length=1024), nullable=True), - sa.Column('provider_type', sa.String(length=256), nullable=True), - sa.Column('credentials', Vault(), nullable=True), - sa.Column('api_endpoint', sa.String(length=256), nullable=True), - sa.Column('date_created', ArrowType(), server_default=sa.text('now()'), nullable=False), - sa.Column('status', sa.String(length=128), nullable=True), - sa.Column('options', JSON), - sa.Column('domains', sa.JSON(), nullable=True), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('name') + "dns_providers", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=256), nullable=True), + sa.Column("description", sa.String(length=1024), nullable=True), + sa.Column("provider_type", sa.String(length=256), nullable=True), + sa.Column("credentials", Vault(), nullable=True), + sa.Column("api_endpoint", sa.String(length=256), nullable=True), + sa.Column( + "date_created", ArrowType(), server_default=sa.text("now()"), nullable=False + ), + sa.Column("status", sa.String(length=128), nullable=True), + sa.Column("options", JSON), + sa.Column("domains", sa.JSON(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("name"), ) print("Adding dns_provider_id column to certificates") - op.add_column('certificates', sa.Column('dns_provider_id', sa.Integer(), nullable=True)) + op.add_column( + "certificates", sa.Column("dns_provider_id", sa.Integer(), nullable=True) + ) print("Adding dns_provider_id column to pending_certs") - op.add_column('pending_certs', sa.Column('dns_provider_id', sa.Integer(), nullable=True)) + op.add_column( + "pending_certs", sa.Column("dns_provider_id", sa.Integer(), nullable=True) + ) print("Adding options column to pending_certs") - op.add_column('pending_certs', 
sa.Column('options', JSON)) + op.add_column("pending_certs", sa.Column("options", JSON)) print("Creating pending_dns_authorizations table") op.create_table( - 'pending_dns_authorizations', - sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True), - sa.Column('account_number', sa.String(length=128), nullable=True), - sa.Column('domains', JSON, nullable=True), - sa.Column('dns_provider_type', sa.String(length=128), nullable=True), - sa.Column('options', JSON, nullable=True), + "pending_dns_authorizations", + sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True), + sa.Column("account_number", sa.String(length=128), nullable=True), + sa.Column("domains", JSON, nullable=True), + sa.Column("dns_provider_type", sa.String(length=128), nullable=True), + sa.Column("options", JSON, nullable=True), ) print("Creating certificates_dns_providers_fk foreign key") - op.create_foreign_key('certificates_dns_providers_fk', 'certificates', 'dns_providers', ['dns_provider_id'], ['id'], - ondelete='cascade') + op.create_foreign_key( + "certificates_dns_providers_fk", + "certificates", + "dns_providers", + ["dns_provider_id"], + ["id"], + ondelete="cascade", + ) print("Altering column types in the api_keys table") - op.alter_column('api_keys', 'issued_at', - existing_type=sa.BIGINT(), - nullable=True) - op.alter_column('api_keys', 'revoked', - existing_type=sa.BOOLEAN(), - nullable=True) - op.alter_column('api_keys', 'ttl', - existing_type=sa.BIGINT(), - nullable=True) - op.alter_column('api_keys', 'user_id', - existing_type=sa.INTEGER(), - nullable=True) + op.alter_column("api_keys", "issued_at", existing_type=sa.BIGINT(), nullable=True) + op.alter_column("api_keys", "revoked", existing_type=sa.BOOLEAN(), nullable=True) + op.alter_column("api_keys", "ttl", existing_type=sa.BIGINT(), nullable=True) + op.alter_column("api_keys", "user_id", existing_type=sa.INTEGER(), nullable=True) print("Creating dns_providers_id foreign key on pending_certs table") - op.create_foreign_key(None, 'pending_certs', 'dns_providers', ['dns_provider_id'], ['id'], ondelete='CASCADE') + op.create_foreign_key( + None, + "pending_certs", + "dns_providers", + ["dns_provider_id"], + ["id"], + ondelete="CASCADE", + ) + def downgrade(): print("Removing dns_providers_id foreign key on pending_certs table") - op.drop_constraint(None, 'pending_certs', type_='foreignkey') + op.drop_constraint(None, "pending_certs", type_="foreignkey") print("Reverting column types in the api_keys table") - op.alter_column('api_keys', 'user_id', - existing_type=sa.INTEGER(), - nullable=False) - op.alter_column('api_keys', 'ttl', - existing_type=sa.BIGINT(), - nullable=False) - op.alter_column('api_keys', 'revoked', - existing_type=sa.BOOLEAN(), - nullable=False) - op.alter_column('api_keys', 'issued_at', - existing_type=sa.BIGINT(), - nullable=False) + op.alter_column("api_keys", "user_id", existing_type=sa.INTEGER(), nullable=False) + op.alter_column("api_keys", "ttl", existing_type=sa.BIGINT(), nullable=False) + op.alter_column("api_keys", "revoked", existing_type=sa.BOOLEAN(), nullable=False) + op.alter_column("api_keys", "issued_at", existing_type=sa.BIGINT(), nullable=False) print("Reverting certificates_dns_providers_fk foreign key") - op.drop_constraint('certificates_dns_providers_fk', 'certificates', type_='foreignkey') + op.drop_constraint( + "certificates_dns_providers_fk", "certificates", type_="foreignkey" + ) print("Dropping pending_dns_authorizations table") - op.drop_table('pending_dns_authorizations') + 
op.drop_table("pending_dns_authorizations") print("Undoing modifications to pending_certs table") - op.drop_column('pending_certs', 'options') - op.drop_column('pending_certs', 'dns_provider_id') + op.drop_column("pending_certs", "options") + op.drop_column("pending_certs", "dns_provider_id") print("Undoing modifications to certificates table") - op.drop_column('certificates', 'dns_provider_id') + op.drop_column("certificates", "dns_provider_id") print("Deleting dns_providers table") - op.drop_table('dns_providers') + op.drop_table("dns_providers") diff --git a/lemur/migrations/versions/412b22cb656a_.py b/lemur/migrations/versions/412b22cb656a_.py index d95ec701..c24ddfba 100644 --- a/lemur/migrations/versions/412b22cb656a_.py +++ b/lemur/migrations/versions/412b22cb656a_.py @@ -7,8 +7,8 @@ Create Date: 2016-05-17 17:37:41.210232 """ # revision identifiers, used by Alembic. -revision = '412b22cb656a' -down_revision = '4c50b903d1ae' +revision = "412b22cb656a" +down_revision = "4c50b903d1ae" from alembic import op import sqlalchemy as sa @@ -17,47 +17,102 @@ from sqlalchemy.sql import text def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.create_table('roles_authorities', - sa.Column('authority_id', sa.Integer(), nullable=True), - sa.Column('role_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['authority_id'], ['authorities.id'], ), - sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ) + op.create_table( + "roles_authorities", + sa.Column("authority_id", sa.Integer(), nullable=True), + sa.Column("role_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["authority_id"], ["authorities.id"]), + sa.ForeignKeyConstraint(["role_id"], ["roles.id"]), ) - op.create_index('roles_authorities_ix', 'roles_authorities', ['authority_id', 'role_id'], unique=True) - op.create_table('roles_certificates', - sa.Column('certificate_id', sa.Integer(), nullable=True), - sa.Column('role_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['certificate_id'], ['certificates.id'], ), - sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ) + op.create_index( + "roles_authorities_ix", + "roles_authorities", + ["authority_id", "role_id"], + unique=True, + ) + op.create_table( + "roles_certificates", + sa.Column("certificate_id", sa.Integer(), nullable=True), + sa.Column("role_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["certificate_id"], ["certificates.id"]), + sa.ForeignKeyConstraint(["role_id"], ["roles.id"]), + ) + op.create_index( + "roles_certificates_ix", + "roles_certificates", + ["certificate_id", "role_id"], + unique=True, + ) + op.create_index( + "certificate_associations_ix", + "certificate_associations", + ["domain_id", "certificate_id"], + unique=True, + ) + op.create_index( + "certificate_destination_associations_ix", + "certificate_destination_associations", + ["destination_id", "certificate_id"], + unique=True, + ) + op.create_index( + "certificate_notification_associations_ix", + "certificate_notification_associations", + ["notification_id", "certificate_id"], + unique=True, + ) + op.create_index( + "certificate_replacement_associations_ix", + "certificate_replacement_associations", + ["certificate_id", "certificate_id"], + unique=True, + ) + op.create_index( + "certificate_source_associations_ix", + "certificate_source_associations", + ["source_id", "certificate_id"], + unique=True, + ) + op.create_index( + "roles_users_ix", "roles_users", ["user_id", "role_id"], unique=True ) - op.create_index('roles_certificates_ix', 
'roles_certificates', ['certificate_id', 'role_id'], unique=True) - op.create_index('certificate_associations_ix', 'certificate_associations', ['domain_id', 'certificate_id'], unique=True) - op.create_index('certificate_destination_associations_ix', 'certificate_destination_associations', ['destination_id', 'certificate_id'], unique=True) - op.create_index('certificate_notification_associations_ix', 'certificate_notification_associations', ['notification_id', 'certificate_id'], unique=True) - op.create_index('certificate_replacement_associations_ix', 'certificate_replacement_associations', ['certificate_id', 'certificate_id'], unique=True) - op.create_index('certificate_source_associations_ix', 'certificate_source_associations', ['source_id', 'certificate_id'], unique=True) - op.create_index('roles_users_ix', 'roles_users', ['user_id', 'role_id'], unique=True) ### end Alembic commands ### # migrate existing authority_id relationship to many_to_many conn = op.get_bind() - for id, authority_id in conn.execute(text('select id, authority_id from roles where authority_id is not null')): - stmt = text('insert into roles_authoritties (role_id, authority_id) values (:role_id, :authority_id)') + for id, authority_id in conn.execute( + text("select id, authority_id from roles where authority_id is not null") + ): + stmt = text( + "insert into roles_authoritties (role_id, authority_id) values (:role_id, :authority_id)" + ) stmt = stmt.bindparams(role_id=id, authority_id=authority_id) op.execute(stmt) def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_index('roles_users_ix', table_name='roles_users') - op.drop_index('certificate_source_associations_ix', table_name='certificate_source_associations') - op.drop_index('certificate_replacement_associations_ix', table_name='certificate_replacement_associations') - op.drop_index('certificate_notification_associations_ix', table_name='certificate_notification_associations') - op.drop_index('certificate_destination_associations_ix', table_name='certificate_destination_associations') - op.drop_index('certificate_associations_ix', table_name='certificate_associations') - op.drop_index('roles_certificates_ix', table_name='roles_certificates') - op.drop_table('roles_certificates') - op.drop_index('roles_authorities_ix', table_name='roles_authorities') - op.drop_table('roles_authorities') + op.drop_index("roles_users_ix", table_name="roles_users") + op.drop_index( + "certificate_source_associations_ix", + table_name="certificate_source_associations", + ) + op.drop_index( + "certificate_replacement_associations_ix", + table_name="certificate_replacement_associations", + ) + op.drop_index( + "certificate_notification_associations_ix", + table_name="certificate_notification_associations", + ) + op.drop_index( + "certificate_destination_associations_ix", + table_name="certificate_destination_associations", + ) + op.drop_index("certificate_associations_ix", table_name="certificate_associations") + op.drop_index("roles_certificates_ix", table_name="roles_certificates") + op.drop_table("roles_certificates") + op.drop_index("roles_authorities_ix", table_name="roles_authorities") + op.drop_table("roles_authorities") ### end Alembic commands ### diff --git a/lemur/migrations/versions/449c3d5c7299_.py b/lemur/migrations/versions/449c3d5c7299_.py index 0bc30db1..f33548da 100644 --- a/lemur/migrations/versions/449c3d5c7299_.py +++ b/lemur/migrations/versions/449c3d5c7299_.py @@ -7,8 +7,8 @@ Create Date: 2018-02-24 22:51:35.369229 """ # 
revision identifiers, used by Alembic. -revision = '449c3d5c7299' -down_revision = '5770674184de' +revision = "449c3d5c7299" +down_revision = "5770674184de" from alembic import op from flask_sqlalchemy import SQLAlchemy @@ -23,12 +23,14 @@ COLUMNS = ["notification_id", "certificate_id"] def upgrade(): connection = op.get_bind() # Delete duplicate entries - connection.execute("""\ + connection.execute( + """\ DELETE FROM certificate_notification_associations WHERE ctid NOT IN ( -- Select the first tuple ID for each (notification_id, certificate_id) combination and keep that SELECT min(ctid) FROM certificate_notification_associations GROUP BY notification_id, certificate_id ) - """) + """ + ) op.create_unique_constraint(CONSTRAINT_NAME, TABLE, COLUMNS) diff --git a/lemur/migrations/versions/4c50b903d1ae_.py b/lemur/migrations/versions/4c50b903d1ae_.py index 7b0515d4..93d4a312 100644 --- a/lemur/migrations/versions/4c50b903d1ae_.py +++ b/lemur/migrations/versions/4c50b903d1ae_.py @@ -7,20 +7,21 @@ Create Date: 2015-12-30 10:19:30.057791 """ # revision identifiers, used by Alembic. -revision = '4c50b903d1ae' -down_revision = '33de094da890' +revision = "4c50b903d1ae" +down_revision = "33de094da890" from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql + def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.add_column('domains', sa.Column('sensitive', sa.Boolean(), nullable=True)) + op.add_column("domains", sa.Column("sensitive", sa.Boolean(), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_column('domains', 'sensitive') + op.drop_column("domains", "sensitive") ### end Alembic commands ### diff --git a/lemur/migrations/versions/556ceb3e3c3e_.py b/lemur/migrations/versions/556ceb3e3c3e_.py index 2916c0eb..60304138 100644 --- a/lemur/migrations/versions/556ceb3e3c3e_.py +++ b/lemur/migrations/versions/556ceb3e3c3e_.py @@ -7,8 +7,8 @@ Create Date: 2018-01-05 01:18:45.571595 """ # revision identifiers, used by Alembic. -revision = '556ceb3e3c3e' -down_revision = '449c3d5c7299' +revision = "556ceb3e3c3e" +down_revision = "449c3d5c7299" from alembic import op import sqlalchemy as sa @@ -16,84 +16,150 @@ from lemur.utils import Vault from sqlalchemy.dialects import postgresql from sqlalchemy_utils import ArrowType + def upgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.create_table('pending_certs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('external_id', sa.String(length=128), nullable=True), - sa.Column('owner', sa.String(length=128), nullable=False), - sa.Column('name', sa.String(length=256), nullable=True), - sa.Column('description', sa.String(length=1024), nullable=True), - sa.Column('notify', sa.Boolean(), nullable=True), - sa.Column('number_attempts', sa.Integer(), nullable=True), - sa.Column('rename', sa.Boolean(), nullable=True), - sa.Column('cn', sa.String(length=128), nullable=True), - sa.Column('csr', sa.Text(), nullable=False), - sa.Column('chain', sa.Text(), nullable=True), - sa.Column('private_key', Vault(), nullable=True), - sa.Column('date_created', ArrowType(), server_default=sa.text('now()'), nullable=False), - sa.Column('status', sa.String(length=128), nullable=True), - sa.Column('rotation', sa.Boolean(), nullable=True), - sa.Column('user_id', sa.Integer(), nullable=True), - sa.Column('authority_id', sa.Integer(), nullable=True), - sa.Column('root_authority_id', sa.Integer(), nullable=True), - sa.Column('rotation_policy_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['authority_id'], ['authorities.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['root_authority_id'], ['authorities.id'], ondelete='CASCADE'), - sa.ForeignKeyConstraint(['rotation_policy_id'], ['rotation_policies.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), - sa.PrimaryKeyConstraint('id'), - sa.UniqueConstraint('name') + op.create_table( + "pending_certs", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("external_id", sa.String(length=128), nullable=True), + sa.Column("owner", sa.String(length=128), nullable=False), + sa.Column("name", sa.String(length=256), nullable=True), + sa.Column("description", sa.String(length=1024), nullable=True), + sa.Column("notify", sa.Boolean(), nullable=True), + sa.Column("number_attempts", sa.Integer(), nullable=True), + sa.Column("rename", sa.Boolean(), nullable=True), + sa.Column("cn", sa.String(length=128), nullable=True), + sa.Column("csr", sa.Text(), nullable=False), + sa.Column("chain", sa.Text(), nullable=True), + sa.Column("private_key", Vault(), nullable=True), + sa.Column( + "date_created", ArrowType(), server_default=sa.text("now()"), nullable=False + ), + sa.Column("status", sa.String(length=128), nullable=True), + sa.Column("rotation", sa.Boolean(), nullable=True), + sa.Column("user_id", sa.Integer(), nullable=True), + sa.Column("authority_id", sa.Integer(), nullable=True), + sa.Column("root_authority_id", sa.Integer(), nullable=True), + sa.Column("rotation_policy_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["authority_id"], ["authorities.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint( + ["root_authority_id"], ["authorities.id"], ondelete="CASCADE" + ), + sa.ForeignKeyConstraint(["rotation_policy_id"], ["rotation_policies.id"]), + sa.ForeignKeyConstraint(["user_id"], ["users.id"]), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("name"), ) - op.create_table('pending_cert_destination_associations', - sa.Column('destination_id', sa.Integer(), nullable=True), - sa.Column('pending_cert_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['destination_id'], ['destinations.id'], ondelete='cascade'), - sa.ForeignKeyConstraint(['pending_cert_id'], ['pending_certs.id'], ondelete='cascade') + op.create_table( + "pending_cert_destination_associations", + sa.Column("destination_id", sa.Integer(), nullable=True), + 
sa.Column("pending_cert_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["destination_id"], ["destinations.id"], ondelete="cascade" + ), + sa.ForeignKeyConstraint( + ["pending_cert_id"], ["pending_certs.id"], ondelete="cascade" + ), ) - op.create_index('pending_cert_destination_associations_ix', 'pending_cert_destination_associations', ['destination_id', 'pending_cert_id'], unique=False) - op.create_table('pending_cert_notification_associations', - sa.Column('notification_id', sa.Integer(), nullable=True), - sa.Column('pending_cert_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['notification_id'], ['notifications.id'], ondelete='cascade'), - sa.ForeignKeyConstraint(['pending_cert_id'], ['pending_certs.id'], ondelete='cascade') + op.create_index( + "pending_cert_destination_associations_ix", + "pending_cert_destination_associations", + ["destination_id", "pending_cert_id"], + unique=False, ) - op.create_index('pending_cert_notification_associations_ix', 'pending_cert_notification_associations', ['notification_id', 'pending_cert_id'], unique=False) - op.create_table('pending_cert_replacement_associations', - sa.Column('replaced_certificate_id', sa.Integer(), nullable=True), - sa.Column('pending_cert_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['pending_cert_id'], ['pending_certs.id'], ondelete='cascade'), - sa.ForeignKeyConstraint(['replaced_certificate_id'], ['certificates.id'], ondelete='cascade') + op.create_table( + "pending_cert_notification_associations", + sa.Column("notification_id", sa.Integer(), nullable=True), + sa.Column("pending_cert_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["notification_id"], ["notifications.id"], ondelete="cascade" + ), + sa.ForeignKeyConstraint( + ["pending_cert_id"], ["pending_certs.id"], ondelete="cascade" + ), ) - op.create_index('pending_cert_replacement_associations_ix', 'pending_cert_replacement_associations', ['replaced_certificate_id', 'pending_cert_id'], unique=False) - op.create_table('pending_cert_role_associations', - sa.Column('pending_cert_id', sa.Integer(), nullable=True), - sa.Column('role_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['pending_cert_id'], ['pending_certs.id'], ), - sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ) + op.create_index( + "pending_cert_notification_associations_ix", + "pending_cert_notification_associations", + ["notification_id", "pending_cert_id"], + unique=False, ) - op.create_index('pending_cert_role_associations_ix', 'pending_cert_role_associations', ['pending_cert_id', 'role_id'], unique=False) - op.create_table('pending_cert_source_associations', - sa.Column('source_id', sa.Integer(), nullable=True), - sa.Column('pending_cert_id', sa.Integer(), nullable=True), - sa.ForeignKeyConstraint(['pending_cert_id'], ['pending_certs.id'], ondelete='cascade'), - sa.ForeignKeyConstraint(['source_id'], ['sources.id'], ondelete='cascade') + op.create_table( + "pending_cert_replacement_associations", + sa.Column("replaced_certificate_id", sa.Integer(), nullable=True), + sa.Column("pending_cert_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["pending_cert_id"], ["pending_certs.id"], ondelete="cascade" + ), + sa.ForeignKeyConstraint( + ["replaced_certificate_id"], ["certificates.id"], ondelete="cascade" + ), + ) + op.create_index( + "pending_cert_replacement_associations_ix", + "pending_cert_replacement_associations", + ["replaced_certificate_id", "pending_cert_id"], + unique=False, + ) + op.create_table( + 
"pending_cert_role_associations", + sa.Column("pending_cert_id", sa.Integer(), nullable=True), + sa.Column("role_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(["pending_cert_id"], ["pending_certs.id"]), + sa.ForeignKeyConstraint(["role_id"], ["roles.id"]), + ) + op.create_index( + "pending_cert_role_associations_ix", + "pending_cert_role_associations", + ["pending_cert_id", "role_id"], + unique=False, + ) + op.create_table( + "pending_cert_source_associations", + sa.Column("source_id", sa.Integer(), nullable=True), + sa.Column("pending_cert_id", sa.Integer(), nullable=True), + sa.ForeignKeyConstraint( + ["pending_cert_id"], ["pending_certs.id"], ondelete="cascade" + ), + sa.ForeignKeyConstraint(["source_id"], ["sources.id"], ondelete="cascade"), + ) + op.create_index( + "pending_cert_source_associations_ix", + "pending_cert_source_associations", + ["source_id", "pending_cert_id"], + unique=False, ) - op.create_index('pending_cert_source_associations_ix', 'pending_cert_source_associations', ['source_id', 'pending_cert_id'], unique=False) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.drop_index('pending_cert_source_associations_ix', table_name='pending_cert_source_associations') - op.drop_table('pending_cert_source_associations') - op.drop_index('pending_cert_role_associations_ix', table_name='pending_cert_role_associations') - op.drop_table('pending_cert_role_associations') - op.drop_index('pending_cert_replacement_associations_ix', table_name='pending_cert_replacement_associations') - op.drop_table('pending_cert_replacement_associations') - op.drop_index('pending_cert_notification_associations_ix', table_name='pending_cert_notification_associations') - op.drop_table('pending_cert_notification_associations') - op.drop_index('pending_cert_destination_associations_ix', table_name='pending_cert_destination_associations') - op.drop_table('pending_cert_destination_associations') - op.drop_table('pending_certs') + op.drop_index( + "pending_cert_source_associations_ix", + table_name="pending_cert_source_associations", + ) + op.drop_table("pending_cert_source_associations") + op.drop_index( + "pending_cert_role_associations_ix", table_name="pending_cert_role_associations" + ) + op.drop_table("pending_cert_role_associations") + op.drop_index( + "pending_cert_replacement_associations_ix", + table_name="pending_cert_replacement_associations", + ) + op.drop_table("pending_cert_replacement_associations") + op.drop_index( + "pending_cert_notification_associations_ix", + table_name="pending_cert_notification_associations", + ) + op.drop_table("pending_cert_notification_associations") + op.drop_index( + "pending_cert_destination_associations_ix", + table_name="pending_cert_destination_associations", + ) + op.drop_table("pending_cert_destination_associations") + op.drop_table("pending_certs") # ### end Alembic commands ### diff --git a/lemur/migrations/versions/5770674184de_.py b/lemur/migrations/versions/5770674184de_.py index 88262a84..49d89367 100644 --- a/lemur/migrations/versions/5770674184de_.py +++ b/lemur/migrations/versions/5770674184de_.py @@ -7,8 +7,8 @@ Create Date: 2018-02-23 15:27:30.335435 """ # revision identifiers, used by Alembic. 
-revision = '5770674184de' -down_revision = 'ce547319f7be' +revision = "5770674184de" +down_revision = "ce547319f7be" from flask_sqlalchemy import SQLAlchemy from lemur.models import certificate_notification_associations @@ -32,7 +32,9 @@ def upgrade(): # If we've seen a pair already, delete the duplicates if seen.get("{}-{}".format(x.certificate_id, x.notification_id)): print("Deleting duplicate: {}".format(x)) - d = session.query(certificate_notification_associations).filter(certificate_notification_associations.c.id==x.id) + d = session.query(certificate_notification_associations).filter( + certificate_notification_associations.c.id == x.id + ) d.delete(synchronize_session=False) seen["{}-{}".format(x.certificate_id, x.notification_id)] = True db.session.commit() diff --git a/lemur/migrations/versions/5ae0ecefb01f_.py b/lemur/migrations/versions/5ae0ecefb01f_.py index a471c4bf..7b0d5ae0 100644 --- a/lemur/migrations/versions/5ae0ecefb01f_.py +++ b/lemur/migrations/versions/5ae0ecefb01f_.py @@ -7,8 +7,8 @@ Create Date: 2018-08-14 08:16:43.329316 """ # revision identifiers, used by Alembic. -revision = '5ae0ecefb01f' -down_revision = '1db4f82bc780' +revision = "5ae0ecefb01f" +down_revision = "1db4f82bc780" from alembic import op import sqlalchemy as sa @@ -16,17 +16,14 @@ import sqlalchemy as sa def upgrade(): op.alter_column( - table_name='pending_certs', - column_name='status', - nullable=True, - type_=sa.TEXT() + table_name="pending_certs", column_name="status", nullable=True, type_=sa.TEXT() ) def downgrade(): op.alter_column( - table_name='pending_certs', - column_name='status', + table_name="pending_certs", + column_name="status", nullable=True, - type_=sa.VARCHAR(128) + type_=sa.VARCHAR(128), ) diff --git a/lemur/migrations/versions/5bc47fa7cac4_.py b/lemur/migrations/versions/5bc47fa7cac4_.py index f4a145c8..f786c527 100644 --- a/lemur/migrations/versions/5bc47fa7cac4_.py +++ b/lemur/migrations/versions/5bc47fa7cac4_.py @@ -7,16 +7,18 @@ Create Date: 2017-12-08 14:19:11.903864 """ # revision identifiers, used by Alembic. -revision = '5bc47fa7cac4' -down_revision = 'c05a8998b371' +revision = "5bc47fa7cac4" +down_revision = "c05a8998b371" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('roles', sa.Column('third_party', sa.Boolean(), nullable=True, default=False)) + op.add_column( + "roles", sa.Column("third_party", sa.Boolean(), nullable=True, default=False) + ) def downgrade(): - op.drop_column('roles', 'third_party') + op.drop_column("roles", "third_party") diff --git a/lemur/migrations/versions/5e680529b666_.py b/lemur/migrations/versions/5e680529b666_.py index d59d996f..4cca4521 100644 --- a/lemur/migrations/versions/5e680529b666_.py +++ b/lemur/migrations/versions/5e680529b666_.py @@ -7,20 +7,20 @@ Create Date: 2017-01-26 05:05:25.168125 """ # revision identifiers, used by Alembic. 
-revision = '5e680529b666' -down_revision = '131ec6accff5' +revision = "5e680529b666" +down_revision = "131ec6accff5" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('endpoints', sa.Column('sensitive', sa.Boolean(), nullable=True)) - op.add_column('endpoints', sa.Column('source_id', sa.Integer(), nullable=True)) - op.create_foreign_key(None, 'endpoints', 'sources', ['source_id'], ['id']) + op.add_column("endpoints", sa.Column("sensitive", sa.Boolean(), nullable=True)) + op.add_column("endpoints", sa.Column("source_id", sa.Integer(), nullable=True)) + op.create_foreign_key(None, "endpoints", "sources", ["source_id"], ["id"]) def downgrade(): - op.drop_constraint(None, 'endpoints', type_='foreignkey') - op.drop_column('endpoints', 'source_id') - op.drop_column('endpoints', 'sensitive') + op.drop_constraint(None, "endpoints", type_="foreignkey") + op.drop_column("endpoints", "source_id") + op.drop_column("endpoints", "sensitive") diff --git a/lemur/migrations/versions/6006c79b6011_.py b/lemur/migrations/versions/6006c79b6011_.py index c41b1d25..86727716 100644 --- a/lemur/migrations/versions/6006c79b6011_.py +++ b/lemur/migrations/versions/6006c79b6011_.py @@ -7,15 +7,15 @@ Create Date: 2018-10-19 15:23:06.750510 """ # revision identifiers, used by Alembic. -revision = '6006c79b6011' -down_revision = '984178255c83' +revision = "6006c79b6011" +down_revision = "984178255c83" from alembic import op def upgrade(): - op.create_unique_constraint("uq_label", 'sources', ['label']) + op.create_unique_constraint("uq_label", "sources", ["label"]) def downgrade(): - op.drop_constraint("uq_label", 'sources', type_='unique') + op.drop_constraint("uq_label", "sources", type_="unique") diff --git a/lemur/migrations/versions/7ead443ba911_.py b/lemur/migrations/versions/7ead443ba911_.py index 62be01aa..10b8e576 100644 --- a/lemur/migrations/versions/7ead443ba911_.py +++ b/lemur/migrations/versions/7ead443ba911_.py @@ -7,15 +7,16 @@ Create Date: 2018-10-21 22:06:23.056906 """ # revision identifiers, used by Alembic. -revision = '7ead443ba911' -down_revision = '6006c79b6011' +revision = "7ead443ba911" +down_revision = "6006c79b6011" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('certificates', sa.Column('csr', sa.TEXT(), nullable=True)) + op.add_column("certificates", sa.Column("csr", sa.TEXT(), nullable=True)) + def downgrade(): - op.drop_column('certificates', 'csr') + op.drop_column("certificates", "csr") diff --git a/lemur/migrations/versions/7f71c0cea31a_.py b/lemur/migrations/versions/7f71c0cea31a_.py index 04bb02ea..5e90cbb1 100644 --- a/lemur/migrations/versions/7f71c0cea31a_.py +++ b/lemur/migrations/versions/7f71c0cea31a_.py @@ -9,8 +9,8 @@ Create Date: 2016-07-28 09:39:12.736506 """ # revision identifiers, used by Alembic. 
-revision = '7f71c0cea31a' -down_revision = '29d8c8455c86' +revision = "7f71c0cea31a" +down_revision = "29d8c8455c86" from alembic import op import sqlalchemy as sa @@ -19,17 +19,25 @@ from sqlalchemy.sql import text def upgrade(): conn = op.get_bind() - for name in conn.execute(text('select name from certificates group by name having count(*) > 1')): - for idx, id in enumerate(conn.execute(text("select id from certificates where certificates.name like :name order by id ASC").bindparams(name=name[0]))): + for name in conn.execute( + text("select name from certificates group by name having count(*) > 1") + ): + for idx, id in enumerate( + conn.execute( + text( + "select id from certificates where certificates.name like :name order by id ASC" + ).bindparams(name=name[0]) + ) + ): if not idx: continue - new_name = name[0] + '-' + str(idx) - stmt = text('update certificates set name=:name where id=:id') + new_name = name[0] + "-" + str(idx) + stmt = text("update certificates set name=:name where id=:id") stmt = stmt.bindparams(name=new_name, id=id[0]) op.execute(stmt) - op.create_unique_constraint(None, 'certificates', ['name']) + op.create_unique_constraint(None, "certificates", ["name"]) def downgrade(): - op.drop_constraint(None, 'certificates', type_='unique') + op.drop_constraint(None, "certificates", type_="unique") diff --git a/lemur/migrations/versions/8ae67285ff14_.py b/lemur/migrations/versions/8ae67285ff14_.py index f45be70d..e8f6a217 100644 --- a/lemur/migrations/versions/8ae67285ff14_.py +++ b/lemur/migrations/versions/8ae67285ff14_.py @@ -7,18 +7,28 @@ Create Date: 2017-05-10 11:56:13.999332 """ # revision identifiers, used by Alembic. -revision = '8ae67285ff14' -down_revision = '5e680529b666' +revision = "8ae67285ff14" +down_revision = "5e680529b666" from alembic import op import sqlalchemy as sa def upgrade(): - op.drop_index('certificate_replacement_associations_ix') - op.create_index('certificate_replacement_associations_ix', 'certificate_replacement_associations', ['replaced_certificate_id', 'certificate_id'], unique=True) + op.drop_index("certificate_replacement_associations_ix") + op.create_index( + "certificate_replacement_associations_ix", + "certificate_replacement_associations", + ["replaced_certificate_id", "certificate_id"], + unique=True, + ) def downgrade(): - op.drop_index('certificate_replacement_associations_ix') - op.create_index('certificate_replacement_associations_ix', 'certificate_replacement_associations', ['certificate_id', 'certificate_id'], unique=True) + op.drop_index("certificate_replacement_associations_ix") + op.create_index( + "certificate_replacement_associations_ix", + "certificate_replacement_associations", + ["certificate_id", "certificate_id"], + unique=True, + ) diff --git a/lemur/migrations/versions/932525b82f1a_.py b/lemur/migrations/versions/932525b82f1a_.py index 2ee95d07..8ff36d1c 100644 --- a/lemur/migrations/versions/932525b82f1a_.py +++ b/lemur/migrations/versions/932525b82f1a_.py @@ -7,15 +7,15 @@ Create Date: 2016-10-13 20:14:33.928029 """ # revision identifiers, used by Alembic. 
-revision = '932525b82f1a' -down_revision = '7f71c0cea31a' +revision = "932525b82f1a" +down_revision = "7f71c0cea31a" from alembic import op def upgrade(): - op.alter_column('certificates', 'active', new_column_name='notify') + op.alter_column("certificates", "active", new_column_name="notify") def downgrade(): - op.alter_column('certificates', 'notify', new_column_name='active') + op.alter_column("certificates", "notify", new_column_name="active") diff --git a/lemur/migrations/versions/9392b9f9a805_.py b/lemur/migrations/versions/9392b9f9a805_.py index d6ca734b..8ff09333 100644 --- a/lemur/migrations/versions/9392b9f9a805_.py +++ b/lemur/migrations/versions/9392b9f9a805_.py @@ -6,8 +6,8 @@ Create Date: 2018-09-17 08:33:37.087488 """ # revision identifiers, used by Alembic. -revision = '9392b9f9a805' -down_revision = '5ae0ecefb01f' +revision = "9392b9f9a805" +down_revision = "5ae0ecefb01f" from alembic import op from sqlalchemy_utils import ArrowType @@ -15,10 +15,17 @@ import sqlalchemy as sa def upgrade(): - op.add_column('pending_certs', sa.Column('last_updated', ArrowType, server_default=sa.text('now()'), onupdate=sa.text('now()'), - nullable=False)) + op.add_column( + "pending_certs", + sa.Column( + "last_updated", + ArrowType, + server_default=sa.text("now()"), + onupdate=sa.text("now()"), + nullable=False, + ), + ) def downgrade(): - op.drop_column('pending_certs', 'last_updated') - + op.drop_column("pending_certs", "last_updated") diff --git a/lemur/migrations/versions/984178255c83_.py b/lemur/migrations/versions/984178255c83_.py index 40d2ce31..88cab183 100644 --- a/lemur/migrations/versions/984178255c83_.py +++ b/lemur/migrations/versions/984178255c83_.py @@ -7,18 +7,20 @@ Create Date: 2018-10-11 20:49:12.704563 """ # revision identifiers, used by Alembic. -revision = '984178255c83' -down_revision = 'f2383bf08fbc' +revision = "984178255c83" +down_revision = "f2383bf08fbc" from alembic import op import sqlalchemy as sa def upgrade(): - op.add_column('pending_certs', sa.Column('resolved', sa.Boolean(), nullable=True)) - op.add_column('pending_certs', sa.Column('resolved_cert_id', sa.Integer(), nullable=True)) + op.add_column("pending_certs", sa.Column("resolved", sa.Boolean(), nullable=True)) + op.add_column( + "pending_certs", sa.Column("resolved_cert_id", sa.Integer(), nullable=True) + ) def downgrade(): - op.drop_column('pending_certs', 'resolved_cert_id') - op.drop_column('pending_certs', 'resolved') + op.drop_column("pending_certs", "resolved_cert_id") + op.drop_column("pending_certs", "resolved") diff --git a/lemur/migrations/versions/9f79024fe67b_.py b/lemur/migrations/versions/9f79024fe67b_.py index ad22d5f3..cb7db296 100644 --- a/lemur/migrations/versions/9f79024fe67b_.py +++ b/lemur/migrations/versions/9f79024fe67b_.py @@ -7,16 +7,26 @@ Create Date: 2019-01-03 15:36:59.181911 """ # revision identifiers, used by Alembic. 
-revision = '9f79024fe67b' -down_revision = 'ee827d1e1974' +revision = "9f79024fe67b" +down_revision = "ee827d1e1974" from alembic import op import sqlalchemy as sa def upgrade(): - op.sync_enum_values('public', 'log_type', ['create_cert', 'key_view', 'revoke_cert', 'update_cert'], ['create_cert', 'delete_cert', 'key_view', 'revoke_cert', 'update_cert']) + op.sync_enum_values( + "public", + "log_type", + ["create_cert", "key_view", "revoke_cert", "update_cert"], + ["create_cert", "delete_cert", "key_view", "revoke_cert", "update_cert"], + ) def downgrade(): - op.sync_enum_values('public', 'log_type', ['create_cert', 'delete_cert', 'key_view', 'revoke_cert', 'update_cert'], ['create_cert', 'key_view', 'revoke_cert', 'update_cert']) + op.sync_enum_values( + "public", + "log_type", + ["create_cert", "delete_cert", "key_view", "revoke_cert", "update_cert"], + ["create_cert", "key_view", "revoke_cert", "update_cert"], + ) diff --git a/lemur/migrations/versions/a02a678ddc25_.py b/lemur/migrations/versions/a02a678ddc25_.py index 603bc06a..f8fa09bb 100644 --- a/lemur/migrations/versions/a02a678ddc25_.py +++ b/lemur/migrations/versions/a02a678ddc25_.py @@ -10,8 +10,8 @@ Create Date: 2017-07-12 11:45:49.257927 """ # revision identifiers, used by Alembic. -revision = 'a02a678ddc25' -down_revision = '8ae67285ff14' +revision = "a02a678ddc25" +down_revision = "8ae67285ff14" from alembic import op import sqlalchemy as sa @@ -20,25 +20,30 @@ from sqlalchemy.sql import text def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.create_table('rotation_policies', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(), nullable=True), - sa.Column('days', sa.Integer(), nullable=True), - sa.PrimaryKeyConstraint('id') + op.create_table( + "rotation_policies", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("days", sa.Integer(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + op.add_column( + "certificates", sa.Column("rotation_policy_id", sa.Integer(), nullable=True) + ) + op.create_foreign_key( + None, "certificates", "rotation_policies", ["rotation_policy_id"], ["id"] ) - op.add_column('certificates', sa.Column('rotation_policy_id', sa.Integer(), nullable=True)) - op.create_foreign_key(None, 'certificates', 'rotation_policies', ['rotation_policy_id'], ['id']) conn = op.get_bind() - stmt = text('insert into rotation_policies (days, name) values (:days, :name)') - stmt = stmt.bindparams(days=30, name='default') + stmt = text("insert into rotation_policies (days, name) values (:days, :name)") + stmt = stmt.bindparams(days=30, name="default") conn.execute(stmt) - stmt = text('select id from rotation_policies where name=:name') - stmt = stmt.bindparams(name='default') + stmt = text("select id from rotation_policies where name=:name") + stmt = stmt.bindparams(name="default") rotation_policy_id = conn.execute(stmt).fetchone()[0] - stmt = text('update certificates set rotation_policy_id=:rotation_policy_id') + stmt = text("update certificates set rotation_policy_id=:rotation_policy_id") stmt = stmt.bindparams(rotation_policy_id=rotation_policy_id) conn.execute(stmt) # ### end Alembic commands ### @@ -46,9 +51,17 @@ def upgrade(): def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.drop_constraint(None, 'certificates', type_='foreignkey') - op.drop_column('certificates', 'rotation_policy_id') - op.drop_index('certificate_replacement_associations_ix', table_name='certificate_replacement_associations') - op.create_index('certificate_replacement_associations_ix', 'certificate_replacement_associations', ['replaced_certificate_id', 'certificate_id'], unique=True) - op.drop_table('rotation_policies') + op.drop_constraint(None, "certificates", type_="foreignkey") + op.drop_column("certificates", "rotation_policy_id") + op.drop_index( + "certificate_replacement_associations_ix", + table_name="certificate_replacement_associations", + ) + op.create_index( + "certificate_replacement_associations_ix", + "certificate_replacement_associations", + ["replaced_certificate_id", "certificate_id"], + unique=True, + ) + op.drop_table("rotation_policies") # ### end Alembic commands ### diff --git a/lemur/migrations/versions/ac483cfeb230_.py b/lemur/migrations/versions/ac483cfeb230_.py index d28a2599..d1e2361d 100644 --- a/lemur/migrations/versions/ac483cfeb230_.py +++ b/lemur/migrations/versions/ac483cfeb230_.py @@ -7,8 +7,8 @@ Create Date: 2017-10-11 10:16:39.682591 """ # revision identifiers, used by Alembic. -revision = 'ac483cfeb230' -down_revision = 'b29e2c4bf8c9' +revision = "ac483cfeb230" +down_revision = "b29e2c4bf8c9" from alembic import op import sqlalchemy as sa @@ -16,12 +16,18 @@ from sqlalchemy.dialects import postgresql def upgrade(): - op.alter_column('certificates', 'name', - existing_type=sa.VARCHAR(length=128), - type_=sa.String(length=256)) + op.alter_column( + "certificates", + "name", + existing_type=sa.VARCHAR(length=128), + type_=sa.String(length=256), + ) def downgrade(): - op.alter_column('certificates', 'name', - existing_type=sa.VARCHAR(length=256), - type_=sa.String(length=128)) + op.alter_column( + "certificates", + "name", + existing_type=sa.VARCHAR(length=256), + type_=sa.String(length=128), + ) diff --git a/lemur/migrations/versions/b29e2c4bf8c9_.py b/lemur/migrations/versions/b29e2c4bf8c9_.py index 19835e09..6f9dc526 100644 --- a/lemur/migrations/versions/b29e2c4bf8c9_.py +++ b/lemur/migrations/versions/b29e2c4bf8c9_.py @@ -7,8 +7,8 @@ Create Date: 2017-09-26 10:50:35.740367 """ # revision identifiers, used by Alembic. -revision = 'b29e2c4bf8c9' -down_revision = '1ae8e3104db8' +revision = "b29e2c4bf8c9" +down_revision = "1ae8e3104db8" from alembic import op import sqlalchemy as sa @@ -16,13 +16,25 @@ import sqlalchemy as sa def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.add_column('certificates', sa.Column('external_id', sa.String(128), nullable=True)) - op.sync_enum_values('public', 'log_type', ['create_cert', 'key_view', 'update_cert'], ['create_cert', 'key_view', 'revoke_cert', 'update_cert']) + op.add_column( + "certificates", sa.Column("external_id", sa.String(128), nullable=True) + ) + op.sync_enum_values( + "public", + "log_type", + ["create_cert", "key_view", "update_cert"], + ["create_cert", "key_view", "revoke_cert", "update_cert"], + ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### - op.sync_enum_values('public', 'log_type', ['create_cert', 'key_view', 'revoke_cert', 'update_cert'], ['create_cert', 'key_view', 'update_cert']) - op.drop_column('certificates', 'external_id') + op.sync_enum_values( + "public", + "log_type", + ["create_cert", "key_view", "revoke_cert", "update_cert"], + ["create_cert", "key_view", "update_cert"], + ) + op.drop_column("certificates", "external_id") # ### end Alembic commands ### diff --git a/lemur/migrations/versions/c05a8998b371_.py b/lemur/migrations/versions/c05a8998b371_.py index cf600043..a5c9abff 100644 --- a/lemur/migrations/versions/c05a8998b371_.py +++ b/lemur/migrations/versions/c05a8998b371_.py @@ -7,25 +7,27 @@ Create Date: 2017-11-10 14:51:28.975927 """ # revision identifiers, used by Alembic. -revision = 'c05a8998b371' -down_revision = 'ac483cfeb230' +revision = "c05a8998b371" +down_revision = "ac483cfeb230" from alembic import op import sqlalchemy as sa import sqlalchemy_utils + def upgrade(): - op.create_table('api_keys', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('name', sa.String(length=128), nullable=True), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.Column('ttl', sa.BigInteger(), nullable=False), - sa.Column('issued_at', sa.BigInteger(), nullable=False), - sa.Column('revoked', sa.Boolean(), nullable=False), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "api_keys", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("name", sa.String(length=128), nullable=True), + sa.Column("user_id", sa.Integer(), nullable=False), + sa.Column("ttl", sa.BigInteger(), nullable=False), + sa.Column("issued_at", sa.BigInteger(), nullable=False), + sa.Column("revoked", sa.Boolean(), nullable=False), + sa.ForeignKeyConstraint(["user_id"], ["users.id"]), + sa.PrimaryKeyConstraint("id"), ) def downgrade(): - op.drop_table('api_keys') + op.drop_table("api_keys") diff --git a/lemur/migrations/versions/c87cb989af04_.py b/lemur/migrations/versions/c87cb989af04_.py index 4959e727..69f53bf4 100644 --- a/lemur/migrations/versions/c87cb989af04_.py +++ b/lemur/migrations/versions/c87cb989af04_.py @@ -5,15 +5,15 @@ Create Date: 2018-10-11 09:44:57.099854 """ -revision = 'c87cb989af04' -down_revision = '9392b9f9a805' +revision = "c87cb989af04" +down_revision = "9392b9f9a805" from alembic import op def upgrade(): - op.create_index(op.f('ix_domains_name'), 'domains', ['name'], unique=False) + op.create_index(op.f("ix_domains_name"), "domains", ["name"], unique=False) def downgrade(): - op.drop_index(op.f('ix_domains_name'), table_name='domains') + op.drop_index(op.f("ix_domains_name"), table_name="domains") diff --git a/lemur/migrations/versions/ce547319f7be_.py b/lemur/migrations/versions/ce547319f7be_.py index 41ef1fa8..d139c6fb 100644 --- a/lemur/migrations/versions/ce547319f7be_.py +++ b/lemur/migrations/versions/ce547319f7be_.py @@ -7,8 +7,8 @@ Create Date: 2018-02-23 11:00:02.150561 """ # revision identifiers, used by Alembic. 
-revision = 'ce547319f7be' -down_revision = '5bc47fa7cac4' +revision = "ce547319f7be" +down_revision = "5bc47fa7cac4" import sqlalchemy as sa @@ -24,12 +24,12 @@ TABLE = "certificate_notification_associations" def upgrade(): print("Adding id column") op.add_column( - TABLE, - sa.Column('id', sa.Integer, primary_key=True, autoincrement=True) + TABLE, sa.Column("id", sa.Integer, primary_key=True, autoincrement=True) ) db.session.commit() db.session.flush() + def downgrade(): op.drop_column(TABLE, "id") db.session.commit() diff --git a/lemur/migrations/versions/e3691fc396e9_.py b/lemur/migrations/versions/e3691fc396e9_.py index 1c5c2f15..0007b804 100644 --- a/lemur/migrations/versions/e3691fc396e9_.py +++ b/lemur/migrations/versions/e3691fc396e9_.py @@ -7,29 +7,36 @@ Create Date: 2016-11-28 13:15:46.995219 """ # revision identifiers, used by Alembic. -revision = 'e3691fc396e9' -down_revision = '932525b82f1a' +revision = "e3691fc396e9" +down_revision = "932525b82f1a" from alembic import op import sqlalchemy as sa import sqlalchemy_utils + def upgrade(): ### commands auto generated by Alembic - please adjust! ### - op.create_table('logs', - sa.Column('id', sa.Integer(), nullable=False), - sa.Column('certificate_id', sa.Integer(), nullable=True), - sa.Column('log_type', sa.Enum('key_view', name='log_type'), nullable=False), - sa.Column('logged_at', sqlalchemy_utils.types.arrow.ArrowType(), server_default=sa.text('now()'), nullable=False), - sa.Column('user_id', sa.Integer(), nullable=False), - sa.ForeignKeyConstraint(['certificate_id'], ['certificates.id'], ), - sa.ForeignKeyConstraint(['user_id'], ['users.id'], ), - sa.PrimaryKeyConstraint('id') + op.create_table( + "logs", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("certificate_id", sa.Integer(), nullable=True), + sa.Column("log_type", sa.Enum("key_view", name="log_type"), nullable=False), + sa.Column( + "logged_at", + sqlalchemy_utils.types.arrow.ArrowType(), + server_default=sa.text("now()"), + nullable=False, + ), + sa.Column("user_id", sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(["certificate_id"], ["certificates.id"]), + sa.ForeignKeyConstraint(["user_id"], ["users.id"]), + sa.PrimaryKeyConstraint("id"), ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### - op.drop_table('logs') + op.drop_table("logs") ### end Alembic commands ### diff --git a/lemur/migrations/versions/ee827d1e1974_.py b/lemur/migrations/versions/ee827d1e1974_.py index 62ac6222..56696fe3 100644 --- a/lemur/migrations/versions/ee827d1e1974_.py +++ b/lemur/migrations/versions/ee827d1e1974_.py @@ -7,25 +7,44 @@ Create Date: 2018-11-05 09:49:40.226368 """ # revision identifiers, used by Alembic. 
-revision = 'ee827d1e1974' -down_revision = '7ead443ba911' +revision = "ee827d1e1974" +down_revision = "7ead443ba911" from alembic import op from sqlalchemy.exc import ProgrammingError + def upgrade(): connection = op.get_bind() connection.execute("CREATE EXTENSION IF NOT EXISTS pg_trgm") - op.create_index('ix_certificates_cn', 'certificates', ['cn'], unique=False, postgresql_ops={'cn': 'gin_trgm_ops'}, - postgresql_using='gin') - op.create_index('ix_certificates_name', 'certificates', ['name'], unique=False, - postgresql_ops={'name': 'gin_trgm_ops'}, postgresql_using='gin') - op.create_index('ix_domains_name_gin', 'domains', ['name'], unique=False, postgresql_ops={'name': 'gin_trgm_ops'}, - postgresql_using='gin') + op.create_index( + "ix_certificates_cn", + "certificates", + ["cn"], + unique=False, + postgresql_ops={"cn": "gin_trgm_ops"}, + postgresql_using="gin", + ) + op.create_index( + "ix_certificates_name", + "certificates", + ["name"], + unique=False, + postgresql_ops={"name": "gin_trgm_ops"}, + postgresql_using="gin", + ) + op.create_index( + "ix_domains_name_gin", + "domains", + ["name"], + unique=False, + postgresql_ops={"name": "gin_trgm_ops"}, + postgresql_using="gin", + ) def downgrade(): - op.drop_index('ix_domains_name', table_name='domains') - op.drop_index('ix_certificates_name', table_name='certificates') - op.drop_index('ix_certificates_cn', table_name='certificates') + op.drop_index("ix_domains_name", table_name="domains") + op.drop_index("ix_certificates_name", table_name="certificates") + op.drop_index("ix_certificates_cn", table_name="certificates") diff --git a/lemur/migrations/versions/f2383bf08fbc_.py b/lemur/migrations/versions/f2383bf08fbc_.py index 1fa36960..a54aa5d2 100644 --- a/lemur/migrations/versions/f2383bf08fbc_.py +++ b/lemur/migrations/versions/f2383bf08fbc_.py @@ -7,17 +7,22 @@ Create Date: 2018-10-11 11:23:31.195471 """ -revision = 'f2383bf08fbc' -down_revision = 'c87cb989af04' +revision = "f2383bf08fbc" +down_revision = "c87cb989af04" import sqlalchemy as sa from alembic import op def upgrade(): - op.create_index('ix_certificates_id_desc', 'certificates', [sa.text('id DESC')], unique=True, - postgresql_using='btree') + op.create_index( + "ix_certificates_id_desc", + "certificates", + [sa.text("id DESC")], + unique=True, + postgresql_using="btree", + ) def downgrade(): - op.drop_index('ix_certificates_id_desc', table_name='certificates') + op.drop_index("ix_certificates_id_desc", table_name="certificates") diff --git a/lemur/models.py b/lemur/models.py index 69f82360..163d156f 100644 --- a/lemur/models.py +++ b/lemur/models.py @@ -12,121 +12,201 @@ from sqlalchemy import Column, Integer, ForeignKey, Index, UniqueConstraint from lemur.database import db -certificate_associations = db.Table('certificate_associations', - Column('domain_id', Integer, ForeignKey('domains.id')), - Column('certificate_id', Integer, ForeignKey('certificates.id')) - ) +certificate_associations = db.Table( + "certificate_associations", + Column("domain_id", Integer, ForeignKey("domains.id")), + Column("certificate_id", Integer, ForeignKey("certificates.id")), +) -Index('certificate_associations_ix', certificate_associations.c.domain_id, certificate_associations.c.certificate_id) +Index( + "certificate_associations_ix", + certificate_associations.c.domain_id, + certificate_associations.c.certificate_id, +) -certificate_destination_associations = db.Table('certificate_destination_associations', - Column('destination_id', Integer, - ForeignKey('destinations.id', 
ondelete='cascade')), - Column('certificate_id', Integer, - ForeignKey('certificates.id', ondelete='cascade')) - ) +certificate_destination_associations = db.Table( + "certificate_destination_associations", + Column( + "destination_id", Integer, ForeignKey("destinations.id", ondelete="cascade") + ), + Column( + "certificate_id", Integer, ForeignKey("certificates.id", ondelete="cascade") + ), +) -Index('certificate_destination_associations_ix', certificate_destination_associations.c.destination_id, certificate_destination_associations.c.certificate_id) +Index( + "certificate_destination_associations_ix", + certificate_destination_associations.c.destination_id, + certificate_destination_associations.c.certificate_id, +) -certificate_source_associations = db.Table('certificate_source_associations', - Column('source_id', Integer, - ForeignKey('sources.id', ondelete='cascade')), - Column('certificate_id', Integer, - ForeignKey('certificates.id', ondelete='cascade')) - ) +certificate_source_associations = db.Table( + "certificate_source_associations", + Column("source_id", Integer, ForeignKey("sources.id", ondelete="cascade")), + Column( + "certificate_id", Integer, ForeignKey("certificates.id", ondelete="cascade") + ), +) -Index('certificate_source_associations_ix', certificate_source_associations.c.source_id, certificate_source_associations.c.certificate_id) +Index( + "certificate_source_associations_ix", + certificate_source_associations.c.source_id, + certificate_source_associations.c.certificate_id, +) -certificate_notification_associations = db.Table('certificate_notification_associations', - Column('notification_id', Integer, - ForeignKey('notifications.id', ondelete='cascade')), - Column('certificate_id', Integer, - ForeignKey('certificates.id', ondelete='cascade')), - Column('id', Integer, primary_key=True, autoincrement=True), - UniqueConstraint('notification_id', 'certificate_id', name='uq_dest_not_ids') - ) +certificate_notification_associations = db.Table( + "certificate_notification_associations", + Column( + "notification_id", Integer, ForeignKey("notifications.id", ondelete="cascade") + ), + Column( + "certificate_id", Integer, ForeignKey("certificates.id", ondelete="cascade") + ), + Column("id", Integer, primary_key=True, autoincrement=True), + UniqueConstraint("notification_id", "certificate_id", name="uq_dest_not_ids"), +) -Index('certificate_notification_associations_ix', certificate_notification_associations.c.notification_id, certificate_notification_associations.c.certificate_id) +Index( + "certificate_notification_associations_ix", + certificate_notification_associations.c.notification_id, + certificate_notification_associations.c.certificate_id, +) -certificate_replacement_associations = db.Table('certificate_replacement_associations', - Column('replaced_certificate_id', Integer, - ForeignKey('certificates.id', ondelete='cascade')), - Column('certificate_id', Integer, - ForeignKey('certificates.id', ondelete='cascade')) - ) +certificate_replacement_associations = db.Table( + "certificate_replacement_associations", + Column( + "replaced_certificate_id", + Integer, + ForeignKey("certificates.id", ondelete="cascade"), + ), + Column( + "certificate_id", Integer, ForeignKey("certificates.id", ondelete="cascade") + ), +) -Index('certificate_replacement_associations_ix', certificate_replacement_associations.c.replaced_certificate_id, certificate_replacement_associations.c.certificate_id, unique=True) +Index( + "certificate_replacement_associations_ix", + 
certificate_replacement_associations.c.replaced_certificate_id, + certificate_replacement_associations.c.certificate_id, + unique=True, +) -roles_authorities = db.Table('roles_authorities', - Column('authority_id', Integer, ForeignKey('authorities.id')), - Column('role_id', Integer, ForeignKey('roles.id')) - ) +roles_authorities = db.Table( + "roles_authorities", + Column("authority_id", Integer, ForeignKey("authorities.id")), + Column("role_id", Integer, ForeignKey("roles.id")), +) -Index('roles_authorities_ix', roles_authorities.c.authority_id, roles_authorities.c.role_id) +Index( + "roles_authorities_ix", + roles_authorities.c.authority_id, + roles_authorities.c.role_id, +) -roles_certificates = db.Table('roles_certificates', - Column('certificate_id', Integer, ForeignKey('certificates.id')), - Column('role_id', Integer, ForeignKey('roles.id')) - ) +roles_certificates = db.Table( + "roles_certificates", + Column("certificate_id", Integer, ForeignKey("certificates.id")), + Column("role_id", Integer, ForeignKey("roles.id")), +) -Index('roles_certificates_ix', roles_certificates.c.certificate_id, roles_certificates.c.role_id) +Index( + "roles_certificates_ix", + roles_certificates.c.certificate_id, + roles_certificates.c.role_id, +) -roles_users = db.Table('roles_users', - Column('user_id', Integer, ForeignKey('users.id')), - Column('role_id', Integer, ForeignKey('roles.id')) - ) +roles_users = db.Table( + "roles_users", + Column("user_id", Integer, ForeignKey("users.id")), + Column("role_id", Integer, ForeignKey("roles.id")), +) -Index('roles_users_ix', roles_users.c.user_id, roles_users.c.role_id) +Index("roles_users_ix", roles_users.c.user_id, roles_users.c.role_id) -policies_ciphers = db.Table('policies_ciphers', - Column('cipher_id', Integer, ForeignKey('ciphers.id')), - Column('policy_id', Integer, ForeignKey('policy.id'))) +policies_ciphers = db.Table( + "policies_ciphers", + Column("cipher_id", Integer, ForeignKey("ciphers.id")), + Column("policy_id", Integer, ForeignKey("policy.id")), +) -Index('policies_ciphers_ix', policies_ciphers.c.cipher_id, policies_ciphers.c.policy_id) +Index("policies_ciphers_ix", policies_ciphers.c.cipher_id, policies_ciphers.c.policy_id) -pending_cert_destination_associations = db.Table('pending_cert_destination_associations', - Column('destination_id', Integer, - ForeignKey('destinations.id', ondelete='cascade')), - Column('pending_cert_id', Integer, - ForeignKey('pending_certs.id', ondelete='cascade')) - ) +pending_cert_destination_associations = db.Table( + "pending_cert_destination_associations", + Column( + "destination_id", Integer, ForeignKey("destinations.id", ondelete="cascade") + ), + Column( + "pending_cert_id", Integer, ForeignKey("pending_certs.id", ondelete="cascade") + ), +) -Index('pending_cert_destination_associations_ix', pending_cert_destination_associations.c.destination_id, pending_cert_destination_associations.c.pending_cert_id) +Index( + "pending_cert_destination_associations_ix", + pending_cert_destination_associations.c.destination_id, + pending_cert_destination_associations.c.pending_cert_id, +) -pending_cert_notification_associations = db.Table('pending_cert_notification_associations', - Column('notification_id', Integer, - ForeignKey('notifications.id', ondelete='cascade')), - Column('pending_cert_id', Integer, - ForeignKey('pending_certs.id', ondelete='cascade')) - ) +pending_cert_notification_associations = db.Table( + "pending_cert_notification_associations", + Column( + "notification_id", Integer, 
ForeignKey("notifications.id", ondelete="cascade") + ), + Column( + "pending_cert_id", Integer, ForeignKey("pending_certs.id", ondelete="cascade") + ), +) -Index('pending_cert_notification_associations_ix', pending_cert_notification_associations.c.notification_id, pending_cert_notification_associations.c.pending_cert_id) +Index( + "pending_cert_notification_associations_ix", + pending_cert_notification_associations.c.notification_id, + pending_cert_notification_associations.c.pending_cert_id, +) -pending_cert_source_associations = db.Table('pending_cert_source_associations', - Column('source_id', Integer, - ForeignKey('sources.id', ondelete='cascade')), - Column('pending_cert_id', Integer, - ForeignKey('pending_certs.id', ondelete='cascade')) - ) +pending_cert_source_associations = db.Table( + "pending_cert_source_associations", + Column("source_id", Integer, ForeignKey("sources.id", ondelete="cascade")), + Column( + "pending_cert_id", Integer, ForeignKey("pending_certs.id", ondelete="cascade") + ), +) -Index('pending_cert_source_associations_ix', pending_cert_source_associations.c.source_id, pending_cert_source_associations.c.pending_cert_id) +Index( + "pending_cert_source_associations_ix", + pending_cert_source_associations.c.source_id, + pending_cert_source_associations.c.pending_cert_id, +) -pending_cert_replacement_associations = db.Table('pending_cert_replacement_associations', - Column('replaced_certificate_id', Integer, - ForeignKey('certificates.id', ondelete='cascade')), - Column('pending_cert_id', Integer, - ForeignKey('pending_certs.id', ondelete='cascade')) - ) +pending_cert_replacement_associations = db.Table( + "pending_cert_replacement_associations", + Column( + "replaced_certificate_id", + Integer, + ForeignKey("certificates.id", ondelete="cascade"), + ), + Column( + "pending_cert_id", Integer, ForeignKey("pending_certs.id", ondelete="cascade") + ), +) -Index('pending_cert_replacement_associations_ix', pending_cert_replacement_associations.c.replaced_certificate_id, pending_cert_replacement_associations.c.pending_cert_id) +Index( + "pending_cert_replacement_associations_ix", + pending_cert_replacement_associations.c.replaced_certificate_id, + pending_cert_replacement_associations.c.pending_cert_id, +) -pending_cert_role_associations = db.Table('pending_cert_role_associations', - Column('pending_cert_id', Integer, ForeignKey('pending_certs.id')), - Column('role_id', Integer, ForeignKey('roles.id')) - ) +pending_cert_role_associations = db.Table( + "pending_cert_role_associations", + Column("pending_cert_id", Integer, ForeignKey("pending_certs.id")), + Column("role_id", Integer, ForeignKey("roles.id")), +) -Index('pending_cert_role_associations_ix', pending_cert_role_associations.c.pending_cert_id, pending_cert_role_associations.c.role_id) +Index( + "pending_cert_role_associations_ix", + pending_cert_role_associations.c.pending_cert_id, + pending_cert_role_associations.c.role_id, +) diff --git a/lemur/notifications/cli.py b/lemur/notifications/cli.py index e3bf431e..a2848117 100644 --- a/lemur/notifications/cli.py +++ b/lemur/notifications/cli.py @@ -14,7 +14,14 @@ from lemur.notifications.messaging import send_expiration_notifications manager = Manager(usage="Handles notification related tasks.") -@manager.option('-e', '--exclude', dest='exclude', action='append', default=[], help='Common name matching of certificates that should be excluded from notification') +@manager.option( + "-e", + "--exclude", + dest="exclude", + action="append", + default=[], + help="Common name 
matching of certificates that should be excluded from notification", +) def expirations(exclude): """ Runs Lemur's notification engine, that looks for expired certificates and sends @@ -33,12 +40,13 @@ def expirations(exclude): success, failed = send_expiration_notifications(exclude) print( "Finished notifying subscribers about expiring certificates! Sent: {success} Failed: {failed}".format( - success=success, - failed=failed + success=success, failed=failed ) ) status = SUCCESS_METRIC_STATUS except Exception as e: sentry.captureException() - metrics.send('expiration_notification_job', 'counter', 1, metric_tags={'status': status}) + metrics.send( + "expiration_notification_job", "counter", 1, metric_tags={"status": status} + ) diff --git a/lemur/notifications/messaging.py b/lemur/notifications/messaging.py index cd88ebc8..919b73db 100644 --- a/lemur/notifications/messaging.py +++ b/lemur/notifications/messaging.py @@ -36,15 +36,17 @@ def get_certificates(exclude=None): now = arrow.utcnow() max = now + timedelta(days=90) - q = database.db.session.query(Certificate) \ - .filter(Certificate.not_after <= max) \ - .filter(Certificate.notify == True) \ - .filter(Certificate.expired == False) # noqa + q = ( + database.db.session.query(Certificate) + .filter(Certificate.not_after <= max) + .filter(Certificate.notify == True) + .filter(Certificate.expired == False) + ) # noqa exclude_conditions = [] if exclude: for e in exclude: - exclude_conditions.append(~Certificate.name.ilike('%{}%'.format(e))) + exclude_conditions.append(~Certificate.name.ilike("%{}%".format(e))) q = q.filter(and_(*exclude_conditions)) @@ -101,7 +103,12 @@ def send_notification(event_type, data, targets, notification): except Exception as e: sentry.captureException() - metrics.send('notification', 'counter', 1, metric_tags={'status': status, 'event_type': event_type}) + metrics.send( + "notification", + "counter", + 1, + metric_tags={"status": status, "event_type": event_type}, + ) if status == SUCCESS_METRIC_STATUS: return True @@ -115,7 +122,7 @@ def send_expiration_notifications(exclude): success = failure = 0 # security team gets all - security_email = current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL') + security_email = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL") security_data = [] for owner, notification_group in get_eligible_certificates(exclude=exclude).items(): @@ -127,26 +134,43 @@ def send_expiration_notifications(exclude): for data in certificates: n, certificate = data - cert_data = certificate_notification_output_schema.dump(certificate).data + cert_data = certificate_notification_output_schema.dump( + certificate + ).data notification_data.append(cert_data) security_data.append(cert_data) - notification_recipient = get_plugin_option('recipients', notification.options) + notification_recipient = get_plugin_option( + "recipients", notification.options + ) if notification_recipient: notification_recipient = notification_recipient.split(",") - if send_notification('expiration', notification_data, [owner], notification): + if send_notification( + "expiration", notification_data, [owner], notification + ): success += 1 else: failure += 1 - if notification_recipient and owner != notification_recipient and security_email != notification_recipient: - if send_notification('expiration', notification_data, notification_recipient, notification): + if ( + notification_recipient + and owner != notification_recipient + and security_email != notification_recipient + ): + if send_notification( + "expiration", + 
notification_data, + notification_recipient, + notification, + ): success += 1 else: failure += 1 - if send_notification('expiration', security_data, security_email, notification): + if send_notification( + "expiration", security_data, security_email, notification + ): success += 1 else: failure += 1 @@ -165,24 +189,35 @@ def send_rotation_notification(certificate, notification_plugin=None): """ status = FAILURE_METRIC_STATUS if not notification_plugin: - notification_plugin = plugins.get(current_app.config.get('LEMUR_DEFAULT_NOTIFICATION_PLUGIN')) + notification_plugin = plugins.get( + current_app.config.get("LEMUR_DEFAULT_NOTIFICATION_PLUGIN") + ) data = certificate_notification_output_schema.dump(certificate).data try: - notification_plugin.send('rotation', data, [data['owner']]) + notification_plugin.send("rotation", data, [data["owner"]]) status = SUCCESS_METRIC_STATUS except Exception as e: - current_app.logger.error('Unable to send notification to {}.'.format(data['owner']), exc_info=True) + current_app.logger.error( + "Unable to send notification to {}.".format(data["owner"]), exc_info=True + ) sentry.captureException() - metrics.send('notification', 'counter', 1, metric_tags={'status': status, 'event_type': 'rotation'}) + metrics.send( + "notification", + "counter", + 1, + metric_tags={"status": status, "event_type": "rotation"}, + ) if status == SUCCESS_METRIC_STATUS: return True -def send_pending_failure_notification(pending_cert, notify_owner=True, notify_security=True, notification_plugin=None): +def send_pending_failure_notification( + pending_cert, notify_owner=True, notify_security=True, notification_plugin=None +): """ Sends a report to certificate owners when their pending certificate failed to be created. @@ -194,32 +229,47 @@ def send_pending_failure_notification(pending_cert, notify_owner=True, notify_se if not notification_plugin: notification_plugin = plugins.get( - current_app.config.get('LEMUR_DEFAULT_NOTIFICATION_PLUGIN', 'email-notification') + current_app.config.get( + "LEMUR_DEFAULT_NOTIFICATION_PLUGIN", "email-notification" + ) ) data = pending_certificate_output_schema.dump(pending_cert).data - data["security_email"] = current_app.config.get('LEMUR_SECURITY_TEAM_EMAIL') + data["security_email"] = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL") if notify_owner: try: - notification_plugin.send('failed', data, [data['owner']], pending_cert) + notification_plugin.send("failed", data, [data["owner"]], pending_cert) status = SUCCESS_METRIC_STATUS except Exception as e: - current_app.logger.error('Unable to send pending failure notification to {}.'.format(data['owner']), - exc_info=True) + current_app.logger.error( + "Unable to send pending failure notification to {}.".format( + data["owner"] + ), + exc_info=True, + ) sentry.captureException() if notify_security: try: - notification_plugin.send('failed', data, data["security_email"], pending_cert) + notification_plugin.send( + "failed", data, data["security_email"], pending_cert + ) status = SUCCESS_METRIC_STATUS except Exception as e: - current_app.logger.error('Unable to send pending failure notification to ' - '{}.'.format(data['security_email']), - exc_info=True) + current_app.logger.error( + "Unable to send pending failure notification to " + "{}.".format(data["security_email"]), + exc_info=True, + ) sentry.captureException() - metrics.send('notification', 'counter', 1, metric_tags={'status': status, 'event_type': 'rotation'}) + metrics.send( + "notification", + "counter", + 1, + metric_tags={"status": 
status, "event_type": "rotation"}, + ) if status == SUCCESS_METRIC_STATUS: return True @@ -242,20 +292,22 @@ def needs_notification(certificate): if not notification.active or not notification.options: return - interval = get_plugin_option('interval', notification.options) - unit = get_plugin_option('unit', notification.options) + interval = get_plugin_option("interval", notification.options) + unit = get_plugin_option("unit", notification.options) - if unit == 'weeks': + if unit == "weeks": interval *= 7 - elif unit == 'months': + elif unit == "months": interval *= 30 - elif unit == 'days': # it's nice to be explicit about the base unit + elif unit == "days": # it's nice to be explicit about the base unit pass else: - raise Exception("Invalid base unit for expiration interval: {0}".format(unit)) + raise Exception( + "Invalid base unit for expiration interval: {0}".format(unit) + ) if days == interval: notifications.append(notification) diff --git a/lemur/notifications/models.py b/lemur/notifications/models.py index 87646b4c..7053b8d7 100644 --- a/lemur/notifications/models.py +++ b/lemur/notifications/models.py @@ -11,12 +11,14 @@ from sqlalchemy_utils import JSONType from lemur.database import db from lemur.plugins.base import plugins -from lemur.models import certificate_notification_associations, \ - pending_cert_notification_associations +from lemur.models import ( + certificate_notification_associations, + pending_cert_notification_associations, +) class Notification(db.Model): - __tablename__ = 'notifications' + __tablename__ = "notifications" id = Column(Integer, primary_key=True) label = Column(String(128), unique=True) description = Column(Text()) @@ -28,14 +30,14 @@ class Notification(db.Model): secondary=certificate_notification_associations, passive_deletes=True, backref="notification", - cascade='all,delete' + cascade="all,delete", ) pending_certificates = relationship( "PendingCertificate", secondary=pending_cert_notification_associations, passive_deletes=True, backref="notification", - cascade='all,delete' + cascade="all,delete", ) @property diff --git a/lemur/notifications/schemas.py b/lemur/notifications/schemas.py index b5d4e1e6..a3ff4c99 100644 --- a/lemur/notifications/schemas.py +++ b/lemur/notifications/schemas.py @@ -7,7 +7,11 @@ """ from marshmallow import fields, post_dump from lemur.common.schema import LemurInputSchema, LemurOutputSchema -from lemur.schemas import PluginInputSchema, PluginOutputSchema, AssociatedCertificateSchema +from lemur.schemas import ( + PluginInputSchema, + PluginOutputSchema, + AssociatedCertificateSchema, +) class NotificationInputSchema(LemurInputSchema): @@ -30,7 +34,7 @@ class NotificationOutputSchema(LemurOutputSchema): @post_dump def fill_object(self, data): if data: - data['plugin']['pluginOptions'] = data['options'] + data["plugin"]["pluginOptions"] = data["options"] return data diff --git a/lemur/notifications/service.py b/lemur/notifications/service.py index 957757bd..ac624d1c 100644 --- a/lemur/notifications/service.py +++ b/lemur/notifications/service.py @@ -31,26 +31,28 @@ def create_default_expiration_notifications(name, recipients, intervals=None): options = [ { - 'name': 'unit', - 'type': 'select', - 'required': True, - 'validation': '', - 'available': ['days', 'weeks', 'months'], - 'helpMessage': 'Interval unit', - 'value': 'days', + "name": "unit", + "type": "select", + "required": True, + "validation": "", + "available": ["days", "weeks", "months"], + "helpMessage": "Interval unit", + "value": "days", }, { - 'name': 
'recipients', - 'type': 'str', - 'required': True, - 'validation': '^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$', - 'helpMessage': 'Comma delimited list of email addresses', - 'value': ','.join(recipients) + "name": "recipients", + "type": "str", + "required": True, + "validation": "^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$", + "helpMessage": "Comma delimited list of email addresses", + "value": ",".join(recipients), }, ] if intervals is None: - intervals = current_app.config.get("LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS", [30, 15, 2]) + intervals = current_app.config.get( + "LEMUR_DEFAULT_EXPIRATION_NOTIFICATION_INTERVALS", [30, 15, 2] + ) notifications = [] for i in intervals: @@ -58,21 +60,25 @@ def create_default_expiration_notifications(name, recipients, intervals=None): if not n: inter = [ { - 'name': 'interval', - 'type': 'int', - 'required': True, - 'validation': '^\d+$', - 'helpMessage': 'Number of days to be alert before expiration.', - 'value': i, + "name": "interval", + "type": "int", + "required": True, + "validation": "^\d+$", + "helpMessage": "Number of days to be alert before expiration.", + "value": i, } ] inter.extend(options) n = create( label="{name}_{interval}_DAY".format(name=name, interval=i), - plugin_name=current_app.config.get("LEMUR_DEFAULT_NOTIFICATION_PLUGIN", "email-notification"), + plugin_name=current_app.config.get( + "LEMUR_DEFAULT_NOTIFICATION_PLUGIN", "email-notification" + ), options=list(inter), - description="Default {interval} day expiration notification".format(interval=i), - certificates=[] + description="Default {interval} day expiration notification".format( + interval=i + ), + certificates=[], ) notifications.append(n) @@ -91,7 +97,9 @@ def create(label, plugin_name, options, description, certificates): :rtype : Notification :return: """ - notification = Notification(label=label, options=options, plugin_name=plugin_name, description=description) + notification = Notification( + label=label, options=options, plugin_name=plugin_name, description=description + ) notification.certificates = certificates return database.create(notification) @@ -147,7 +155,7 @@ def get_by_label(label): :param label: :return: """ - return database.get(Notification, label, field='label') + return database.get(Notification, label, field="label") def get_all(): @@ -161,18 +169,20 @@ def get_all(): def render(args): - filt = args.pop('filter') - certificate_id = args.pop('certificate_id', None) + filt = args.pop("filter") + certificate_id = args.pop("certificate_id", None) if certificate_id: - query = database.session_query(Notification).join(Certificate, Notification.certificate) + query = database.session_query(Notification).join( + Certificate, Notification.certificate + ) query = query.filter(Certificate.id == certificate_id) else: query = database.session_query(Notification) if filt: - terms = filt.split(';') - if terms[0] == 'active': + terms = filt.split(";") + if terms[0] == "active": query = query.filter(Notification.active == truthiness(terms[1])) else: query = database.filter(query, Notification, terms) diff --git a/lemur/notifications/views.py b/lemur/notifications/views.py index 4a2d82a8..cdabb4d4 100644 --- a/lemur/notifications/views.py +++ b/lemur/notifications/views.py @@ -9,7 +9,11 @@ from flask import Blueprint from flask_restful import Api, reqparse, inputs from lemur.notifications import service -from lemur.notifications.schemas import notification_input_schema, notification_output_schema, notifications_output_schema +from lemur.notifications.schemas 
import ( + notification_input_schema, + notification_output_schema, + notifications_output_schema, +) from lemur.auth.service import AuthenticatedResource from lemur.common.utils import paginated_parser @@ -17,12 +21,13 @@ from lemur.common.utils import paginated_parser from lemur.common.schema import validate_schema -mod = Blueprint('notifications', __name__) +mod = Blueprint("notifications", __name__) api = Api(mod) class NotificationsList(AuthenticatedResource): """ Defines the 'notifications' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(NotificationsList, self).__init__() @@ -103,7 +108,7 @@ class NotificationsList(AuthenticatedResource): :statuscode 200: no error """ parser = paginated_parser.copy() - parser.add_argument('active', type=inputs.boolean, location='args') + parser.add_argument("active", type=inputs.boolean, location="args") args = parser.parse_args() return service.render(args) @@ -215,11 +220,11 @@ class NotificationsList(AuthenticatedResource): :statuscode 200: no error """ return service.create( - data['label'], - data['plugin']['slug'], - data['plugin']['plugin_options'], - data['description'], - data['certificates'] + data["label"], + data["plugin"]["slug"], + data["plugin"]["plugin_options"], + data["description"], + data["certificates"], ) @@ -334,20 +339,21 @@ class Notifications(AuthenticatedResource): """ return service.update( notification_id, - data['label'], - data['plugin']['plugin_options'], - data['description'], - data['active'], - data['certificates'] + data["label"], + data["plugin"]["plugin_options"], + data["description"], + data["active"], + data["certificates"], ) def delete(self, notification_id): service.delete(notification_id) - return {'result': True} + return {"result": True} class CertificateNotifications(AuthenticatedResource): """ Defines the 'certificate/', endpoint='notification') -api.add_resource(CertificateNotifications, '/certificates//notifications', - endpoint='certificateNotifications') +api.add_resource(NotificationsList, "/notifications", endpoint="notifications") +api.add_resource( + Notifications, "/notifications/", endpoint="notification" +) +api.add_resource( + CertificateNotifications, + "/certificates//notifications", + endpoint="certificateNotifications", +) diff --git a/lemur/pending_certificates/cli.py b/lemur/pending_certificates/cli.py index 65e2e19a..2ff29f10 100644 --- a/lemur/pending_certificates/cli.py +++ b/lemur/pending_certificates/cli.py @@ -19,7 +19,9 @@ from lemur.plugins.base import plugins manager = Manager(usage="Handles pending certificate related tasks.") -@manager.option('-i', dest='ids', action='append', help='IDs of pending certificates to fetch') +@manager.option( + "-i", dest="ids", action="append", help="IDs of pending certificates to fetch" +) def fetch(ids): """ Attempt to get full certificate for each pending certificate listed. 
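The "-i" flag on the fetch command above uses argparse's "append" action (Flask-Script forwards these keyword arguments to argparse), so the flag can be repeated and the handler receives a list of IDs. A minimal, standalone argparse sketch of the same semantics follows; it is illustrative only and not part of Lemur:

import argparse

# Sketch only: mirrors the append semantics of the "-i" option on fetch().
parser = argparse.ArgumentParser(description="Fetch pending certificates by ID.")
parser.add_argument(
    "-i",
    dest="ids",
    action="append",  # each occurrence of -i appends another value to args.ids
    help="IDs of pending certificates to fetch",
)

args = parser.parse_args(["-i", "12", "-i", "34"])
assert args.ids == ["12", "34"]  # values stay strings unless type=int is specified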
@@ -39,25 +41,18 @@ def fetch(ids): if real_cert: # If a real certificate was returned from issuer, then create it in Lemur and mark # the pending certificate as resolved - final_cert = pending_certificate_service.create_certificate(cert, real_cert, cert.user) - pending_certificate_service.update( - cert.id, - resolved_cert_id=final_cert.id - ) - pending_certificate_service.update( - cert.id, - resolved=True + final_cert = pending_certificate_service.create_certificate( + cert, real_cert, cert.user ) + pending_certificate_service.update(cert.id, resolved_cert_id=final_cert.id) + pending_certificate_service.update(cert.id, resolved=True) # add metrics to metrics extension new += 1 else: pending_certificate_service.increment_attempt(cert) failed += 1 print( - "[+] Certificates: New: {new} Failed: {failed}".format( - new=new, - failed=failed, - ) + "[+] Certificates: New: {new} Failed: {failed}".format(new=new, failed=failed) ) @@ -69,9 +64,7 @@ def fetch_all_acme(): certificates. """ - log_data = { - "function": "{}.{}".format(__name__, sys._getframe().f_code.co_name) - } + log_data = {"function": "{}.{}".format(__name__, sys._getframe().f_code.co_name)} pending_certs = pending_certificate_service.get_unresolved_pending_certs() new = 0 failed = 0 @@ -81,7 +74,7 @@ def fetch_all_acme(): # We only care about certs using the acme-issuer plugin for cert in pending_certs: cert_authority = get_authority(cert.authority_id) - if cert_authority.plugin_name == 'acme-issuer': + if cert_authority.plugin_name == "acme-issuer": acme_certs.append(cert) else: wrong_issuer += 1 @@ -97,15 +90,13 @@ def fetch_all_acme(): if real_cert: # If a real certificate was returned from issuer, then create it in Lemur and mark # the pending certificate as resolved - final_cert = pending_certificate_service.create_certificate(pending_cert, real_cert, pending_cert.user) - pending_certificate_service.update( - pending_cert.id, - resolved_cert_id=final_cert.id + final_cert = pending_certificate_service.create_certificate( + pending_cert, real_cert, pending_cert.user ) pending_certificate_service.update( - pending_cert.id, - resolved=True + pending_cert.id, resolved_cert_id=final_cert.id ) + pending_certificate_service.update(pending_cert.id, resolved=True) # add metrics to metrics extension new += 1 else: @@ -118,17 +109,15 @@ def fetch_all_acme(): if pending_cert.number_attempts > 4: error_log["message"] = "Marking pending certificate as resolved" - send_pending_failure_notification(pending_cert, notify_owner=pending_cert.notify) - # Mark "resolved" as True - pending_certificate_service.update( - cert.id, - resolved=True + send_pending_failure_notification( + pending_cert, notify_owner=pending_cert.notify ) + # Mark "resolved" as True + pending_certificate_service.update(cert.id, resolved=True) else: pending_certificate_service.increment_attempt(pending_cert) pending_certificate_service.update( - cert.get("pending_cert").id, - status=str(cert.get("last_error")) + cert.get("pending_cert").id, status=str(cert.get("last_error")) ) current_app.logger.error(error_log) log_data["message"] = "Complete" @@ -138,8 +127,6 @@ def fetch_all_acme(): current_app.logger.debug(log_data) print( "[+] Certificates: New: {new} Failed: {failed} Not using ACME: {wrong_issuer}".format( - new=new, - failed=failed, - wrong_issuer=wrong_issuer + new=new, failed=failed, wrong_issuer=wrong_issuer ) ) diff --git a/lemur/pending_certificates/models.py b/lemur/pending_certificates/models.py index 7dc8e602..fa6be073 100644 --- 
a/lemur/pending_certificates/models.py +++ b/lemur/pending_certificates/models.py @@ -5,7 +5,16 @@ """ from datetime import datetime as dt -from sqlalchemy import Integer, ForeignKey, String, PassiveDefault, func, Column, Text, Boolean +from sqlalchemy import ( + Integer, + ForeignKey, + String, + PassiveDefault, + func, + Column, + Text, + Boolean, +) from sqlalchemy.orm import relationship from sqlalchemy_utils import JSONType from sqlalchemy_utils.types.arrow import ArrowType @@ -13,20 +22,28 @@ from sqlalchemy_utils.types.arrow import ArrowType from lemur.certificates.models import get_sequence from lemur.common import defaults, utils from lemur.database import db -from lemur.models import pending_cert_source_associations, \ - pending_cert_destination_associations, pending_cert_notification_associations, \ - pending_cert_replacement_associations, pending_cert_role_associations +from lemur.models import ( + pending_cert_source_associations, + pending_cert_destination_associations, + pending_cert_notification_associations, + pending_cert_replacement_associations, + pending_cert_role_associations, +) from lemur.utils import Vault def get_or_increase_name(name, serial): - certificates = PendingCertificate.query.filter(PendingCertificate.name.ilike('{0}%'.format(name))).all() + certificates = PendingCertificate.query.filter( + PendingCertificate.name.ilike("{0}%".format(name)) + ).all() if not certificates: return name - serial_name = '{0}-{1}'.format(name, hex(int(serial))[2:].upper()) - certificates = PendingCertificate.query.filter(PendingCertificate.name.ilike('{0}%'.format(serial_name))).all() + serial_name = "{0}-{1}".format(name, hex(int(serial))[2:].upper()) + certificates = PendingCertificate.query.filter( + PendingCertificate.name.ilike("{0}%".format(serial_name)) + ).all() if not certificates: return serial_name @@ -38,11 +55,11 @@ def get_or_increase_name(name, serial): if end: ends.append(end) - return '{0}-{1}'.format(root, max(ends) + 1) + return "{0}-{1}".format(root, max(ends) + 1) class PendingCertificate(db.Model): - __tablename__ = 'pending_certs' + __tablename__ = "pending_certs" id = Column(Integer, primary_key=True) external_id = Column(String(128)) owner = Column(String(128), nullable=False) @@ -60,69 +77,101 @@ class PendingCertificate(db.Model): private_key = Column(Vault, nullable=True) date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False) - dns_provider_id = Column(Integer, ForeignKey('dns_providers.id', ondelete="CASCADE")) + dns_provider_id = Column( + Integer, ForeignKey("dns_providers.id", ondelete="CASCADE") + ) status = Column(Text(), nullable=True) - last_updated = Column(ArrowType, PassiveDefault(func.now()), onupdate=func.now(), nullable=False) + last_updated = Column( + ArrowType, PassiveDefault(func.now()), onupdate=func.now(), nullable=False + ) rotation = Column(Boolean, default=False) - user_id = Column(Integer, ForeignKey('users.id')) - authority_id = Column(Integer, ForeignKey('authorities.id', ondelete="CASCADE")) - root_authority_id = Column(Integer, ForeignKey('authorities.id', ondelete="CASCADE")) - rotation_policy_id = Column(Integer, ForeignKey('rotation_policies.id')) + user_id = Column(Integer, ForeignKey("users.id")) + authority_id = Column(Integer, ForeignKey("authorities.id", ondelete="CASCADE")) + root_authority_id = Column( + Integer, ForeignKey("authorities.id", ondelete="CASCADE") + ) + rotation_policy_id = Column(Integer, ForeignKey("rotation_policies.id")) - notifications = relationship('Notification', 
secondary=pending_cert_notification_associations, - backref='pending_cert', passive_deletes=True) - destinations = relationship('Destination', secondary=pending_cert_destination_associations, backref='pending_cert', - passive_deletes=True) - sources = relationship('Source', secondary=pending_cert_source_associations, backref='pending_cert', - passive_deletes=True) - roles = relationship('Role', secondary=pending_cert_role_associations, backref='pending_cert', passive_deletes=True) - replaces = relationship('Certificate', - secondary=pending_cert_replacement_associations, - backref='pending_cert', - passive_deletes=True) + notifications = relationship( + "Notification", + secondary=pending_cert_notification_associations, + backref="pending_cert", + passive_deletes=True, + ) + destinations = relationship( + "Destination", + secondary=pending_cert_destination_associations, + backref="pending_cert", + passive_deletes=True, + ) + sources = relationship( + "Source", + secondary=pending_cert_source_associations, + backref="pending_cert", + passive_deletes=True, + ) + roles = relationship( + "Role", + secondary=pending_cert_role_associations, + backref="pending_cert", + passive_deletes=True, + ) + replaces = relationship( + "Certificate", + secondary=pending_cert_replacement_associations, + backref="pending_cert", + passive_deletes=True, + ) options = Column(JSONType) rotation_policy = relationship("RotationPolicy") - sensitive_fields = ('private_key',) + sensitive_fields = ("private_key",) def __init__(self, **kwargs): - self.csr = kwargs.get('csr') - self.private_key = kwargs.get('private_key', "") + self.csr = kwargs.get("csr") + self.private_key = kwargs.get("private_key", "") if self.private_key: # If the request does not send private key, the key exists but the value is None self.private_key = self.private_key.strip() - self.external_id = kwargs.get('external_id') + self.external_id = kwargs.get("external_id") # when destinations are appended they require a valid name. 
- if kwargs.get('name'): - self.name = get_or_increase_name(defaults.text_to_slug(kwargs['name']), 0) + if kwargs.get("name"): + self.name = get_or_increase_name(defaults.text_to_slug(kwargs["name"]), 0) self.rename = False else: # TODO: Fix auto-generated name, it should be renamed on creation self.name = get_or_increase_name( - defaults.certificate_name(kwargs['common_name'], kwargs['authority'].name, - dt.now(), dt.now(), False), self.external_id) + defaults.certificate_name( + kwargs["common_name"], + kwargs["authority"].name, + dt.now(), + dt.now(), + False, + ), + self.external_id, + ) self.rename = True self.cn = defaults.common_name(utils.parse_csr(self.csr)) - self.owner = kwargs['owner'] + self.owner = kwargs["owner"] self.number_attempts = 0 - if kwargs.get('chain'): - self.chain = kwargs['chain'].strip() + if kwargs.get("chain"): + self.chain = kwargs["chain"].strip() - self.notify = kwargs.get('notify', True) - self.destinations = kwargs.get('destinations', []) - self.notifications = kwargs.get('notifications', []) - self.description = kwargs.get('description') - self.roles = list(set(kwargs.get('roles', []))) - self.replaces = kwargs.get('replaces', []) - self.rotation = kwargs.get('rotation') - self.rotation_policy = kwargs.get('rotation_policy') + self.notify = kwargs.get("notify", True) + self.destinations = kwargs.get("destinations", []) + self.notifications = kwargs.get("notifications", []) + self.description = kwargs.get("description") + self.roles = list(set(kwargs.get("roles", []))) + self.replaces = kwargs.get("replaces", []) + self.rotation = kwargs.get("rotation") + self.rotation_policy = kwargs.get("rotation_policy") try: - self.dns_provider_id = kwargs.get('dns_provider').id + self.dns_provider_id = kwargs.get("dns_provider").id except (AttributeError, KeyError, TypeError, Exception): pass diff --git a/lemur/pending_certificates/schemas.py b/lemur/pending_certificates/schemas.py index 3dd70b16..68f22b4a 100644 --- a/lemur/pending_certificates/schemas.py +++ b/lemur/pending_certificates/schemas.py @@ -17,14 +17,14 @@ from lemur.schemas import ( AssociatedNotificationSchema, AssociatedRoleSchema, EndpointNestedOutputSchema, - ExtensionSchema + ExtensionSchema, ) from lemur.users.schemas import UserNestedOutputSchema class PendingCertificateSchema(LemurInputSchema): owner = fields.Email(required=True) - description = fields.String(missing='', allow_none=True) + description = fields.String(missing="", allow_none=True) class PendingCertificateOutputSchema(LemurOutputSchema): @@ -46,10 +46,10 @@ class PendingCertificateOutputSchema(LemurOutputSchema): # Note aliasing is the first step in deprecating these fields. 
notify = fields.Boolean() - active = fields.Boolean(attribute='notify') + active = fields.Boolean(attribute="notify") cn = fields.String() - common_name = fields.String(attribute='cn') + common_name = fields.String(attribute="cn") owner = fields.Email() @@ -66,7 +66,9 @@ class PendingCertificateOutputSchema(LemurOutputSchema): authority = fields.Nested(AuthorityNestedOutputSchema) roles = fields.Nested(RoleNestedOutputSchema, many=True) endpoints = fields.Nested(EndpointNestedOutputSchema, many=True, missing=[]) - replaced_by = fields.Nested(CertificateNestedOutputSchema, many=True, attribute='replaced') + replaced_by = fields.Nested( + CertificateNestedOutputSchema, many=True, attribute="replaced" + ) rotation_policy = fields.Nested(RotationPolicyNestedOutputSchema) @@ -89,10 +91,15 @@ class PendingCertificateEditInputSchema(PendingCertificateSchema): :param data: :return: """ - if data['owner']: - notification_name = "DEFAULT_{0}".format(data['owner'].split('@')[0].upper()) - data['notifications'] += notification_service.create_default_expiration_notifications(notification_name, - [data['owner']]) + if data["owner"]: + notification_name = "DEFAULT_{0}".format( + data["owner"].split("@")[0].upper() + ) + data[ + "notifications" + ] += notification_service.create_default_expiration_notifications( + notification_name, [data["owner"]] + ) return data @@ -108,17 +115,21 @@ class PendingCertificateUploadInputSchema(LemurInputSchema): @validates_schema def validate_cert_chain(self, data): cert = None - if data.get('body'): + if data.get("body"): try: - cert = utils.parse_certificate(data['body']) + cert = utils.parse_certificate(data["body"]) except ValueError: - raise ValidationError("Public certificate presented is not valid.", field_names=['body']) + raise ValidationError( + "Public certificate presented is not valid.", field_names=["body"] + ) - if data.get('chain'): + if data.get("chain"): try: - chain = utils.parse_cert_chain(data['chain']) + chain = utils.parse_cert_chain(data["chain"]) except ValueError: - raise ValidationError("Invalid certificate in certificate chain.", field_names=['chain']) + raise ValidationError( + "Invalid certificate in certificate chain.", field_names=["chain"] + ) # Throws ValidationError validators.verify_cert_chain([cert] + chain) diff --git a/lemur/pending_certificates/service.py b/lemur/pending_certificates/service.py index 287bd42b..935ea689 100644 --- a/lemur/pending_certificates/service.py +++ b/lemur/pending_certificates/service.py @@ -40,17 +40,18 @@ def get_by_external_id(issuer, external_id): """ if isinstance(external_id, int): external_id = str(external_id) - return PendingCertificate.query \ - .filter(PendingCertificate.authority_id == issuer.id) \ - .filter(PendingCertificate.external_id == external_id) \ + return ( + PendingCertificate.query.filter(PendingCertificate.authority_id == issuer.id) + .filter(PendingCertificate.external_id == external_id) .one_or_none() + ) def get_by_name(pending_cert_name): """ Retrieve pending certificate by name """ - return database.get(PendingCertificate, pending_cert_name, field='name') + return database.get(PendingCertificate, pending_cert_name, field="name") def delete(pending_certificate): @@ -66,7 +67,9 @@ def get_unresolved_pending_certs(): Retrieve a list of unresolved pending certs given a list of ids Filters out non-existing pending certs """ - query = database.session_query(PendingCertificate).filter(PendingCertificate.resolved.is_(False)) + query = database.session_query(PendingCertificate).filter( + 
PendingCertificate.resolved.is_(False) + ) return database.find_all(query, PendingCertificate, {}).all() @@ -76,7 +79,7 @@ def get_pending_certs(pending_ids): Filters out non-existing pending certs """ pending_certs = [] - if 'all' in pending_ids: + if "all" in pending_ids: query = database.session_query(PendingCertificate) return database.find_all(query, PendingCertificate, {}).all() else: @@ -96,23 +99,25 @@ def create_certificate(pending_certificate, certificate, user): user: User that called this function, used as 'creator' of the certificate if it does not have an owner """ - certificate['owner'] = pending_certificate.owner + certificate["owner"] = pending_certificate.owner data, errors = CertificateUploadInputSchema().load(certificate) if errors: - raise Exception("Unable to create certificate: {reasons}".format(reasons=errors)) + raise Exception( + "Unable to create certificate: {reasons}".format(reasons=errors) + ) data.update(vars(pending_certificate)) # Copy relationships, vars doesn't copy this without explicit fields - data['notifications'] = list(pending_certificate.notifications) - data['destinations'] = list(pending_certificate.destinations) - data['sources'] = list(pending_certificate.sources) - data['roles'] = list(pending_certificate.roles) - data['replaces'] = list(pending_certificate.replaces) - data['rotation_policy'] = pending_certificate.rotation_policy + data["notifications"] = list(pending_certificate.notifications) + data["destinations"] = list(pending_certificate.destinations) + data["sources"] = list(pending_certificate.sources) + data["roles"] = list(pending_certificate.roles) + data["replaces"] = list(pending_certificate.replaces) + data["rotation_policy"] = pending_certificate.rotation_policy # Replace external id and chain with the one fetched from source - data['external_id'] = certificate['external_id'] - data['chain'] = certificate['chain'] + data["external_id"] = certificate["external_id"] + data["chain"] = certificate["chain"] creator = user_service.get_by_email(pending_certificate.owner) if not creator: # Owner of the pending certificate is not the creator, so use the current user who called @@ -121,8 +126,8 @@ def create_certificate(pending_certificate, certificate, user): if pending_certificate.rename: # If generating name from certificate, remove the one from pending certificate - del data['name'] - data['creator'] = creator + del data["name"] + data["creator"] = creator cert = certificate_service.import_certificate(**data) database.update(cert) @@ -159,75 +164,91 @@ def cancel(pending_certificate, **kwargs): """ plugin = plugins.get(pending_certificate.authority.plugin_name) plugin.cancel_ordered_certificate(pending_certificate, **kwargs) - pending_certificate.status = 'Cancelled' + pending_certificate.status = "Cancelled" database.update(pending_certificate) return pending_certificate def render(args): query = database.session_query(PendingCertificate) - time_range = args.pop('time_range') - destination_id = args.pop('destination_id') - notification_id = args.pop('notification_id', None) - show = args.pop('show') + time_range = args.pop("time_range") + destination_id = args.pop("destination_id") + notification_id = args.pop("notification_id", None) + show = args.pop("show") # owner = args.pop('owner') # creator = args.pop('creator') # TODO we should enabling filtering by owner - filt = args.pop('filter') + filt = args.pop("filter") if filt: - terms = filt.split(';') + terms = filt.split(";") - if 'issuer' in terms: + if "issuer" in terms: # we 
can't rely on issuer being correct in the cert directly so we combine queries - sub_query = database.session_query(Authority.id) \ - .filter(Authority.name.ilike('%{0}%'.format(terms[1]))) \ + sub_query = ( + database.session_query(Authority.id) + .filter(Authority.name.ilike("%{0}%".format(terms[1]))) .subquery() + ) query = query.filter( or_( - PendingCertificate.issuer.ilike('%{0}%'.format(terms[1])), - PendingCertificate.authority_id.in_(sub_query) + PendingCertificate.issuer.ilike("%{0}%".format(terms[1])), + PendingCertificate.authority_id.in_(sub_query), ) ) - elif 'destination' in terms: - query = query.filter(PendingCertificate.destinations.any(Destination.id == terms[1])) - elif 'notify' in filt: + elif "destination" in terms: + query = query.filter( + PendingCertificate.destinations.any(Destination.id == terms[1]) + ) + elif "notify" in filt: query = query.filter(PendingCertificate.notify == truthiness(terms[1])) - elif 'active' in filt: + elif "active" in filt: query = query.filter(PendingCertificate.active == truthiness(terms[1])) - elif 'cn' in terms: + elif "cn" in terms: query = query.filter( or_( - PendingCertificate.cn.ilike('%{0}%'.format(terms[1])), - PendingCertificate.domains.any(Domain.name.ilike('%{0}%'.format(terms[1]))) + PendingCertificate.cn.ilike("%{0}%".format(terms[1])), + PendingCertificate.domains.any( + Domain.name.ilike("%{0}%".format(terms[1])) + ), ) ) - elif 'id' in terms: + elif "id" in terms: query = query.filter(PendingCertificate.id == cast(terms[1], Integer)) else: query = database.filter(query, PendingCertificate, terms) if show: - sub_query = database.session_query(Role.name).filter(Role.user_id == args['user'].id).subquery() + sub_query = ( + database.session_query(Role.name) + .filter(Role.user_id == args["user"].id) + .subquery() + ) query = query.filter( or_( - PendingCertificate.user_id == args['user'].id, - PendingCertificate.owner.in_(sub_query) + PendingCertificate.user_id == args["user"].id, + PendingCertificate.owner.in_(sub_query), ) ) if destination_id: - query = query.filter(PendingCertificate.destinations.any(Destination.id == destination_id)) + query = query.filter( + PendingCertificate.destinations.any(Destination.id == destination_id) + ) if notification_id: - query = query.filter(PendingCertificate.notifications.any(Notification.id == notification_id)) + query = query.filter( + PendingCertificate.notifications.any(Notification.id == notification_id) + ) if time_range: - to = arrow.now().replace(weeks=+time_range).format('YYYY-MM-DD') - now = arrow.now().format('YYYY-MM-DD') - query = query.filter(PendingCertificate.not_after <= to).filter(PendingCertificate.not_after >= now) + to = arrow.now().replace(weeks=+time_range).format("YYYY-MM-DD") + now = arrow.now().format("YYYY-MM-DD") + query = query.filter(PendingCertificate.not_after <= to).filter( + PendingCertificate.not_after >= now + ) # Only show unresolved certificates in the UI query = query.filter(PendingCertificate.resolved.is_(False)) @@ -242,30 +263,26 @@ def upload(pending_certificate_id, **kwargs): """ pending_cert = get(pending_certificate_id) partial_cert = kwargs - uploaded_chain = partial_cert['chain'] + uploaded_chain = partial_cert["chain"] authority = authorities_service.get(pending_cert.authority.id) # Construct the chain for cert validation if uploaded_chain: - chain = uploaded_chain + '\n' + authority.authority_certificate.body + chain = uploaded_chain + "\n" + authority.authority_certificate.body else: chain = authority.authority_certificate.body 
parsed_chain = parse_cert_chain(chain) # Check that the certificate is actually signed by the CA to avoid incorrect cert pasting - validators.verify_cert_chain([parse_certificate(partial_cert['body'])] + parsed_chain) + validators.verify_cert_chain( + [parse_certificate(partial_cert["body"])] + parsed_chain + ) final_cert = create_certificate(pending_cert, partial_cert, pending_cert.user) - pending_cert_final_result = update( - pending_cert.id, - resolved_cert_id=final_cert.id - ) - update( - pending_cert.id, - resolved=True - ) + pending_cert_final_result = update(pending_cert.id, resolved_cert_id=final_cert.id) + update(pending_cert.id, resolved=True) return pending_cert_final_result diff --git a/lemur/pending_certificates/views.py b/lemur/pending_certificates/views.py index 935f00c1..4651aed7 100644 --- a/lemur/pending_certificates/views.py +++ b/lemur/pending_certificates/views.py @@ -23,7 +23,7 @@ from lemur.pending_certificates.schemas import ( pending_certificate_upload_input_schema, ) -mod = Blueprint('pending_certificates', __name__) +mod = Blueprint("pending_certificates", __name__) api = Api(mod) @@ -110,15 +110,17 @@ class PendingCertificatesList(AuthenticatedResource): """ parser = paginated_parser.copy() - parser.add_argument('timeRange', type=int, dest='time_range', location='args') - parser.add_argument('owner', type=inputs.boolean, location='args') - parser.add_argument('id', type=str, location='args') - parser.add_argument('active', type=inputs.boolean, location='args') - parser.add_argument('destinationId', type=int, dest="destination_id", location='args') - parser.add_argument('creator', type=str, location='args') - parser.add_argument('show', type=str, location='args') + parser.add_argument("timeRange", type=int, dest="time_range", location="args") + parser.add_argument("owner", type=inputs.boolean, location="args") + parser.add_argument("id", type=str, location="args") + parser.add_argument("active", type=inputs.boolean, location="args") + parser.add_argument( + "destinationId", type=int, dest="destination_id", location="args" + ) + parser.add_argument("creator", type=str, location="args") + parser.add_argument("show", type=str, location="args") args = parser.parse_args() - args['user'] = g.user + args["user"] = g.user return service.render(args) @@ -206,7 +208,9 @@ class PendingCertificates(AuthenticatedResource): """ return service.get(pending_certificate_id) - @validate_schema(pending_certificate_edit_input_schema, pending_certificate_output_schema) + @validate_schema( + pending_certificate_edit_input_schema, pending_certificate_output_schema + ) def put(self, pending_certificate_id, data=None): """ .. http:put:: /pending_certificates/1 @@ -297,19 +301,27 @@ class PendingCertificates(AuthenticatedResource): # allow creators if g.current_user != pending_cert.user: owner_role = role_service.get_by_name(pending_cert.owner) - permission = CertificatePermission(owner_role, [x.name for x in pending_cert.roles]) + permission = CertificatePermission( + owner_role, [x.name for x in pending_cert.roles] + ) if not permission.can(): - return dict(message='You are not authorized to update this certificate'), 403 + return ( + dict(message="You are not authorized to update this certificate"), + 403, + ) - for destination in data['destinations']: + for destination in data["destinations"]: if destination.plugin.requires_key: if not pending_cert.private_key: - return dict( - message='Unable to add destination: {0}. 
Certificate does not have required private key.'.format( - destination.label - ) - ), 400 + return ( + dict( + message="Unable to add destination: {0}. Certificate does not have required private key.".format( + destination.label + ) + ), + 400, + ) pending_cert = service.update(pending_certificate_id, **data) return pending_cert @@ -354,18 +366,28 @@ class PendingCertificates(AuthenticatedResource): # allow creators if g.current_user != pending_cert.user: owner_role = role_service.get_by_name(pending_cert.owner) - permission = CertificatePermission(owner_role, [x.name for x in pending_cert.roles]) + permission = CertificatePermission( + owner_role, [x.name for x in pending_cert.roles] + ) if not permission.can(): - return dict(message='You are not authorized to update this certificate'), 403 + return ( + dict(message="You are not authorized to update this certificate"), + 403, + ) if service.cancel(pending_cert, **data): service.delete(pending_cert) - return('', 204) + return ("", 204) else: # service.cancel raises exception if there was an issue, but this will ensure something # is relayed to user in case of something unexpected (unsuccessful update somehow). - return dict(message="Unexpected error occurred while trying to cancel this certificate"), 500 + return ( + dict( + message="Unexpected error occurred while trying to cancel this certificate" + ), + 500, + ) class PendingCertificatePrivateKey(AuthenticatedResource): @@ -412,11 +434,11 @@ class PendingCertificatePrivateKey(AuthenticatedResource): permission = CertificatePermission(owner_role, [x.name for x in cert.roles]) if not permission.can(): - return dict(message='You are not authorized to view this key'), 403 + return dict(message="You are not authorized to view this key"), 403 response = make_response(jsonify(key=cert.private_key), 200) - response.headers['cache-control'] = 'private, max-age=0, no-cache, no-store' - response.headers['pragma'] = 'no-cache' + response.headers["cache-control"] = "private, max-age=0, no-cache, no-store" + response.headers["pragma"] = "no-cache" return response @@ -427,7 +449,9 @@ class PendingCertificatesUpload(AuthenticatedResource): self.reqparse = reqparse.RequestParser() super(PendingCertificatesUpload, self).__init__() - @validate_schema(pending_certificate_upload_input_schema, pending_certificate_output_schema) + @validate_schema( + pending_certificate_upload_input_schema, pending_certificate_output_schema + ) def post(self, pending_certificate_id, data=None): """ .. 
http:post:: /pending_certificates/1/upload @@ -514,7 +538,21 @@ class PendingCertificatesUpload(AuthenticatedResource): return service.upload(pending_certificate_id, **data) -api.add_resource(PendingCertificatesList, '/pending_certificates', endpoint='pending_certificates') -api.add_resource(PendingCertificates, '/pending_certificates/', endpoint='pending_certificate') -api.add_resource(PendingCertificatesUpload, '/pending_certificates//upload', endpoint='pendingCertificateUpload') -api.add_resource(PendingCertificatePrivateKey, '/pending_certificates//key', endpoint='privateKeyPendingCertificates') +api.add_resource( + PendingCertificatesList, "/pending_certificates", endpoint="pending_certificates" +) +api.add_resource( + PendingCertificates, + "/pending_certificates/", + endpoint="pending_certificate", +) +api.add_resource( + PendingCertificatesUpload, + "/pending_certificates//upload", + endpoint="pendingCertificateUpload", +) +api.add_resource( + PendingCertificatePrivateKey, + "/pending_certificates//key", + endpoint="privateKeyPendingCertificates", +) diff --git a/lemur/plugins/base/manager.py b/lemur/plugins/base/manager.py index a2306445..117700a6 100644 --- a/lemur/plugins/base/manager.py +++ b/lemur/plugins/base/manager.py @@ -18,7 +18,9 @@ class PluginManager(InstanceManager): return sum(1 for i in self.all()) def all(self, version=1, plugin_type=None): - for plugin in sorted(super(PluginManager, self).all(), key=lambda x: x.get_title()): + for plugin in sorted( + super(PluginManager, self).all(), key=lambda x: x.get_title() + ): if not plugin.type == plugin_type and plugin_type: continue if not plugin.is_enabled(): @@ -36,29 +38,34 @@ class PluginManager(InstanceManager): return plugin current_app.logger.error( "Unable to find slug: {} in self.all version 1: {} or version 2: {}".format( - slug, self.all(version=1), self.all(version=2)) + slug, self.all(version=1), self.all(version=2) + ) ) raise KeyError(slug) def first(self, func_name, *args, **kwargs): - version = kwargs.pop('version', 1) + version = kwargs.pop("version", 1) for plugin in self.all(version=version): try: result = getattr(plugin, func_name)(*args, **kwargs) except Exception as e: - current_app.logger.error('Error processing %s() on %r: %s', func_name, plugin.__class__, e, extra={ - 'func_arg': args, - 'func_kwargs': kwargs, - }, exc_info=True) + current_app.logger.error( + "Error processing %s() on %r: %s", + func_name, + plugin.__class__, + e, + extra={"func_arg": args, "func_kwargs": kwargs}, + exc_info=True, + ) continue if result is not None: return result def register(self, cls): - self.add('%s.%s' % (cls.__module__, cls.__name__)) + self.add("%s.%s" % (cls.__module__, cls.__name__)) return cls def unregister(self, cls): - self.remove('%s.%s' % (cls.__module__, cls.__name__)) + self.remove("%s.%s" % (cls.__module__, cls.__name__)) return cls diff --git a/lemur/plugins/base/v1.py b/lemur/plugins/base/v1.py index fb688c73..664385b3 100644 --- a/lemur/plugins/base/v1.py +++ b/lemur/plugins/base/v1.py @@ -18,7 +18,7 @@ class PluginMount(type): if new_cls.title is None: new_cls.title = new_cls.__name__ if not new_cls.slug: - new_cls.slug = new_cls.title.replace(' ', '-').lower() + new_cls.slug = new_cls.title.replace(" ", "-").lower() return new_cls @@ -36,6 +36,7 @@ class IPlugin(local): As a general rule all inherited methods should allow ``**kwargs`` to ensure ease of future compatibility. 
""" + # Generic plugin information title = None slug = None @@ -72,7 +73,7 @@ class IPlugin(local): Returns a string representing the configuration keyspace prefix for this plugin. """ if not self.conf_key: - self.conf_key = self.get_conf_title().lower().replace(' ', '_') + self.conf_key = self.get_conf_title().lower().replace(" ", "_") return self.conf_key def get_conf_title(self): @@ -111,8 +112,8 @@ class IPlugin(local): @staticmethod def get_option(name, options): for o in options: - if o.get('name') == name: - return o.get('value', o.get('default')) + if o.get("name") == name: + return o.get("value", o.get("default")) class Plugin(IPlugin): @@ -121,5 +122,6 @@ class Plugin(IPlugin): control when or how the plugin gets instantiated, nor is it guaranteed that it will happen, or happen more than once. """ + __version__ = 1 __metaclass__ = PluginMount diff --git a/lemur/plugins/bases/destination.py b/lemur/plugins/bases/destination.py index fc73ebcb..e00c5090 100644 --- a/lemur/plugins/bases/destination.py +++ b/lemur/plugins/bases/destination.py @@ -10,10 +10,10 @@ from lemur.plugins.base import Plugin, plugins class DestinationPlugin(Plugin): - type = 'destination' + type = "destination" requires_key = True sync_as_source = False - sync_as_source_name = '' + sync_as_source_name = "" def upload(self, name, body, private_key, cert_chain, options, **kwargs): raise NotImplementedError @@ -22,10 +22,10 @@ class DestinationPlugin(Plugin): class ExportDestinationPlugin(DestinationPlugin): default_options = [ { - 'name': 'exportPlugin', - 'type': 'export-plugin', - 'required': True, - 'helpMessage': 'Export plugin to use before sending data to destination.' + "name": "exportPlugin", + "type": "export-plugin", + "required": True, + "helpMessage": "Export plugin to use before sending data to destination.", } ] @@ -34,15 +34,17 @@ class ExportDestinationPlugin(DestinationPlugin): return self.default_options + self.additional_options def export(self, body, private_key, cert_chain, options): - export_plugin = self.get_option('exportPlugin', options) + export_plugin = self.get_option("exportPlugin", options) if export_plugin: - plugin = plugins.get(export_plugin['slug']) - extension, passphrase, data = plugin.export(body, cert_chain, private_key, export_plugin['plugin_options']) + plugin = plugins.get(export_plugin["slug"]) + extension, passphrase, data = plugin.export( + body, cert_chain, private_key, export_plugin["plugin_options"] + ) return [(extension, passphrase, data)] - data = body + '\n' + cert_chain + '\n' + private_key - return [('.pem', '', data)] + data = body + "\n" + cert_chain + "\n" + private_key + return [(".pem", "", data)] def upload(self, name, body, private_key, cert_chain, options, **kwargs): raise NotImplementedError diff --git a/lemur/plugins/bases/export.py b/lemur/plugins/bases/export.py index 1466c1ab..6d078906 100644 --- a/lemur/plugins/bases/export.py +++ b/lemur/plugins/bases/export.py @@ -14,7 +14,8 @@ class ExportPlugin(Plugin): This is the base class from which all supported exporters will inherit from. """ - type = 'export' + + type = "export" requires_key = True def export(self, body, chain, key, options, **kwargs): diff --git a/lemur/plugins/bases/issuer.py b/lemur/plugins/bases/issuer.py index 5eb0964c..f1e6aa0e 100644 --- a/lemur/plugins/bases/issuer.py +++ b/lemur/plugins/bases/issuer.py @@ -14,7 +14,8 @@ class IssuerPlugin(Plugin): This is the base class from which all of the supported issuers will inherit from. 
""" - type = 'issuer' + + type = "issuer" def create_certificate(self, csr, issuer_options): raise NotImplementedError diff --git a/lemur/plugins/bases/metric.py b/lemur/plugins/bases/metric.py index 259af235..2e4ce69b 100644 --- a/lemur/plugins/bases/metric.py +++ b/lemur/plugins/bases/metric.py @@ -10,7 +10,9 @@ from lemur.plugins.base import Plugin class MetricPlugin(Plugin): - type = 'metric' + type = "metric" - def submit(self, metric_name, metric_type, metric_value, metric_tags=None, options=None): + def submit( + self, metric_name, metric_type, metric_value, metric_tags=None, options=None + ): raise NotImplementedError diff --git a/lemur/plugins/bases/notification.py b/lemur/plugins/bases/notification.py index a7ba4e0d..730f68be 100644 --- a/lemur/plugins/bases/notification.py +++ b/lemur/plugins/bases/notification.py @@ -14,7 +14,8 @@ class NotificationPlugin(Plugin): This is the base class from which all of the supported issuers will inherit from. """ - type = 'notification' + + type = "notification" def send(self, notification_type, message, targets, options, **kwargs): raise NotImplementedError @@ -26,22 +27,23 @@ class ExpirationNotificationPlugin(NotificationPlugin): It contains some default options that are needed for all expiration notification plugins. """ + default_options = [ { - 'name': 'interval', - 'type': 'int', - 'required': True, - 'validation': '^\d+$', - 'helpMessage': 'Number of days to be alert before expiration.', + "name": "interval", + "type": "int", + "required": True, + "validation": "^\d+$", + "helpMessage": "Number of days to be alert before expiration.", }, { - 'name': 'unit', - 'type': 'select', - 'required': True, - 'validation': '', - 'available': ['days', 'weeks', 'months'], - 'helpMessage': 'Interval unit', - } + "name": "unit", + "type": "select", + "required": True, + "validation": "", + "available": ["days", "weeks", "months"], + "helpMessage": "Interval unit", + }, ] @property diff --git a/lemur/plugins/bases/source.py b/lemur/plugins/bases/source.py index ff3492fe..6f521e40 100644 --- a/lemur/plugins/bases/source.py +++ b/lemur/plugins/bases/source.py @@ -10,15 +10,15 @@ from lemur.plugins.base import Plugin class SourcePlugin(Plugin): - type = 'source' + type = "source" default_options = [ { - 'name': 'pollRate', - 'type': 'int', - 'required': False, - 'helpMessage': 'Rate in seconds to poll source for new information.', - 'default': '60', + "name": "pollRate", + "type": "int", + "required": False, + "helpMessage": "Rate in seconds to poll source for new information.", + "default": "60", } ] diff --git a/lemur/plugins/lemur_acme/__init__.py b/lemur/plugins/lemur_acme/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_acme/__init__.py +++ b/lemur/plugins/lemur_acme/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_acme/cloudflare.py b/lemur/plugins/lemur_acme/cloudflare.py index a6308025..a19495f8 100644 --- a/lemur/plugins/lemur_acme/cloudflare.py +++ b/lemur/plugins/lemur_acme/cloudflare.py @@ -5,24 +5,24 @@ from flask import current_app def cf_api_call(): - cf_key = current_app.config.get('ACME_CLOUDFLARE_KEY', '') - cf_email = current_app.config.get('ACME_CLOUDFLARE_EMAIL', '') + cf_key = current_app.config.get("ACME_CLOUDFLARE_KEY", "") + cf_email = 
current_app.config.get("ACME_CLOUDFLARE_EMAIL", "") return CloudFlare.CloudFlare(email=cf_email, token=cf_key) def find_zone_id(host): - elements = host.split('.') + elements = host.split(".") cf = cf_api_call() n = 1 while n < 5: n = n + 1 - domain = '.'.join(elements[-n:]) + domain = ".".join(elements[-n:]) current_app.logger.debug("Trying to get ID for zone {0}".format(domain)) try: - zone = cf.zones.get(params={'name': domain, 'per_page': 1}) + zone = cf.zones.get(params={"name": domain, "per_page": 1}) except Exception as e: current_app.logger.error("Cloudflare API error: %s" % e) pass @@ -31,10 +31,10 @@ def find_zone_id(host): break if len(zone) == 0: - current_app.logger.error('No zone found') + current_app.logger.error("No zone found") return else: - return zone[0]['id'] + return zone[0]["id"] def wait_for_dns_change(change_id, account_number=None): @@ -42,8 +42,8 @@ def wait_for_dns_change(change_id, account_number=None): zone_id, record_id = change_id while True: r = cf.zones.get(zone_id, record_id) - current_app.logger.debug("Record status: %s" % r['status']) - if r['status'] == 'active': + current_app.logger.debug("Record status: %s" % r["status"]) + if r["status"] == "active": break time.sleep(1) return @@ -55,15 +55,19 @@ def create_txt_record(host, value, account_number): if not zone_id: return - txt_record = {'name': host, 'type': 'TXT', 'content': value} + txt_record = {"name": host, "type": "TXT", "content": value} - current_app.logger.debug("Creating TXT record {0} with value {1}".format(host, value)) + current_app.logger.debug( + "Creating TXT record {0} with value {1}".format(host, value) + ) try: r = cf.zones.dns_records.post(zone_id, data=txt_record) except Exception as e: - current_app.logger.error('/zones.dns_records.post %s: %s' % (txt_record['name'], e)) - return zone_id, r['id'] + current_app.logger.error( + "/zones.dns_records.post %s: %s" % (txt_record["name"], e) + ) + return zone_id, r["id"] def delete_txt_record(change_ids, account_number, host, value): @@ -74,4 +78,4 @@ def delete_txt_record(change_ids, account_number, host, value): try: cf.zones.dns_records.delete(zone_id, record_id) except Exception as e: - current_app.logger.error('/zones.dns_records.post: %s' % e) + current_app.logger.error("/zones.dns_records.post: %s" % e) diff --git a/lemur/plugins/lemur_acme/dyn.py b/lemur/plugins/lemur_acme/dyn.py index db33caf0..00a48eb6 100644 --- a/lemur/plugins/lemur_acme/dyn.py +++ b/lemur/plugins/lemur_acme/dyn.py @@ -5,7 +5,12 @@ import dns.exception import dns.name import dns.query import dns.resolver -from dyn.tm.errors import DynectCreateError, DynectDeleteError, DynectGetError, DynectUpdateError +from dyn.tm.errors import ( + DynectCreateError, + DynectDeleteError, + DynectGetError, + DynectUpdateError, +) from dyn.tm.session import DynectSession from dyn.tm.zones import Node, Zone, get_all_zones from flask import current_app @@ -16,13 +21,13 @@ from lemur.extensions import metrics, sentry def get_dynect_session(): try: dynect_session = DynectSession( - current_app.config.get('ACME_DYN_CUSTOMER_NAME', ''), - current_app.config.get('ACME_DYN_USERNAME', ''), - current_app.config.get('ACME_DYN_PASSWORD', ''), + current_app.config.get("ACME_DYN_CUSTOMER_NAME", ""), + current_app.config.get("ACME_DYN_USERNAME", ""), + current_app.config.get("ACME_DYN_PASSWORD", ""), ) except Exception as e: sentry.captureException() - metrics.send('get_dynect_session_fail', 'counter', 1) + metrics.send("get_dynect_session_fail", "counter", 1) 
current_app.logger.debug("Unable to establish connection to Dyn", exc_info=True) raise return dynect_session @@ -33,17 +38,17 @@ def _has_dns_propagated(name, token): try: dns_resolver = dns.resolver.Resolver() dns_resolver.nameservers = [get_authoritative_nameserver(name)] - dns_response = dns_resolver.query(name, 'TXT') + dns_response = dns_resolver.query(name, "TXT") for rdata in dns_response: for txt_record in rdata.strings: txt_records.append(txt_record.decode("utf-8")) except dns.exception.DNSException: - metrics.send('has_dns_propagated_fail', 'counter', 1) + metrics.send("has_dns_propagated_fail", "counter", 1) return False for txt_record in txt_records: if txt_record == token: - metrics.send('has_dns_propagated_success', 'counter', 1) + metrics.send("has_dns_propagated_success", "counter", 1) return True return False @@ -56,18 +61,19 @@ def wait_for_dns_change(change_id, account_number=None): status = _has_dns_propagated(fqdn, token) current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) if status: - metrics.send('wait_for_dns_change_success', 'counter', 1) + metrics.send("wait_for_dns_change_success", "counter", 1) break time.sleep(10) if not status: # TODO: Delete associated DNS text record here - metrics.send('wait_for_dns_change_fail', 'counter', 1) - sentry.captureException( - extra={ - "fqdn": str(fqdn), "txt_record": str(token)} + metrics.send("wait_for_dns_change_fail", "counter", 1) + sentry.captureException(extra={"fqdn": str(fqdn), "txt_record": str(token)}) + metrics.send( + "wait_for_dns_change_error", + "counter", + 1, + metric_tags={"fqdn": fqdn, "txt_record": token}, ) - metrics.send('wait_for_dns_change_error', 'counter', 1, - metric_tags={'fqdn': fqdn, 'txt_record': token}) return @@ -84,7 +90,7 @@ def get_zone_name(domain): if z.name.count(".") > zone_name.count("."): zone_name = z.name if not zone_name: - metrics.send('dyn_no_zone_name', 'counter', 1) + metrics.send("dyn_no_zone_name", "counter", 1) raise Exception("No Dyn zone found for domain: {}".format(domain)) return zone_name @@ -101,23 +107,28 @@ def get_zones(account_number): def create_txt_record(domain, token, account_number): get_dynect_session() zone_name = get_zone_name(domain) - zone_parts = len(zone_name.split('.')) - node_name = '.'.join(domain.split('.')[:-zone_parts]) + zone_parts = len(zone_name.split(".")) + node_name = ".".join(domain.split(".")[:-zone_parts]) fqdn = "{0}.{1}".format(node_name, zone_name) zone = Zone(zone_name) try: - zone.add_record(node_name, record_type='TXT', txtdata="\"{}\"".format(token), ttl=5) + zone.add_record( + node_name, record_type="TXT", txtdata='"{}"'.format(token), ttl=5 + ) zone.publish() - current_app.logger.debug("TXT record created: {0}, token: {1}".format(fqdn, token)) + current_app.logger.debug( + "TXT record created: {0}, token: {1}".format(fqdn, token) + ) except (DynectCreateError, DynectUpdateError) as e: if "Cannot duplicate existing record data" in e.message: current_app.logger.debug( "Unable to add record. Domain: {}. Token: {}. 
" - "Record already exists: {}".format(domain, token, e), exc_info=True + "Record already exists: {}".format(domain, token, e), + exc_info=True, ) else: - metrics.send('create_txt_record_error', 'counter', 1) + metrics.send("create_txt_record_error", "counter", 1) sentry.captureException() raise @@ -132,17 +143,17 @@ def delete_txt_record(change_id, account_number, domain, token): return zone_name = get_zone_name(domain) - zone_parts = len(zone_name.split('.')) - node_name = '.'.join(domain.split('.')[:-zone_parts]) + zone_parts = len(zone_name.split(".")) + node_name = ".".join(domain.split(".")[:-zone_parts]) fqdn = "{0}.{1}".format(node_name, zone_name) zone = Zone(zone_name) node = Node(zone_name, fqdn) try: - all_txt_records = node.get_all_records_by_type('TXT') + all_txt_records = node.get_all_records_by_type("TXT") except DynectGetError: - metrics.send('delete_txt_record_geterror', 'counter', 1) + metrics.send("delete_txt_record_geterror", "counter", 1) # No Text Records remain or host is not in the zone anymore because all records have been deleted. return for txt_record in all_txt_records: @@ -153,22 +164,36 @@ def delete_txt_record(change_id, account_number, domain, token): except DynectDeleteError: sentry.captureException( extra={ - "fqdn": str(fqdn), "zone_name": str(zone_name), "node_name": str(node_name), - "txt_record": str(txt_record.txtdata)} + "fqdn": str(fqdn), + "zone_name": str(zone_name), + "node_name": str(node_name), + "txt_record": str(txt_record.txtdata), + } + ) + metrics.send( + "delete_txt_record_deleteerror", + "counter", + 1, + metric_tags={"fqdn": fqdn, "txt_record": txt_record.txtdata}, ) - metrics.send('delete_txt_record_deleteerror', 'counter', 1, - metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata}) try: zone.publish() except DynectUpdateError: sentry.captureException( extra={ - "fqdn": str(fqdn), "zone_name": str(zone_name), "node_name": str(node_name), - "txt_record": str(txt_record.txtdata)} + "fqdn": str(fqdn), + "zone_name": str(zone_name), + "node_name": str(node_name), + "txt_record": str(txt_record.txtdata), + } + ) + metrics.send( + "delete_txt_record_publish_error", + "counter", + 1, + metric_tags={"fqdn": str(fqdn), "txt_record": str(txt_record.txtdata)}, ) - metrics.send('delete_txt_record_publish_error', 'counter', 1, - metric_tags={'fqdn': str(fqdn), 'txt_record': str(txt_record.txtdata)}) def delete_acme_txt_records(domain): @@ -180,18 +205,21 @@ def delete_acme_txt_records(domain): if not domain.startswith(acme_challenge_string): current_app.logger.debug( "delete_acme_txt_records: Domain {} doesn't start with string {}. 
" - "Cowardly refusing to delete TXT records".format(domain, acme_challenge_string)) + "Cowardly refusing to delete TXT records".format( + domain, acme_challenge_string + ) + ) return zone_name = get_zone_name(domain) - zone_parts = len(zone_name.split('.')) - node_name = '.'.join(domain.split('.')[:-zone_parts]) + zone_parts = len(zone_name.split(".")) + node_name = ".".join(domain.split(".")[:-zone_parts]) fqdn = "{0}.{1}".format(node_name, zone_name) zone = Zone(zone_name) node = Node(zone_name, fqdn) - all_txt_records = node.get_all_records_by_type('TXT') + all_txt_records = node.get_all_records_by_type("TXT") for txt_record in all_txt_records: current_app.logger.debug("Deleting TXT record name: {0}".format(fqdn)) try: @@ -199,16 +227,23 @@ def delete_acme_txt_records(domain): except DynectDeleteError: sentry.captureException( extra={ - "fqdn": str(fqdn), "zone_name": str(zone_name), "node_name": str(node_name), - "txt_record": str(txt_record.txtdata)} + "fqdn": str(fqdn), + "zone_name": str(zone_name), + "node_name": str(node_name), + "txt_record": str(txt_record.txtdata), + } + ) + metrics.send( + "delete_txt_record_deleteerror", + "counter", + 1, + metric_tags={"fqdn": fqdn, "txt_record": txt_record.txtdata}, ) - metrics.send('delete_txt_record_deleteerror', 'counter', 1, - metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata}) zone.publish() def get_authoritative_nameserver(domain): - if current_app.config.get('ACME_DYN_GET_AUTHORATATIVE_NAMESERVER'): + if current_app.config.get("ACME_DYN_GET_AUTHORATATIVE_NAMESERVER"): n = dns.name.from_text(domain) depth = 2 @@ -219,7 +254,7 @@ def get_authoritative_nameserver(domain): while not last: s = n.split(depth) - last = s[0].to_unicode() == u'@' + last = s[0].to_unicode() == u"@" sub = s[1] query = dns.message.make_query(sub, dns.rdatatype.NS) @@ -227,11 +262,11 @@ def get_authoritative_nameserver(domain): rcode = response.rcode() if rcode != dns.rcode.NOERROR: - metrics.send('get_authoritative_nameserver_error', 'counter', 1) + metrics.send("get_authoritative_nameserver_error", "counter", 1) if rcode == dns.rcode.NXDOMAIN: - raise Exception('%s does not exist.' % sub) + raise Exception("%s does not exist." 
% sub) else: - raise Exception('Error %s' % dns.rcode.to_text(rcode)) + raise Exception("Error %s" % dns.rcode.to_text(rcode)) if len(response.authority) > 0: rrset = response.authority[0] diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index d9c41968..c734923a 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -48,7 +48,7 @@ class AcmeHandler(object): try: self.all_dns_providers = dns_provider_service.get_all_dns_providers() except Exception as e: - metrics.send('AcmeHandler_init_error', 'counter', 1) + metrics.send("AcmeHandler_init_error", "counter", 1) sentry.captureException() current_app.logger.error(f"Unable to fetch DNS Providers: {e}") self.all_dns_providers = [] @@ -67,45 +67,60 @@ class AcmeHandler(object): return host.replace("*.", "") def maybe_add_extension(self, host, dns_provider_options): - if dns_provider_options and dns_provider_options.get("acme_challenge_extension"): + if dns_provider_options and dns_provider_options.get( + "acme_challenge_extension" + ): host = host + dns_provider_options.get("acme_challenge_extension") return host - def start_dns_challenge(self, acme_client, account_number, host, dns_provider, order, dns_provider_options): + def start_dns_challenge( + self, + acme_client, + account_number, + host, + dns_provider, + order, + dns_provider_options, + ): current_app.logger.debug("Starting DNS challenge for {0}".format(host)) change_ids = [] host_to_validate = self.maybe_remove_wildcard(host) dns_challenges = self.find_dns_challenge(host_to_validate, order.authorizations) - host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options) + host_to_validate = self.maybe_add_extension( + host_to_validate, dns_provider_options + ) if not dns_challenges: sentry.captureException() - metrics.send('start_dns_challenge_error_no_dns_challenges', 'counter', 1) + metrics.send("start_dns_challenge_error_no_dns_challenges", "counter", 1) raise Exception("Unable to determine DNS challenges from authorizations") for dns_challenge in dns_challenges: change_id = dns_provider.create_txt_record( dns_challenge.validation_domain_name(host_to_validate), dns_challenge.validation(acme_client.client.net.key), - account_number + account_number, ) change_ids.append(change_id) return AuthorizationRecord( - host, - order.authorizations, - dns_challenges, - change_ids + host, order.authorizations, dns_challenges, change_ids ) def complete_dns_challenge(self, acme_client, authz_record): - current_app.logger.debug("Finalizing DNS challenge for {0}".format(authz_record.authz[0].body.identifier.value)) + current_app.logger.debug( + "Finalizing DNS challenge for {0}".format( + authz_record.authz[0].body.identifier.value + ) + ) dns_providers = self.dns_providers_for_domain.get(authz_record.host) if not dns_providers: - metrics.send('complete_dns_challenge_error_no_dnsproviders', 'counter', 1) - raise Exception("No DNS providers found for domain: {}".format(authz_record.host)) + metrics.send("complete_dns_challenge_error_no_dnsproviders", "counter", 1) + raise Exception( + "No DNS providers found for domain: {}".format(authz_record.host) + ) for dns_provider in dns_providers: # Grab account number (For Route53) @@ -114,13 +129,17 @@ class AcmeHandler(object): dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type) for change_id in authz_record.change_id: try: - dns_provider_plugin.wait_for_dns_change(change_id, account_number=account_number) + 
dns_provider_plugin.wait_for_dns_change( + change_id, account_number=account_number + ) except Exception: - metrics.send('complete_dns_challenge_error', 'counter', 1) + metrics.send("complete_dns_challenge_error", "counter", 1) sentry.captureException() current_app.logger.debug( f"Unable to resolve DNS challenge for change_id: {change_id}, account_id: " - f"{account_number}", exc_info=True) + f"{account_number}", + exc_info=True, + ) raise for dns_challenge in authz_record.dns_challenge: @@ -129,11 +148,11 @@ class AcmeHandler(object): verified = response.simple_verify( dns_challenge.chall, authz_record.host, - acme_client.client.net.key.public_key() + acme_client.client.net.key.public_key(), ) if not verified: - metrics.send('complete_dns_challenge_verification_error', 'counter', 1) + metrics.send("complete_dns_challenge_verification_error", "counter", 1) raise ValueError("Failed verification") time.sleep(5) @@ -152,8 +171,10 @@ class AcmeHandler(object): except (AcmeError, TimeoutError): sentry.captureException(extra={"order_url": str(order.uri)}) - metrics.send('request_certificate_error', 'counter', 1) - current_app.logger.error(f"Unable to resolve Acme order: {order.uri}", exc_info=True) + metrics.send("request_certificate_error", "counter", 1) + current_app.logger.error( + f"Unable to resolve Acme order: {order.uri}", exc_info=True + ) raise except errors.ValidationError: if order.fullchain_pem: @@ -161,12 +182,19 @@ class AcmeHandler(object): else: raise - pem_certificate = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM, - OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, - orderr.fullchain_pem)).decode() - pem_certificate_chain = orderr.fullchain_pem[len(pem_certificate):].lstrip() + pem_certificate = OpenSSL.crypto.dump_certificate( + OpenSSL.crypto.FILETYPE_PEM, + OpenSSL.crypto.load_certificate( + OpenSSL.crypto.FILETYPE_PEM, orderr.fullchain_pem + ), + ).decode() + pem_certificate_chain = orderr.fullchain_pem[ + len(pem_certificate) : # noqa + ].lstrip() - current_app.logger.debug("{0} {1}".format(type(pem_certificate), type(pem_certificate_chain))) + current_app.logger.debug( + "{0} {1}".format(type(pem_certificate), type(pem_certificate_chain)) + ) return pem_certificate, pem_certificate_chain def setup_acme_client(self, authority): @@ -176,30 +204,40 @@ class AcmeHandler(object): for option in json.loads(authority.options): options[option["name"]] = option.get("value") - email = options.get('email', current_app.config.get('ACME_EMAIL')) - tel = options.get('telephone', current_app.config.get('ACME_TEL')) - directory_url = options.get('acme_url', current_app.config.get('ACME_DIRECTORY_URL')) + email = options.get("email", current_app.config.get("ACME_EMAIL")) + tel = options.get("telephone", current_app.config.get("ACME_TEL")) + directory_url = options.get( + "acme_url", current_app.config.get("ACME_DIRECTORY_URL") + ) - existing_key = options.get('acme_private_key', current_app.config.get('ACME_PRIVATE_KEY')) - existing_regr = options.get('acme_regr', current_app.config.get('ACME_REGR')) + existing_key = options.get( + "acme_private_key", current_app.config.get("ACME_PRIVATE_KEY") + ) + existing_regr = options.get("acme_regr", current_app.config.get("ACME_REGR")) if existing_key and existing_regr: # Reuse the same account for each certificate issuance key = jose.JWK.json_loads(existing_key) regr = messages.RegistrationResource.json_loads(existing_regr) - current_app.logger.debug("Connecting with directory at {0}".format(directory_url)) + 
current_app.logger.debug( + "Connecting with directory at {0}".format(directory_url) + ) net = ClientNetwork(key, account=regr) client = BackwardsCompatibleClientV2(net, key, directory_url) return client, {} else: # Create an account for each certificate issuance - key = jose.JWKRSA(key=generate_private_key('RSA2048')) + key = jose.JWKRSA(key=generate_private_key("RSA2048")) - current_app.logger.debug("Connecting with directory at {0}".format(directory_url)) + current_app.logger.debug( + "Connecting with directory at {0}".format(directory_url) + ) net = ClientNetwork(key, account=None, timeout=3600) client = BackwardsCompatibleClientV2(net, key, directory_url) - registration = client.new_account_and_tos(messages.NewRegistration.from_data(email=email)) + registration = client.new_account_and_tos( + messages.NewRegistration.from_data(email=email) + ) current_app.logger.debug("Connected: {0}".format(registration.uri)) return client, registration @@ -212,9 +250,9 @@ class AcmeHandler(object): """ current_app.logger.debug("Fetching domains") - domains = [options['common_name']] - if options.get('extensions'): - for name in options['extensions']['sub_alt_names']['names']: + domains = [options["common_name"]] + if options.get("extensions"): + for name in options["extensions"]["sub_alt_names"]["names"]: domains.append(name) current_app.logger.debug("Got these domains: {0}".format(domains)) @@ -225,16 +263,22 @@ class AcmeHandler(object): for domain in order_info.domains: if not self.dns_providers_for_domain.get(domain): - metrics.send('get_authorizations_no_dns_provider_for_domain', 'counter', 1) + metrics.send( + "get_authorizations_no_dns_provider_for_domain", "counter", 1 + ) raise Exception("No DNS providers found for domain: {}".format(domain)) for dns_provider in self.dns_providers_for_domain[domain]: dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type) dns_provider_options = json.loads(dns_provider.credentials) account_number = dns_provider_options.get("account_id") - authz_record = self.start_dns_challenge(acme_client, account_number, domain, - dns_provider_plugin, - order, - dns_provider.options) + authz_record = self.start_dns_challenge( + acme_client, + account_number, + domain, + dns_provider_plugin, + order, + dns_provider.options, + ) authorizations.append(authz_record) return authorizations @@ -268,16 +312,20 @@ class AcmeHandler(object): dns_providers = self.dns_providers_for_domain.get(authz_record.host) for dns_provider in dns_providers: # Grab account number (For Route53) - dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type) + dns_provider_plugin = self.get_dns_provider( + dns_provider.provider_type + ) dns_provider_options = json.loads(dns_provider.credentials) account_number = dns_provider_options.get("account_id") host_to_validate = self.maybe_remove_wildcard(authz_record.host) - host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options) + host_to_validate = self.maybe_add_extension( + host_to_validate, dns_provider_options + ) dns_provider_plugin.delete_txt_record( authz_record.change_id, account_number, dns_challenge.validation_domain_name(host_to_validate), - dns_challenge.validation(acme_client.client.net.key) + dns_challenge.validation(acme_client.client.net.key), ) return authorizations @@ -302,7 +350,9 @@ class AcmeHandler(object): account_number = dns_provider_options.get("account_id") dns_challenges = authz_record.dns_challenge host_to_validate = self.maybe_remove_wildcard(authz_record.host) - 
host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options) + host_to_validate = self.maybe_add_extension( + host_to_validate, dns_provider_options + ) dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type) for dns_challenge in dns_challenges: try: @@ -310,21 +360,17 @@ class AcmeHandler(object): authz_record.change_id, account_number, dns_challenge.validation_domain_name(host_to_validate), - dns_challenge.validation(acme_client.client.net.key) + dns_challenge.validation(acme_client.client.net.key), ) except Exception as e: # If this fails, it's most likely because the record doesn't exist (It was already cleaned up) # or we're not authorized to modify it. - metrics.send('cleanup_dns_challenges_error', 'counter', 1) + metrics.send("cleanup_dns_challenges_error", "counter", 1) sentry.captureException() pass def get_dns_provider(self, type): - provider_types = { - 'cloudflare': cloudflare, - 'dyn': dyn, - 'route53': route53, - } + provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53} provider = provider_types.get(type) if not provider: raise UnknownProvider("No such DNS provider: {}".format(type)) @@ -332,41 +378,43 @@ class AcmeHandler(object): class ACMEIssuerPlugin(IssuerPlugin): - title = 'Acme' - slug = 'acme-issuer' - description = 'Enables the creation of certificates via ACME CAs (including Let\'s Encrypt)' + title = "Acme" + slug = "acme-issuer" + description = ( + "Enables the creation of certificates via ACME CAs (including Let's Encrypt)" + ) version = acme.VERSION - author = 'Netflix' - author_url = 'https://github.com/netflix/lemur.git' + author = "Netflix" + author_url = "https://github.com/netflix/lemur.git" options = [ { - 'name': 'acme_url', - 'type': 'str', - 'required': True, - 'validation': '/^http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+$/', - 'helpMessage': 'Must be a valid web url starting with http[s]://', + "name": "acme_url", + "type": "str", + "required": True, + "validation": "/^http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+$/", + "helpMessage": "Must be a valid web url starting with http[s]://", }, { - 'name': 'telephone', - 'type': 'str', - 'default': '', - 'helpMessage': 'Telephone to use' + "name": "telephone", + "type": "str", + "default": "", + "helpMessage": "Telephone to use", }, { - 'name': 'email', - 'type': 'str', - 'default': '', - 'validation': '/^?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)$/', - 'helpMessage': 'Email to use' + "name": "email", + "type": "str", + "default": "", + "validation": "/^?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)$/", + "helpMessage": "Email to use", }, { - 'name': 'certificate', - 'type': 'textarea', - 'default': '', - 'validation': '/^-----BEGIN CERTIFICATE-----/', - 'helpMessage': 'Certificate to use' + "name": "certificate", + "type": "textarea", + "default": "", + "validation": "/^-----BEGIN CERTIFICATE-----/", + "helpMessage": "Certificate to use", }, ] @@ -376,11 +424,7 @@ class ACMEIssuerPlugin(IssuerPlugin): def get_dns_provider(self, type): self.acme = AcmeHandler() - provider_types = { - 'cloudflare': cloudflare, - 'dyn': dyn, - 'route53': route53, - } + provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53} provider = provider_types.get(type) if not provider: raise UnknownProvider("No such DNS provider: {}".format(type)) @@ -411,24 +455,31 @@ class ACMEIssuerPlugin(IssuerPlugin): try: order = acme_client.new_order(pending_cert.csr) except WildcardUnsupportedError: - 
metrics.send('get_ordered_certificate_wildcard_unsupported', 'counter', 1) - raise Exception("The currently selected ACME CA endpoint does" - " not support issuing wildcard certificates.") + metrics.send("get_ordered_certificate_wildcard_unsupported", "counter", 1) + raise Exception( + "The currently selected ACME CA endpoint does" + " not support issuing wildcard certificates." + ) try: - authorizations = self.acme.get_authorizations(acme_client, order, order_info) + authorizations = self.acme.get_authorizations( + acme_client, order, order_info + ) except ClientError: sentry.captureException() - metrics.send('get_ordered_certificate_error', 'counter', 1) - current_app.logger.error(f"Unable to resolve pending cert: {pending_cert.name}", exc_info=True) + metrics.send("get_ordered_certificate_error", "counter", 1) + current_app.logger.error( + f"Unable to resolve pending cert: {pending_cert.name}", exc_info=True + ) return False authorizations = self.acme.finalize_authorizations(acme_client, authorizations) pem_certificate, pem_certificate_chain = self.acme.request_certificate( - acme_client, authorizations, order) + acme_client, authorizations, order + ) cert = { - 'body': "\n".join(str(pem_certificate).splitlines()), - 'chain': "\n".join(str(pem_certificate_chain).splitlines()), - 'external_id': str(pending_cert.external_id) + "body": "\n".join(str(pem_certificate).splitlines()), + "chain": "\n".join(str(pem_certificate_chain).splitlines()), + "external_id": str(pending_cert.external_id), } return cert @@ -438,10 +489,14 @@ class ACMEIssuerPlugin(IssuerPlugin): certs = [] for pending_cert in pending_certs: try: - acme_client, registration = self.acme.setup_acme_client(pending_cert.authority) + acme_client, registration = self.acme.setup_acme_client( + pending_cert.authority + ) order_info = authorization_service.get(pending_cert.external_id) if pending_cert.dns_provider_id: - dns_provider = dns_provider_service.get(pending_cert.dns_provider_id) + dns_provider = dns_provider_service.get( + pending_cert.dns_provider_id + ) for domain in order_info.domains: # Currently, we only support specifying one DNS provider per certificate, even if that @@ -455,70 +510,79 @@ class ACMEIssuerPlugin(IssuerPlugin): order = acme_client.new_order(pending_cert.csr) except WildcardUnsupportedError: sentry.captureException() - metrics.send('get_ordered_certificates_wildcard_unsupported_error', 'counter', 1) - raise Exception("The currently selected ACME CA endpoint does" - " not support issuing wildcard certificates.") + metrics.send( + "get_ordered_certificates_wildcard_unsupported_error", + "counter", + 1, + ) + raise Exception( + "The currently selected ACME CA endpoint does" + " not support issuing wildcard certificates." 
+ ) - authorizations = self.acme.get_authorizations(acme_client, order, order_info) + authorizations = self.acme.get_authorizations( + acme_client, order, order_info + ) - pending.append({ - "acme_client": acme_client, - "authorizations": authorizations, - "pending_cert": pending_cert, - "order": order, - }) + pending.append( + { + "acme_client": acme_client, + "authorizations": authorizations, + "pending_cert": pending_cert, + "order": order, + } + ) except (ClientError, ValueError, Exception) as e: sentry.captureException() - metrics.send('get_ordered_certificates_pending_creation_error', 'counter', 1) - current_app.logger.error(f"Unable to resolve pending cert: {pending_cert}", exc_info=True) + metrics.send( + "get_ordered_certificates_pending_creation_error", "counter", 1 + ) + current_app.logger.error( + f"Unable to resolve pending cert: {pending_cert}", exc_info=True + ) error = e if globals().get("order") and order: error += f" Order uri: {order.uri}" - certs.append({ - "cert": False, - "pending_cert": pending_cert, - "last_error": e, - }) + certs.append( + {"cert": False, "pending_cert": pending_cert, "last_error": e} + ) for entry in pending: try: entry["authorizations"] = self.acme.finalize_authorizations( - entry["acme_client"], - entry["authorizations"], + entry["acme_client"], entry["authorizations"] ) pem_certificate, pem_certificate_chain = self.acme.request_certificate( - entry["acme_client"], - entry["authorizations"], - entry["order"] + entry["acme_client"], entry["authorizations"], entry["order"] ) cert = { - 'body': "\n".join(str(pem_certificate).splitlines()), - 'chain': "\n".join(str(pem_certificate_chain).splitlines()), - 'external_id': str(entry["pending_cert"].external_id) + "body": "\n".join(str(pem_certificate).splitlines()), + "chain": "\n".join(str(pem_certificate_chain).splitlines()), + "external_id": str(entry["pending_cert"].external_id), } - certs.append({ - "cert": cert, - "pending_cert": entry["pending_cert"], - }) + certs.append({"cert": cert, "pending_cert": entry["pending_cert"]}) except (PollError, AcmeError, Exception) as e: sentry.captureException() - metrics.send('get_ordered_certificates_resolution_error', 'counter', 1) + metrics.send("get_ordered_certificates_resolution_error", "counter", 1) order_url = order.uri error = f"{e}. Order URI: {order_url}" current_app.logger.error( f"Unable to resolve pending cert: {pending_cert}. 
" - f"Check out {order_url} for more information.", exc_info=True) - certs.append({ - "cert": False, - "pending_cert": entry["pending_cert"], - "last_error": error, - }) + f"Check out {order_url} for more information.", + exc_info=True, + ) + certs.append( + { + "cert": False, + "pending_cert": entry["pending_cert"], + "last_error": error, + } + ) # Ensure DNS records get deleted self.acme.cleanup_dns_challenges( - entry["acme_client"], - entry["authorizations"], + entry["acme_client"], entry["authorizations"] ) return certs @@ -531,20 +595,26 @@ class ACMEIssuerPlugin(IssuerPlugin): :return: :raise Exception: """ self.acme = AcmeHandler() - authority = issuer_options.get('authority') - create_immediately = issuer_options.get('create_immediately', False) + authority = issuer_options.get("authority") + create_immediately = issuer_options.get("create_immediately", False) acme_client, registration = self.acme.setup_acme_client(authority) - dns_provider = issuer_options.get('dns_provider', {}) + dns_provider = issuer_options.get("dns_provider", {}) if dns_provider: dns_provider_options = dns_provider.options credentials = json.loads(dns_provider.credentials) - current_app.logger.debug("Using DNS provider: {0}".format(dns_provider.provider_type)) - dns_provider_plugin = __import__(dns_provider.provider_type, globals(), locals(), [], 1) + current_app.logger.debug( + "Using DNS provider: {0}".format(dns_provider.provider_type) + ) + dns_provider_plugin = __import__( + dns_provider.provider_type, globals(), locals(), [], 1 + ) account_number = credentials.get("account_id") provider_type = dns_provider.provider_type if provider_type == "route53" and not account_number: - error = "Route53 DNS Provider {} does not have an account number configured.".format(dns_provider.name) + error = "Route53 DNS Provider {} does not have an account number configured.".format( + dns_provider.name + ) current_app.logger.error(error) raise InvalidConfiguration(error) else: @@ -563,16 +633,29 @@ class ACMEIssuerPlugin(IssuerPlugin): else: authz_domains.append(d.value) - dns_authorization = authorization_service.create(account_number, authz_domains, - provider_type) + dns_authorization = authorization_service.create( + account_number, authz_domains, provider_type + ) # Return id of the DNS Authorization return None, None, dns_authorization.id - authorizations = self.acme.get_authorizations(acme_client, account_number, domains, dns_provider_plugin, - dns_provider_options) - self.acme.finalize_authorizations(acme_client, account_number, dns_provider_plugin, authorizations, - dns_provider_options) - pem_certificate, pem_certificate_chain = self.acme.request_certificate(acme_client, authorizations, csr) + authorizations = self.acme.get_authorizations( + acme_client, + account_number, + domains, + dns_provider_plugin, + dns_provider_options, + ) + self.acme.finalize_authorizations( + acme_client, + account_number, + dns_provider_plugin, + authorizations, + dns_provider_options, + ) + pem_certificate, pem_certificate_chain = self.acme.request_certificate( + acme_client, authorizations, csr + ) # TODO add external ID (if possible) return pem_certificate, pem_certificate_chain, None @@ -585,18 +668,18 @@ class ACMEIssuerPlugin(IssuerPlugin): :param options: :return: """ - role = {'username': '', 'password': '', 'name': 'acme'} - plugin_options = options.get('plugin', {}).get('plugin_options') + role = {"username": "", "password": "", "name": "acme"} + plugin_options = options.get("plugin", {}).get("plugin_options") if not 
plugin_options: error = "Invalid options for lemur_acme plugin: {}".format(options) current_app.logger.error(error) raise InvalidConfiguration(error) # Define static acme_root based off configuration variable by default. However, if user has passed a # certificate, use this certificate as the root. - acme_root = current_app.config.get('ACME_ROOT') + acme_root = current_app.config.get("ACME_ROOT") for option in plugin_options: - if option.get('name') == 'certificate': - acme_root = option.get('value') + if option.get("name") == "certificate": + acme_root = option.get("value") return acme_root, "", [role] def cancel_ordered_certificate(self, pending_cert, **kwargs): diff --git a/lemur/plugins/lemur_acme/route53.py b/lemur/plugins/lemur_acme/route53.py index 3b6c5b32..55da5161 100644 --- a/lemur/plugins/lemur_acme/route53.py +++ b/lemur/plugins/lemur_acme/route53.py @@ -3,7 +3,7 @@ import time from lemur.plugins.lemur_aws.sts import sts_client -@sts_client('route53') +@sts_client("route53") def wait_for_dns_change(change_id, client=None): _, change_id = change_id @@ -14,7 +14,7 @@ def wait_for_dns_change(change_id, client=None): time.sleep(5) -@sts_client('route53') +@sts_client("route53") def find_zone_id(domain, client=None): paginator = client.get_paginator("list_hosted_zones") zones = [] @@ -25,34 +25,35 @@ def find_zone_id(domain, client=None): zones.append((zone["Name"], zone["Id"])) if not zones: - raise ValueError( - "Unable to find a Route53 hosted zone for {}".format(domain) - ) + raise ValueError("Unable to find a Route53 hosted zone for {}".format(domain)) return zones[0][1] -@sts_client('route53') +@sts_client("route53") def get_zones(client=None): paginator = client.get_paginator("list_hosted_zones") zones = [] for page in paginator.paginate(): for zone in page["HostedZones"]: - zones.append(zone["Name"][:-1]) # We need [:-1] to strip out the trailing dot. + zones.append( + zone["Name"][:-1] + ) # We need [:-1] to strip out the trailing dot. return zones -@sts_client('route53') +@sts_client("route53") def change_txt_record(action, zone_id, domain, value, client=None): current_txt_records = [] try: current_records = client.list_resource_record_sets( HostedZoneId=zone_id, StartRecordName=domain, - StartRecordType='TXT', - MaxItems="1")["ResourceRecordSets"] + StartRecordType="TXT", + MaxItems="1", + )["ResourceRecordSets"] for record in current_records: - if record.get('Type') == 'TXT': + if record.get("Type") == "TXT": current_txt_records.extend(record.get("ResourceRecords", [])) except Exception as e: # Current Resource Record does not exist @@ -72,7 +73,9 @@ def change_txt_record(action, zone_id, domain, value, client=None): # If we want to delete one record out of many, we'll update the record to not include the deleted value instead. # This allows us to support concurrent issuance. 
current_txt_records = [ - record for record in current_txt_records if not (record.get('Value') == '"{}"'.format(value)) + record + for record in current_txt_records + if not (record.get("Value") == '"{}"'.format(value)) ] action = "UPSERT" @@ -87,10 +90,10 @@ def change_txt_record(action, zone_id, domain, value, client=None): "Type": "TXT", "TTL": 300, "ResourceRecords": current_txt_records, - } + }, } ] - } + }, ) return response["ChangeInfo"]["Id"] @@ -98,11 +101,7 @@ def change_txt_record(action, zone_id, domain, value, client=None): def create_txt_record(host, value, account_number): zone_id = find_zone_id(host, account_number=account_number) change_id = change_txt_record( - "UPSERT", - zone_id, - host, - value, - account_number=account_number + "UPSERT", zone_id, host, value, account_number=account_number ) return zone_id, change_id @@ -113,11 +112,7 @@ def delete_txt_record(change_ids, account_number, host, value): zone_id, _ = change_id try: change_txt_record( - "DELETE", - zone_id, - host, - value, - account_number=account_number + "DELETE", zone_id, host, value, account_number=account_number ) except Exception as e: if "but it was not found" in e.response.get("Error", {}).get("Message"): diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index 0c406627..3bf1d05c 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -6,8 +6,7 @@ from lemur.plugins.lemur_acme import plugin class TestAcme(unittest.TestCase): - - @patch('lemur.plugins.lemur_acme.plugin.dns_provider_service') + @patch("lemur.plugins.lemur_acme.plugin.dns_provider_service") def setUp(self, mock_dns_provider_service): self.ACMEIssuerPlugin = plugin.ACMEIssuerPlugin() self.acme = plugin.AcmeHandler() @@ -15,14 +14,17 @@ class TestAcme(unittest.TestCase): mock_dns_provider.name = "cloudflare" mock_dns_provider.credentials = "{}" mock_dns_provider.provider_type = "cloudflare" - self.acme.dns_providers_for_domain = {"www.test.com": [mock_dns_provider], - "test.fakedomain.net": [mock_dns_provider]} + self.acme.dns_providers_for_domain = { + "www.test.com": [mock_dns_provider], + "test.fakedomain.net": [mock_dns_provider], + } - @patch('lemur.plugins.lemur_acme.plugin.len', return_value=1) + @patch("lemur.plugins.lemur_acme.plugin.len", return_value=1) def test_find_dns_challenge(self, mock_len): assert mock_len from acme import challenges + c = challenges.DNS01() mock_authz = Mock() @@ -37,11 +39,13 @@ class TestAcme(unittest.TestCase): a = plugin.AuthorizationRecord("host", "authz", "challenge", "id") self.assertEqual(type(a), plugin.AuthorizationRecord) - @patch('acme.client.Client') - @patch('lemur.plugins.lemur_acme.plugin.current_app') - @patch('lemur.plugins.lemur_acme.plugin.len', return_value=1) - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge') - def test_start_dns_challenge(self, mock_find_dns_challenge, mock_len, mock_app, mock_acme): + @patch("acme.client.Client") + @patch("lemur.plugins.lemur_acme.plugin.current_app") + @patch("lemur.plugins.lemur_acme.plugin.len", return_value=1) + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge") + def test_start_dns_challenge( + self, mock_find_dns_challenge, mock_len, mock_app, mock_acme + ): assert mock_len mock_order = Mock() mock_app.logger.debug = Mock() @@ -49,6 +53,7 @@ class TestAcme(unittest.TestCase): mock_authz.body.resolved_combinations = [] mock_entry = MagicMock() from acme import challenges + c = 
challenges.DNS01() mock_entry.chall = TestAcme.test_complete_dns_challenge_fail mock_authz.body.resolved_combinations.append(mock_entry) @@ -60,13 +65,17 @@ class TestAcme(unittest.TestCase): iterable = mock_find_dns_challenge.return_value iterator = iter(values) iterable.__iter__.return_value = iterator - result = self.acme.start_dns_challenge(mock_acme, "accountid", "host", mock_dns_provider, mock_order, {}) + result = self.acme.start_dns_challenge( + mock_acme, "accountid", "host", mock_dns_provider, mock_order, {} + ) self.assertEqual(type(result), plugin.AuthorizationRecord) - @patch('acme.client.Client') - @patch('lemur.plugins.lemur_acme.plugin.current_app') - @patch('lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change') - def test_complete_dns_challenge_success(self, mock_wait_for_dns_change, mock_current_app, mock_acme): + @patch("acme.client.Client") + @patch("lemur.plugins.lemur_acme.plugin.current_app") + @patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change") + def test_complete_dns_challenge_success( + self, mock_wait_for_dns_change, mock_current_app, mock_acme + ): mock_dns_provider = Mock() mock_dns_provider.wait_for_dns_change = Mock(return_value=True) mock_authz = Mock() @@ -84,10 +93,12 @@ class TestAcme(unittest.TestCase): mock_authz.dns_challenge.append(dns_challenge) self.acme.complete_dns_challenge(mock_acme, mock_authz) - @patch('acme.client.Client') - @patch('lemur.plugins.lemur_acme.plugin.current_app') - @patch('lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change') - def test_complete_dns_challenge_fail(self, mock_wait_for_dns_change, mock_current_app, mock_acme): + @patch("acme.client.Client") + @patch("lemur.plugins.lemur_acme.plugin.current_app") + @patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change") + def test_complete_dns_challenge_fail( + self, mock_wait_for_dns_change, mock_current_app, mock_acme + ): mock_dns_provider = Mock() mock_dns_provider.wait_for_dns_change = Mock(return_value=True) @@ -105,16 +116,22 @@ class TestAcme(unittest.TestCase): dns_challenge = Mock() mock_authz.dns_challenge.append(dns_challenge) self.assertRaises( - ValueError, - self.acme.complete_dns_challenge(mock_acme, mock_authz) + ValueError, self.acme.complete_dns_challenge(mock_acme, mock_authz) ) - @patch('acme.client.Client') - @patch('OpenSSL.crypto', return_value="mock_cert") - @patch('josepy.util.ComparableX509') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge') - @patch('lemur.plugins.lemur_acme.plugin.current_app') - def test_request_certificate(self, mock_current_app, mock_find_dns_challenge, mock_jose, mock_crypto, mock_acme): + @patch("acme.client.Client") + @patch("OpenSSL.crypto", return_value="mock_cert") + @patch("josepy.util.ComparableX509") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge") + @patch("lemur.plugins.lemur_acme.plugin.current_app") + def test_request_certificate( + self, + mock_current_app, + mock_find_dns_challenge, + mock_jose, + mock_crypto, + mock_acme, + ): mock_cert_response = Mock() mock_cert_response.body = "123" mock_cert_response_full = [mock_cert_response, True] @@ -124,7 +141,7 @@ class TestAcme(unittest.TestCase): mock_authz_record.authz = Mock() mock_authz.append(mock_authz_record) mock_acme.fetch_chain = Mock(return_value="mock_chain") - mock_crypto.dump_certificate = Mock(return_value=b'chain') + mock_crypto.dump_certificate = Mock(return_value=b"chain") mock_order = Mock() self.acme.request_certificate(mock_acme, [], mock_order) @@ -134,8 +151,8 @@ class 
TestAcme(unittest.TestCase): with self.assertRaises(Exception): self.acme.setup_acme_client(mock_authority) - @patch('lemur.plugins.lemur_acme.plugin.BackwardsCompatibleClientV2') - @patch('lemur.plugins.lemur_acme.plugin.current_app') + @patch("lemur.plugins.lemur_acme.plugin.BackwardsCompatibleClientV2") + @patch("lemur.plugins.lemur_acme.plugin.current_app") def test_setup_acme_client_success(self, mock_current_app, mock_acme): mock_authority = Mock() mock_authority.options = '[{"name": "mock_name", "value": "mock_value"}]' @@ -150,31 +167,29 @@ class TestAcme(unittest.TestCase): assert result_client assert result_registration - @patch('lemur.plugins.lemur_acme.plugin.current_app') + @patch("lemur.plugins.lemur_acme.plugin.current_app") def test_get_domains_single(self, mock_current_app): - options = { - "common_name": "test.netflix.net" - } + options = {"common_name": "test.netflix.net"} result = self.acme.get_domains(options) self.assertEqual(result, [options["common_name"]]) - @patch('lemur.plugins.lemur_acme.plugin.current_app') + @patch("lemur.plugins.lemur_acme.plugin.current_app") def test_get_domains_multiple(self, mock_current_app): options = { "common_name": "test.netflix.net", "extensions": { - "sub_alt_names": { - "names": [ - "test2.netflix.net", - "test3.netflix.net" - ] - } - } + "sub_alt_names": {"names": ["test2.netflix.net", "test3.netflix.net"]} + }, } result = self.acme.get_domains(options) - self.assertEqual(result, [options["common_name"], "test2.netflix.net", "test3.netflix.net"]) + self.assertEqual( + result, [options["common_name"], "test2.netflix.net", "test3.netflix.net"] + ) - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.start_dns_challenge', return_value="test") + @patch( + "lemur.plugins.lemur_acme.plugin.AcmeHandler.start_dns_challenge", + return_value="test", + ) def test_get_authorizations(self, mock_start_dns_challenge): mock_order = Mock() mock_order.body.identifiers = [] @@ -183,10 +198,15 @@ class TestAcme(unittest.TestCase): mock_order_info = Mock() mock_order_info.account_number = 1 mock_order_info.domains = ["test.fakedomain.net"] - result = self.acme.get_authorizations("acme_client", mock_order, mock_order_info) + result = self.acme.get_authorizations( + "acme_client", mock_order, mock_order_info + ) self.assertEqual(result, ["test"]) - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.complete_dns_challenge', return_value="test") + @patch( + "lemur.plugins.lemur_acme.plugin.AcmeHandler.complete_dns_challenge", + return_value="test", + ) def test_finalize_authorizations(self, mock_complete_dns_challenge): mock_authz = [] mock_authz_record = MagicMock() @@ -202,28 +222,28 @@ class TestAcme(unittest.TestCase): result = self.acme.finalize_authorizations(mock_acme_client, mock_authz) self.assertEqual(result, mock_authz) - @patch('lemur.plugins.lemur_acme.plugin.current_app') + @patch("lemur.plugins.lemur_acme.plugin.current_app") def test_create_authority(self, mock_current_app): mock_current_app.config = Mock() options = { - "plugin": { - "plugin_options": [{ - "name": "certificate", - "value": "123" - }] - } + "plugin": {"plugin_options": [{"name": "certificate", "value": "123"}]} } acme_root, b, role = self.ACMEIssuerPlugin.create_authority(options) self.assertEqual(acme_root, "123") self.assertEqual(b, "") - self.assertEqual(role, [{'username': '', 'password': '', 'name': 'acme'}]) + self.assertEqual(role, [{"username": "", "password": "", "name": "acme"}]) - @patch('lemur.plugins.lemur_acme.plugin.current_app') - 
@patch('lemur.plugins.lemur_acme.dyn.current_app') - @patch('lemur.plugins.lemur_acme.cloudflare.current_app') - @patch('lemur.plugins.lemur_acme.plugin.dns_provider_service') - def test_get_dns_provider(self, mock_dns_provider_service, mock_current_app_cloudflare, mock_current_app_dyn, - mock_current_app): + @patch("lemur.plugins.lemur_acme.plugin.current_app") + @patch("lemur.plugins.lemur_acme.dyn.current_app") + @patch("lemur.plugins.lemur_acme.cloudflare.current_app") + @patch("lemur.plugins.lemur_acme.plugin.dns_provider_service") + def test_get_dns_provider( + self, + mock_dns_provider_service, + mock_current_app_cloudflare, + mock_current_app_dyn, + mock_current_app, + ): provider = plugin.ACMEIssuerPlugin() route53 = provider.get_dns_provider("route53") assert route53 @@ -232,16 +252,23 @@ class TestAcme(unittest.TestCase): dyn = provider.get_dns_provider("dyn") assert dyn - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client') - @patch('lemur.plugins.lemur_acme.plugin.current_app') - @patch('lemur.plugins.lemur_acme.plugin.authorization_service') - @patch('lemur.plugins.lemur_acme.plugin.dns_provider_service') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate') + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client") + @patch("lemur.plugins.lemur_acme.plugin.current_app") + @patch("lemur.plugins.lemur_acme.plugin.authorization_service") + @patch("lemur.plugins.lemur_acme.plugin.dns_provider_service") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate") def test_get_ordered_certificate( - self, mock_request_certificate, mock_finalize_authorizations, mock_get_authorizations, - mock_dns_provider_service, mock_authorization_service, mock_current_app, mock_acme): + self, + mock_request_certificate, + mock_finalize_authorizations, + mock_get_authorizations, + mock_dns_provider_service, + mock_authorization_service, + mock_current_app, + mock_acme, + ): mock_client = Mock() mock_acme.return_value = (mock_client, "") mock_request_certificate.return_value = ("pem_certificate", "chain") @@ -253,24 +280,26 @@ class TestAcme(unittest.TestCase): provider.get_dns_provider = Mock() result = provider.get_ordered_certificate(mock_cert) self.assertEqual( - result, - { - 'body': "pem_certificate", - 'chain': "chain", - 'external_id': "1" - } + result, {"body": "pem_certificate", "chain": "chain", "external_id": "1"} ) - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client') - @patch('lemur.plugins.lemur_acme.plugin.current_app') - @patch('lemur.plugins.lemur_acme.plugin.authorization_service') - @patch('lemur.plugins.lemur_acme.plugin.dns_provider_service') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate') + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client") + @patch("lemur.plugins.lemur_acme.plugin.current_app") + @patch("lemur.plugins.lemur_acme.plugin.authorization_service") + @patch("lemur.plugins.lemur_acme.plugin.dns_provider_service") + 
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate") def test_get_ordered_certificates( - self, mock_request_certificate, mock_finalize_authorizations, mock_get_authorizations, - mock_dns_provider_service, mock_authorization_service, mock_current_app, mock_acme): + self, + mock_request_certificate, + mock_finalize_authorizations, + mock_get_authorizations, + mock_dns_provider_service, + mock_authorization_service, + mock_current_app, + mock_acme, + ): mock_client = Mock() mock_acme.return_value = (mock_client, "") mock_request_certificate.return_value = ("pem_certificate", "chain") @@ -285,19 +314,32 @@ class TestAcme(unittest.TestCase): provider.get_dns_provider = Mock() result = provider.get_ordered_certificates([mock_cert, mock_cert2]) self.assertEqual(len(result), 2) - self.assertEqual(result[0]['cert'], {'body': 'pem_certificate', 'chain': 'chain', 'external_id': '1'}) - self.assertEqual(result[1]['cert'], {'body': 'pem_certificate', 'chain': 'chain', 'external_id': '2'}) + self.assertEqual( + result[0]["cert"], + {"body": "pem_certificate", "chain": "chain", "external_id": "1"}, + ) + self.assertEqual( + result[1]["cert"], + {"body": "pem_certificate", "chain": "chain", "external_id": "2"}, + ) - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client') - @patch('lemur.plugins.lemur_acme.plugin.dns_provider_service') - @patch('lemur.plugins.lemur_acme.plugin.current_app') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations') - @patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate') - @patch('lemur.plugins.lemur_acme.plugin.authorization_service') - def test_create_certificate(self, mock_authorization_service, mock_request_certificate, - mock_finalize_authorizations, mock_get_authorizations, - mock_current_app, mock_dns_provider_service, mock_acme): + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client") + @patch("lemur.plugins.lemur_acme.plugin.dns_provider_service") + @patch("lemur.plugins.lemur_acme.plugin.current_app") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate") + @patch("lemur.plugins.lemur_acme.plugin.authorization_service") + def test_create_certificate( + self, + mock_authorization_service, + mock_request_certificate, + mock_finalize_authorizations, + mock_get_authorizations, + mock_current_app, + mock_dns_provider_service, + mock_acme, + ): provider = plugin.ACMEIssuerPlugin() mock_authority = Mock() @@ -310,9 +352,9 @@ class TestAcme(unittest.TestCase): mock_dns_provider_service.get.return_value = mock_dns_provider issuer_options = { - 'authority': mock_authority, - 'dns_provider': mock_dns_provider, - "common_name": "test.netflix.net" + "authority": mock_authority, + "dns_provider": mock_dns_provider, + "common_name": "test.netflix.net", } csr = "123" mock_request_certificate.return_value = ("pem_certificate", "chain") diff --git a/lemur/plugins/lemur_adcs/__init__.py b/lemur/plugins/lemur_adcs/__init__.py index 6b61e936..b902ed7a 100644 --- a/lemur/plugins/lemur_adcs/__init__.py +++ b/lemur/plugins/lemur_adcs/__init__.py @@ -1,6 +1,5 @@ """Set the version 
information.""" try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_adcs/plugin.py b/lemur/plugins/lemur_adcs/plugin.py index b7698474..bc07ede3 100644 --- a/lemur/plugins/lemur_adcs/plugin.py +++ b/lemur/plugins/lemur_adcs/plugin.py @@ -7,13 +7,13 @@ from flask import current_app class ADCSIssuerPlugin(IssuerPlugin): - title = 'ADCS' - slug = 'adcs-issuer' - description = 'Enables the creation of certificates by ADCS (Active Directory Certificate Services)' + title = "ADCS" + slug = "adcs-issuer" + description = "Enables the creation of certificates by ADCS (Active Directory Certificate Services)" version = ADCS.VERSION - author = 'sirferl' - author_url = 'https://github.com/sirferl/lemur' + author = "sirferl" + author_url = "https://github.com/sirferl/lemur" def __init__(self, *args, **kwargs): """Initialize the issuer with the appropriate details.""" @@ -30,66 +30,80 @@ class ADCSIssuerPlugin(IssuerPlugin): :param options: :return: """ - adcs_root = current_app.config.get('ADCS_ROOT') - adcs_issuing = current_app.config.get('ADCS_ISSUING') - role = {'username': '', 'password': '', 'name': 'adcs'} + adcs_root = current_app.config.get("ADCS_ROOT") + adcs_issuing = current_app.config.get("ADCS_ISSUING") + role = {"username": "", "password": "", "name": "adcs"} return adcs_root, adcs_issuing, [role] def create_certificate(self, csr, issuer_options): - adcs_server = current_app.config.get('ADCS_SERVER') - adcs_user = current_app.config.get('ADCS_USER') - adcs_pwd = current_app.config.get('ADCS_PWD') - adcs_auth_method = current_app.config.get('ADCS_AUTH_METHOD') - adcs_template = current_app.config.get('ADCS_TEMPLATE') - ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method) + adcs_server = current_app.config.get("ADCS_SERVER") + adcs_user = current_app.config.get("ADCS_USER") + adcs_pwd = current_app.config.get("ADCS_PWD") + adcs_auth_method = current_app.config.get("ADCS_AUTH_METHOD") + adcs_template = current_app.config.get("ADCS_TEMPLATE") + ca_server = Certsrv( + adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method + ) current_app.logger.info("Requesting CSR: {0}".format(csr)) current_app.logger.info("Issuer options: {0}".format(issuer_options)) - cert, req_id = ca_server.get_cert(csr, adcs_template, encoding='b64').decode('utf-8').replace('\r\n', '\n') - chain = ca_server.get_ca_cert(encoding='b64').decode('utf-8').replace('\r\n', '\n') + cert, req_id = ( + ca_server.get_cert(csr, adcs_template, encoding="b64") + .decode("utf-8") + .replace("\r\n", "\n") + ) + chain = ( + ca_server.get_ca_cert(encoding="b64").decode("utf-8").replace("\r\n", "\n") + ) return cert, chain, req_id def revoke_certificate(self, certificate, comments): - raise NotImplementedError('Not implemented\n', self, certificate, comments) + raise NotImplementedError("Not implemented\n", self, certificate, comments) def get_ordered_certificate(self, order_id): - raise NotImplementedError('Not implemented\n', self, order_id) + raise NotImplementedError("Not implemented\n", self, order_id) def canceled_ordered_certificate(self, pending_cert, **kwargs): - raise NotImplementedError('Not implemented\n', self, pending_cert, **kwargs) + raise NotImplementedError("Not implemented\n", self, pending_cert, **kwargs) class ADCSSourcePlugin(SourcePlugin): - title = 'ADCS' - slug = 
'adcs-source'
-    description = 'Enables the collecion of certificates'
+    title = "ADCS"
+    slug = "adcs-source"
+    description = "Enables the collection of certificates"
     version = ADCS.VERSION

-    author = 'sirferl'
-    author_url = 'https://github.com/sirferl/lemur'
+    author = "sirferl"
+    author_url = "https://github.com/sirferl/lemur"

    options = [
        {
-            'name': 'dummy',
-            'type': 'str',
-            'required': False,
-            'validation': '/^[0-9]{12,12}$/',
-            'helpMessage': 'Just to prevent error'
+            "name": "dummy",
+            "type": "str",
+            "required": False,
+            "validation": "/^[0-9]{12,12}$/",
+            "helpMessage": "Just to prevent error",
        }
    ]

    def get_certificates(self, options, **kwargs):
-        adcs_server = current_app.config.get('ADCS_SERVER')
-        adcs_user = current_app.config.get('ADCS_USER')
-        adcs_pwd = current_app.config.get('ADCS_PWD')
-        adcs_auth_method = current_app.config.get('ADCS_AUTH_METHOD')
-        adcs_start = current_app.config.get('ADCS_START')
-        adcs_stop = current_app.config.get('ADCS_STOP')
-        ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method)
+        adcs_server = current_app.config.get("ADCS_SERVER")
+        adcs_user = current_app.config.get("ADCS_USER")
+        adcs_pwd = current_app.config.get("ADCS_PWD")
+        adcs_auth_method = current_app.config.get("ADCS_AUTH_METHOD")
+        adcs_start = current_app.config.get("ADCS_START")
+        adcs_stop = current_app.config.get("ADCS_STOP")
+        ca_server = Certsrv(
+            adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method
+        )
        out_certlist = []
        for id in range(adcs_start, adcs_stop):
            try:
-                cert = ca_server.get_existing_cert(id, encoding='b64').decode('utf-8').replace('\r\n', '\n')
+                cert = (
+                    ca_server.get_existing_cert(id, encoding="b64")
+                    .decode("utf-8")
+                    .replace("\r\n", "\n")
+                )
            except Exception as err:
-                if '{0}'.format(err).find("CERTSRV_E_PROPERTY_EMPTY"):
+                if "{0}".format(err).find("CERTSRV_E_PROPERTY_EMPTY"):
                    # this error indicates end of certificate list(?), so we stop
                    break
                else:
@@ -101,16 +115,16 @@ class ADCSSourcePlugin(SourcePlugin):
            # loop through extensions to see if we find "TLS Web Server Authentication"
            for e_id in range(0, pubkey.get_extension_count() - 1):
                try:
-                    extension = '{0}'.format(pubkey.get_extension(e_id))
+                    extension = "{0}".format(pubkey.get_extension(e_id))
                except Exception:
-                    extensionn = ''
+                    extension = ""
                if extension.find("TLS Web Server Authentication") != -1:
-                    out_certlist.append({
-                        'name': format(pubkey.get_subject().CN),
-                        'body': cert})
+                    out_certlist.append(
+                        {"name": format(pubkey.get_subject().CN), "body": cert}
+                    )
                    break
        return out_certlist

    def get_endpoints(self, options, **kwargs):
        # There are no endpoints in the ADCS
-        raise NotImplementedError('Not implemented\n', self, options, **kwargs)
+        raise NotImplementedError("Not implemented\n", self, options, **kwargs)
diff --git a/lemur/plugins/lemur_atlas/__init__.py b/lemur/plugins/lemur_atlas/__init__.py
index 8ce5a7f3..f8afd7e3 100644
--- a/lemur/plugins/lemur_atlas/__init__.py
+++ b/lemur/plugins/lemur_atlas/__init__.py
@@ -1,5 +1,4 @@
 try:
-    VERSION = __import__('pkg_resources') \
-        .get_distribution(__name__).version
+    VERSION = __import__("pkg_resources").get_distribution(__name__).version
 except Exception as e:
-    VERSION = 'unknown'
+    VERSION = "unknown"
diff --git a/lemur/plugins/lemur_atlas/plugin.py b/lemur/plugins/lemur_atlas/plugin.py
index 09d4c9f9..7cf78ed2 100644
--- a/lemur/plugins/lemur_atlas/plugin.py
+++ b/lemur/plugins/lemur_atlas/plugin.py
@@ -26,44 +26,41 @@ def millis_since_epoch():

 class AtlasMetricPlugin(MetricPlugin):
-    title = 'Atlas'
-    slug = 'atlas-metric'
-    description = 'Adds support for sending key metrics to Atlas'
+    title = "Atlas"
+    slug = "atlas-metric"
+    description = "Adds support for sending key metrics to Atlas"
     version = atlas.VERSION

-    author = 'Kevin Glisson'
-    author_url = 'https://github.com/netflix/lemur'
+    author = "Kevin Glisson"
+    author_url = "https://github.com/netflix/lemur"

    options = [
        {
-            'name': 'sidecar_host',
-            'type': 'str',
-            'required': False,
-            'help_message': 'If no host is provided localhost is assumed',
-            'default': 'localhost'
+            "name": "sidecar_host",
+            "type": "str",
+            "required": False,
+            "help_message": "If no host is provided localhost is assumed",
+            "default": "localhost",
        },
-        {
-            'name': 'sidecar_port',
-            'type': 'int',
-            'required': False,
-            'default': 8078
-        }
+        {"name": "sidecar_port", "type": "int", "required": False, "default": 8078},
    ]

    metric_data = {}
    sidecar_host = None
    sidecar_port = None

-    def submit(self, metric_name, metric_type, metric_value, metric_tags=None, options=None):
+    def submit(
+        self, metric_name, metric_type, metric_value, metric_tags=None, options=None
+    ):
        if not options:
            options = self.options

        # TODO marshmallow schema?
-        valid_types = ['COUNTER', 'GAUGE', 'TIMER']
+        valid_types = ["COUNTER", "GAUGE", "TIMER"]
        if metric_type.upper() not in valid_types:
            raise Exception(
                "Invalid Metric Type for Atlas: '{metric}' choose from: {options}".format(
-                    metric=metric_type, options=','.join(valid_types)
+                    metric=metric_type, options=",".join(valid_types)
                )
            )
@@ -73,31 +70,35 @@ class AtlasMetricPlugin(MetricPlugin):
                "Invalid Metric Tags for Atlas: Tags must be in dict format"
            )

-        if metric_value == "NaN" or isinstance(metric_value, int) or isinstance(metric_value, float):
-            self.metric_data['value'] = metric_value
+        if (
+            metric_value == "NaN"
+            or isinstance(metric_value, int)
+            or isinstance(metric_value, float)
+        ):
+            self.metric_data["value"] = metric_value
        else:
-            raise Exception(
-                "Invalid Metric Value for Atlas: Metric must be a number"
-            )
+            raise Exception("Invalid Metric Value for Atlas: Metric must be a number")

-        self.metric_data['type'] = metric_type.upper()
-        self.metric_data['name'] = str(metric_name)
-        self.metric_data['tags'] = metric_tags
-        self.metric_data['timestamp'] = millis_since_epoch()
+        self.metric_data["type"] = metric_type.upper()
+        self.metric_data["name"] = str(metric_name)
+        self.metric_data["tags"] = metric_tags
+        self.metric_data["timestamp"] = millis_since_epoch()

-        self.sidecar_host = self.get_option('sidecar_host', options)
-        self.sidecar_port = self.get_option('sidecar_port', options)
+        self.sidecar_host = self.get_option("sidecar_host", options)
+        self.sidecar_port = self.get_option("sidecar_port", options)

        try:
            res = requests.post(
-                'http://{host}:{port}/metrics'.format(
-                    host=self.sidecar_host,
-                    port=self.sidecar_port),
-                data=json.dumps([self.metric_data])
+                "http://{host}:{port}/metrics".format(
+                    host=self.sidecar_host, port=self.sidecar_port
+                ),
+                data=json.dumps([self.metric_data]),
            )

            if res.status_code != 200:
-                current_app.logger.warning("Failed to publish altas metric. {0}".format(res.content))
+                current_app.logger.warning(
+                    "Failed to publish atlas metric. 
{0}".format(res.content) + ) except ConnectionError: current_app.logger.warning( diff --git a/lemur/plugins/lemur_aws/__init__.py b/lemur/plugins/lemur_aws/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_aws/__init__.py +++ b/lemur/plugins/lemur_aws/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_aws/ec2.py b/lemur/plugins/lemur_aws/ec2.py index 3bd20e60..04b42140 100644 --- a/lemur/plugins/lemur_aws/ec2.py +++ b/lemur/plugins/lemur_aws/ec2.py @@ -8,16 +8,16 @@ from lemur.plugins.lemur_aws.sts import sts_client -@sts_client('ec2') +@sts_client("ec2") def get_regions(**kwargs): - regions = kwargs['client'].describe_regions() - return [x['RegionName'] for x in regions['Regions']] + regions = kwargs["client"].describe_regions() + return [x["RegionName"] for x in regions["Regions"]] -@sts_client('ec2') +@sts_client("ec2") def get_all_instances(**kwargs): """ Fetches all instance objects for a given account and region. """ - paginator = kwargs['client'].get_paginator('describe_instances') + paginator = kwargs["client"].get_paginator("describe_instances") return paginator.paginate() diff --git a/lemur/plugins/lemur_aws/elb.py b/lemur/plugins/lemur_aws/elb.py index 618f75e8..1ab71b65 100644 --- a/lemur/plugins/lemur_aws/elb.py +++ b/lemur/plugins/lemur_aws/elb.py @@ -27,15 +27,14 @@ def retry_throttled(exception): raise exception except Exception as e: current_app.logger.error("ELB retry_throttled triggered", exc_info=True) - metrics.send('elb_retry', 'counter', 1, - metric_tags={"exception": e}) + metrics.send("elb_retry", "counter", 1, metric_tags={"exception": e}) sentry.captureException() if isinstance(exception, botocore.exceptions.ClientError): - if exception.response['Error']['Code'] == 'LoadBalancerNotFound': + if exception.response["Error"]["Code"] == "LoadBalancerNotFound": return False - if exception.response['Error']['Code'] == 'CertificateNotFound': + if exception.response["Error"]["Code"] == "CertificateNotFound": return False return True @@ -56,7 +55,7 @@ def is_valid(listener_tuple): :param listener_tuple: """ lb_port, i_port, lb_protocol, arn = listener_tuple - if lb_protocol.lower() in ['ssl', 'https']: + if lb_protocol.lower() in ["ssl", "https"]: if not arn: raise InvalidListener @@ -75,14 +74,14 @@ def get_all_elbs(**kwargs): while True: response = get_elbs(**kwargs) - elbs += response['LoadBalancerDescriptions'] + elbs += response["LoadBalancerDescriptions"] - if not response.get('NextMarker'): + if not response.get("NextMarker"): return elbs else: - kwargs.update(dict(Marker=response['NextMarker'])) + kwargs.update(dict(Marker=response["NextMarker"])) except Exception as e: # noqa - metrics.send('get_all_elbs_error', 'counter', 1) + metrics.send("get_all_elbs_error", "counter", 1) sentry.captureException() raise @@ -99,19 +98,19 @@ def get_all_elbs_v2(**kwargs): try: while True: response = get_elbs_v2(**kwargs) - elbs += response['LoadBalancers'] + elbs += response["LoadBalancers"] - if not response.get('NextMarker'): + if not response.get("NextMarker"): return elbs else: - kwargs.update(dict(Marker=response['NextMarker'])) + kwargs.update(dict(Marker=response["NextMarker"])) except Exception as e: # noqa - metrics.send('get_all_elbs_v2_error', 'counter', 1) + metrics.send("get_all_elbs_v2_error", "counter", 1) 
sentry.captureException() raise -@sts_client('elbv2') +@sts_client("elbv2") @retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def get_listener_arn_from_endpoint(endpoint_name, endpoint_port, **kwargs): """ @@ -121,38 +120,51 @@ def get_listener_arn_from_endpoint(endpoint_name, endpoint_port, **kwargs): :return: """ try: - client = kwargs.pop('client') + client = kwargs.pop("client") elbs = client.describe_load_balancers(Names=[endpoint_name]) - for elb in elbs['LoadBalancers']: - listeners = client.describe_listeners(LoadBalancerArn=elb['LoadBalancerArn']) - for listener in listeners['Listeners']: - if listener['Port'] == endpoint_port: - return listener['ListenerArn'] + for elb in elbs["LoadBalancers"]: + listeners = client.describe_listeners( + LoadBalancerArn=elb["LoadBalancerArn"] + ) + for listener in listeners["Listeners"]: + if listener["Port"] == endpoint_port: + return listener["ListenerArn"] except Exception as e: # noqa - metrics.send('get_listener_arn_from_endpoint_error', 'counter', 1, - metric_tags={"error": e, "endpoint_name": endpoint_name, "endpoint_port": endpoint_port}) - sentry.captureException(extra={"endpoint_name": str(endpoint_name), - "endpoint_port": str(endpoint_port)}) + metrics.send( + "get_listener_arn_from_endpoint_error", + "counter", + 1, + metric_tags={ + "error": e, + "endpoint_name": endpoint_name, + "endpoint_port": endpoint_port, + }, + ) + sentry.captureException( + extra={ + "endpoint_name": str(endpoint_name), + "endpoint_port": str(endpoint_port), + } + ) raise -@sts_client('elb') +@sts_client("elb") @retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def get_elbs(**kwargs): """ Fetches one page elb objects for a given account and region. 
""" try: - client = kwargs.pop('client') + client = kwargs.pop("client") return client.describe_load_balancers(**kwargs) except Exception as e: # noqa - metrics.send('get_elbs_error', 'counter', 1, - metric_tags={"error": e}) + metrics.send("get_elbs_error", "counter", 1, metric_tags={"error": e}) sentry.captureException() raise -@sts_client('elbv2') +@sts_client("elbv2") @retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def get_elbs_v2(**kwargs): """ @@ -162,16 +174,15 @@ def get_elbs_v2(**kwargs): :return: """ try: - client = kwargs.pop('client') + client = kwargs.pop("client") return client.describe_load_balancers(**kwargs) except Exception as e: # noqa - metrics.send('get_elbs_v2_error', 'counter', 1, - metric_tags={"error": e}) + metrics.send("get_elbs_v2_error", "counter", 1, metric_tags={"error": e}) sentry.captureException() raise -@sts_client('elbv2') +@sts_client("elbv2") @retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def describe_listeners_v2(**kwargs): """ @@ -181,16 +192,17 @@ def describe_listeners_v2(**kwargs): :return: """ try: - client = kwargs.pop('client') + client = kwargs.pop("client") return client.describe_listeners(**kwargs) except Exception as e: # noqa - metrics.send('describe_listeners_v2_error', 'counter', 1, - metric_tags={"error": e}) + metrics.send( + "describe_listeners_v2_error", "counter", 1, metric_tags={"error": e} + ) sentry.captureException() raise -@sts_client('elb') +@sts_client("elb") @retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs): """ @@ -201,17 +213,30 @@ def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs): """ try: - return kwargs['client'].describe_load_balancer_policies(LoadBalancerName=load_balancer_name, - PolicyNames=policy_names) + return kwargs["client"].describe_load_balancer_policies( + LoadBalancerName=load_balancer_name, PolicyNames=policy_names + ) except Exception as e: # noqa - metrics.send('describe_load_balancer_policies_error', 'counter', 1, - metric_tags={"load_balancer_name": load_balancer_name, "policy_names": policy_names, "error": e}) - sentry.captureException(extra={"load_balancer_name": str(load_balancer_name), - "policy_names": str(policy_names)}) + metrics.send( + "describe_load_balancer_policies_error", + "counter", + 1, + metric_tags={ + "load_balancer_name": load_balancer_name, + "policy_names": policy_names, + "error": e, + }, + ) + sentry.captureException( + extra={ + "load_balancer_name": str(load_balancer_name), + "policy_names": str(policy_names), + } + ) raise -@sts_client('elbv2') +@sts_client("elbv2") @retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def describe_ssl_policies_v2(policy_names, **kwargs): """ @@ -221,15 +246,19 @@ def describe_ssl_policies_v2(policy_names, **kwargs): :return: """ try: - return kwargs['client'].describe_ssl_policies(Names=policy_names) + return kwargs["client"].describe_ssl_policies(Names=policy_names) except Exception as e: # noqa - metrics.send('describe_ssl_policies_v2_error', 'counter', 1, - metric_tags={"policy_names": policy_names, "error": e}) + metrics.send( + "describe_ssl_policies_v2_error", + "counter", + 1, + metric_tags={"policy_names": policy_names, "error": e}, + ) sentry.captureException(extra={"policy_names": str(policy_names)}) raise -@sts_client('elb') +@sts_client("elb") 
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def describe_load_balancer_types(policies, **kwargs): """ @@ -238,10 +267,12 @@ def describe_load_balancer_types(policies, **kwargs): :param policies: :return: """ - return kwargs['client'].describe_load_balancer_policy_types(PolicyTypeNames=policies) + return kwargs["client"].describe_load_balancer_policy_types( + PolicyTypeNames=policies + ) -@sts_client('elb') +@sts_client("elb") @retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def attach_certificate(name, port, certificate_id, **kwargs): """ @@ -253,15 +284,19 @@ def attach_certificate(name, port, certificate_id, **kwargs): :param certificate_id: """ try: - return kwargs['client'].set_load_balancer_listener_ssl_certificate(LoadBalancerName=name, LoadBalancerPort=port, SSLCertificateId=certificate_id) + return kwargs["client"].set_load_balancer_listener_ssl_certificate( + LoadBalancerName=name, + LoadBalancerPort=port, + SSLCertificateId=certificate_id, + ) except botocore.exceptions.ClientError as e: - if e.response['Error']['Code'] == 'LoadBalancerNotFound': + if e.response["Error"]["Code"] == "LoadBalancerNotFound": current_app.logger.warning("Loadbalancer does not exist.") else: raise e -@sts_client('elbv2') +@sts_client("elbv2") @retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20) def attach_certificate_v2(listener_arn, port, certificates, **kwargs): """ @@ -273,9 +308,11 @@ def attach_certificate_v2(listener_arn, port, certificates, **kwargs): :param certificates: """ try: - return kwargs['client'].modify_listener(ListenerArn=listener_arn, Port=port, Certificates=certificates) + return kwargs["client"].modify_listener( + ListenerArn=listener_arn, Port=port, Certificates=certificates + ) except botocore.exceptions.ClientError as e: - if e.response['Error']['Code'] == 'LoadBalancerNotFound': + if e.response["Error"]["Code"] == "LoadBalancerNotFound": current_app.logger.warning("Loadbalancer does not exist.") else: raise e diff --git a/lemur/plugins/lemur_aws/iam.py b/lemur/plugins/lemur_aws/iam.py index 49816c2b..5a6b753d 100644 --- a/lemur/plugins/lemur_aws/iam.py +++ b/lemur/plugins/lemur_aws/iam.py @@ -21,10 +21,10 @@ def retry_throttled(exception): :return: """ if isinstance(exception, botocore.exceptions.ClientError): - if exception.response['Error']['Code'] == 'NoSuchEntity': + if exception.response["Error"]["Code"] == "NoSuchEntity": return False - metrics.send('iam_retry', 'counter', 1) + metrics.send("iam_retry", "counter", 1) return True @@ -47,11 +47,11 @@ def create_arn_from_cert(account_number, region, certificate_name): :return: """ return "arn:aws:iam::{account_number}:server-certificate/{certificate_name}".format( - account_number=account_number, - certificate_name=certificate_name) + account_number=account_number, certificate_name=certificate_name + ) -@sts_client('iam') +@sts_client("iam") @retry(retry_on_exception=retry_throttled, wait_fixed=2000) def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs): """ @@ -65,12 +65,12 @@ def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs): :return: """ assert isinstance(private_key, str) - client = kwargs.pop('client') + client = kwargs.pop("client") - if not path or path == '/': - path = '/' + if not path or path == "/": + path = "/" else: - name = name + '-' + path.strip('/') + name = name + "-" + path.strip("/") try: if cert_chain: @@ -79,21 +79,21 @@ def upload_cert(name, 
body, private_key, path, cert_chain=None, **kwargs): ServerCertificateName=name, CertificateBody=str(body), PrivateKey=str(private_key), - CertificateChain=str(cert_chain) + CertificateChain=str(cert_chain), ) else: return client.upload_server_certificate( Path=path, ServerCertificateName=name, CertificateBody=str(body), - PrivateKey=str(private_key) + PrivateKey=str(private_key), ) except botocore.exceptions.ClientError as e: - if e.response['Error']['Code'] != 'EntityAlreadyExists': + if e.response["Error"]["Code"] != "EntityAlreadyExists": raise e -@sts_client('iam') +@sts_client("iam") @retry(retry_on_exception=retry_throttled, wait_fixed=2000) def delete_cert(cert_name, **kwargs): """ @@ -102,15 +102,15 @@ def delete_cert(cert_name, **kwargs): :param cert_name: :return: """ - client = kwargs.pop('client') + client = kwargs.pop("client") try: client.delete_server_certificate(ServerCertificateName=cert_name) except botocore.exceptions.ClientError as e: - if e.response['Error']['Code'] != 'NoSuchEntity': + if e.response["Error"]["Code"] != "NoSuchEntity": raise e -@sts_client('iam') +@sts_client("iam") @retry(retry_on_exception=retry_throttled, wait_fixed=2000) def get_certificate(name, **kwargs): """ @@ -118,13 +118,13 @@ def get_certificate(name, **kwargs): :return: """ - client = kwargs.pop('client') - return client.get_server_certificate( - ServerCertificateName=name - )['ServerCertificate'] + client = kwargs.pop("client") + return client.get_server_certificate(ServerCertificateName=name)[ + "ServerCertificate" + ] -@sts_client('iam') +@sts_client("iam") @retry(retry_on_exception=retry_throttled, wait_fixed=2000) def get_certificates(**kwargs): """ @@ -132,7 +132,7 @@ def get_certificates(**kwargs): :param kwargs: :return: """ - client = kwargs.pop('client') + client = kwargs.pop("client") return client.list_server_certificates(**kwargs) @@ -141,16 +141,20 @@ def get_all_certificates(**kwargs): Use STS to fetch all of the SSL certificates from a given account """ certificates = [] - account_number = kwargs.get('account_number') + account_number = kwargs.get("account_number") while True: response = get_certificates(**kwargs) - metadata = response['ServerCertificateMetadataList'] + metadata = response["ServerCertificateMetadataList"] for m in metadata: - certificates.append(get_certificate(m['ServerCertificateName'], account_number=account_number)) + certificates.append( + get_certificate( + m["ServerCertificateName"], account_number=account_number + ) + ) - if not response.get('Marker'): + if not response.get("Marker"): return certificates else: - kwargs.update(dict(Marker=response['Marker'])) + kwargs.update(dict(Marker=response["Marker"])) diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 41bec31c..4414a62c 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -40,7 +40,7 @@ from lemur.plugins.lemur_aws import iam, s3, elb, ec2 def get_region_from_dns(dns): - return dns.split('.')[-4] + return dns.split(".")[-4] def format_elb_cipher_policy_v2(policy): @@ -52,10 +52,10 @@ def format_elb_cipher_policy_v2(policy): ciphers = [] name = None - for descr in policy['SslPolicies']: - name = descr['Name'] - for cipher in descr['Ciphers']: - ciphers.append(cipher['Name']) + for descr in policy["SslPolicies"]: + name = descr["Name"] + for cipher in descr["Ciphers"]: + ciphers.append(cipher["Name"]) return dict(name=name, ciphers=ciphers) @@ -68,14 +68,14 @@ def format_elb_cipher_policy(policy): """ ciphers = [] name = 
None - for descr in policy['PolicyDescriptions']: - for attr in descr['PolicyAttributeDescriptions']: - if attr['AttributeName'] == 'Reference-Security-Policy': - name = attr['AttributeValue'] + for descr in policy["PolicyDescriptions"]: + for attr in descr["PolicyAttributeDescriptions"]: + if attr["AttributeName"] == "Reference-Security-Policy": + name = attr["AttributeValue"] continue - if attr['AttributeValue'] == 'true': - ciphers.append(attr['AttributeName']) + if attr["AttributeValue"] == "true": + ciphers.append(attr["AttributeName"]) return dict(name=name, ciphers=ciphers) @@ -89,25 +89,31 @@ def get_elb_endpoints(account_number, region, elb_dict): :return: """ endpoints = [] - for listener in elb_dict['ListenerDescriptions']: - if not listener['Listener'].get('SSLCertificateId'): + for listener in elb_dict["ListenerDescriptions"]: + if not listener["Listener"].get("SSLCertificateId"): continue - if listener['Listener']['SSLCertificateId'] == 'Invalid-Certificate': + if listener["Listener"]["SSLCertificateId"] == "Invalid-Certificate": continue endpoint = dict( - name=elb_dict['LoadBalancerName'], - dnsname=elb_dict['DNSName'], - type='elb', - port=listener['Listener']['LoadBalancerPort'], - certificate_name=iam.get_name_from_arn(listener['Listener']['SSLCertificateId']) + name=elb_dict["LoadBalancerName"], + dnsname=elb_dict["DNSName"], + type="elb", + port=listener["Listener"]["LoadBalancerPort"], + certificate_name=iam.get_name_from_arn( + listener["Listener"]["SSLCertificateId"] + ), ) - if listener['PolicyNames']: - policy = elb.describe_load_balancer_policies(elb_dict['LoadBalancerName'], listener['PolicyNames'], - account_number=account_number, region=region) - endpoint['policy'] = format_elb_cipher_policy(policy) + if listener["PolicyNames"]: + policy = elb.describe_load_balancer_policies( + elb_dict["LoadBalancerName"], + listener["PolicyNames"], + account_number=account_number, + region=region, + ) + endpoint["policy"] = format_elb_cipher_policy(policy) current_app.logger.debug("Found new endpoint. 
Endpoint: {}".format(endpoint)) @@ -125,24 +131,29 @@ def get_elb_endpoints_v2(account_number, region, elb_dict): :return: """ endpoints = [] - listeners = elb.describe_listeners_v2(account_number=account_number, region=region, - LoadBalancerArn=elb_dict['LoadBalancerArn']) - for listener in listeners['Listeners']: - if not listener.get('Certificates'): + listeners = elb.describe_listeners_v2( + account_number=account_number, + region=region, + LoadBalancerArn=elb_dict["LoadBalancerArn"], + ) + for listener in listeners["Listeners"]: + if not listener.get("Certificates"): continue - for certificate in listener['Certificates']: + for certificate in listener["Certificates"]: endpoint = dict( - name=elb_dict['LoadBalancerName'], - dnsname=elb_dict['DNSName'], - type='elbv2', - port=listener['Port'], - certificate_name=iam.get_name_from_arn(certificate['CertificateArn']) + name=elb_dict["LoadBalancerName"], + dnsname=elb_dict["DNSName"], + type="elbv2", + port=listener["Port"], + certificate_name=iam.get_name_from_arn(certificate["CertificateArn"]), ) - if listener['SslPolicy']: - policy = elb.describe_ssl_policies_v2([listener['SslPolicy']], account_number=account_number, region=region) - endpoint['policy'] = format_elb_cipher_policy_v2(policy) + if listener["SslPolicy"]: + policy = elb.describe_ssl_policies_v2( + [listener["SslPolicy"]], account_number=account_number, region=region + ) + endpoint["policy"] = format_elb_cipher_policy_v2(policy) endpoints.append(endpoint) @@ -150,54 +161,70 @@ def get_elb_endpoints_v2(account_number, region, elb_dict): class AWSSourcePlugin(SourcePlugin): - title = 'AWS' - slug = 'aws-source' - description = 'Discovers all SSL certificates and ELB endpoints in an AWS account' + title = "AWS" + slug = "aws-source" + description = "Discovers all SSL certificates and ELB endpoints in an AWS account" version = aws.VERSION - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur" options = [ { - 'name': 'accountNumber', - 'type': 'str', - 'required': True, - 'validation': '/^[0-9]{12,12}$/', - 'helpMessage': 'Must be a valid AWS account number!', + "name": "accountNumber", + "type": "str", + "required": True, + "validation": "/^[0-9]{12,12}$/", + "helpMessage": "Must be a valid AWS account number!", }, { - 'name': 'regions', - 'type': 'str', - 'helpMessage': 'Comma separated list of regions to search in, if no region is specified we look in all regions.' 
+ "name": "regions", + "type": "str", + "helpMessage": "Comma separated list of regions to search in, if no region is specified we look in all regions.", }, ] def get_certificates(self, options, **kwargs): - cert_data = iam.get_all_certificates(account_number=self.get_option('accountNumber', options)) - return [dict(body=c['CertificateBody'], chain=c.get('CertificateChain'), - name=c['ServerCertificateMetadata']['ServerCertificateName']) for c in cert_data] + cert_data = iam.get_all_certificates( + account_number=self.get_option("accountNumber", options) + ) + return [ + dict( + body=c["CertificateBody"], + chain=c.get("CertificateChain"), + name=c["ServerCertificateMetadata"]["ServerCertificateName"], + ) + for c in cert_data + ] def get_endpoints(self, options, **kwargs): endpoints = [] - account_number = self.get_option('accountNumber', options) - regions = self.get_option('regions', options) + account_number = self.get_option("accountNumber", options) + regions = self.get_option("regions", options) if not regions: regions = ec2.get_regions(account_number=account_number) else: - regions = regions.split(',') + regions = regions.split(",") for region in regions: elbs = elb.get_all_elbs(account_number=account_number, region=region) - current_app.logger.info("Describing classic load balancers in {0}-{1}".format(account_number, region)) + current_app.logger.info( + "Describing classic load balancers in {0}-{1}".format( + account_number, region + ) + ) for e in elbs: endpoints.extend(get_elb_endpoints(account_number, region, e)) # fetch advanced ELBs elbs_v2 = elb.get_all_elbs_v2(account_number=account_number, region=region) - current_app.logger.info("Describing advanced load balancers in {0}-{1}".format(account_number, region)) + current_app.logger.info( + "Describing advanced load balancers in {0}-{1}".format( + account_number, region + ) + ) for e in elbs_v2: endpoints.extend(get_elb_endpoints_v2(account_number, region, e)) @@ -206,106 +233,125 @@ class AWSSourcePlugin(SourcePlugin): def update_endpoint(self, endpoint, certificate): options = endpoint.source.options - account_number = self.get_option('accountNumber', options) + account_number = self.get_option("accountNumber", options) # relies on the fact that region is included in DNS name region = get_region_from_dns(endpoint.dnsname) arn = iam.create_arn_from_cert(account_number, region, certificate.name) - if endpoint.type == 'elbv2': - listener_arn = elb.get_listener_arn_from_endpoint(endpoint.name, endpoint.port, - account_number=account_number, region=region) - elb.attach_certificate_v2(listener_arn, endpoint.port, [{'CertificateArn': arn}], - account_number=account_number, region=region) + if endpoint.type == "elbv2": + listener_arn = elb.get_listener_arn_from_endpoint( + endpoint.name, + endpoint.port, + account_number=account_number, + region=region, + ) + elb.attach_certificate_v2( + listener_arn, + endpoint.port, + [{"CertificateArn": arn}], + account_number=account_number, + region=region, + ) else: - elb.attach_certificate(endpoint.name, endpoint.port, arn, account_number=account_number, region=region) + elb.attach_certificate( + endpoint.name, + endpoint.port, + arn, + account_number=account_number, + region=region, + ) def clean(self, certificate, options, **kwargs): - account_number = self.get_option('accountNumber', options) + account_number = self.get_option("accountNumber", options) iam.delete_cert(certificate.name, account_number=account_number) class AWSDestinationPlugin(DestinationPlugin): - title = 'AWS' - slug = 
'aws-destination' - description = 'Allow the uploading of certificates to AWS IAM' + title = "AWS" + slug = "aws-destination" + description = "Allow the uploading of certificates to AWS IAM" version = aws.VERSION sync_as_source = True sync_as_source_name = AWSSourcePlugin.slug - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur" options = [ { - 'name': 'accountNumber', - 'type': 'str', - 'required': True, - 'validation': '[0-9]{12}', - 'helpMessage': 'Must be a valid AWS account number!', + "name": "accountNumber", + "type": "str", + "required": True, + "validation": "[0-9]{12}", + "helpMessage": "Must be a valid AWS account number!", }, { - 'name': 'path', - 'type': 'str', - 'default': '/', - 'helpMessage': 'Path to upload certificate.' - } + "name": "path", + "type": "str", + "default": "/", + "helpMessage": "Path to upload certificate.", + }, ] def upload(self, name, body, private_key, cert_chain, options, **kwargs): - iam.upload_cert(name, body, private_key, - self.get_option('path', options), - cert_chain=cert_chain, - account_number=self.get_option('accountNumber', options)) + iam.upload_cert( + name, + body, + private_key, + self.get_option("path", options), + cert_chain=cert_chain, + account_number=self.get_option("accountNumber", options), + ) def deploy(self, elb_name, account, region, certificate): pass class S3DestinationPlugin(ExportDestinationPlugin): - title = 'AWS-S3' - slug = 'aws-s3' - description = 'Allow the uploading of certificates to Amazon S3' + title = "AWS-S3" + slug = "aws-s3" + description = "Allow the uploading of certificates to Amazon S3" - author = 'Mikhail Khodorovskiy, Harm Weites ' - author_url = 'https://github.com/Netflix/lemur' + author = "Mikhail Khodorovskiy, Harm Weites " + author_url = "https://github.com/Netflix/lemur" additional_options = [ { - 'name': 'bucket', - 'type': 'str', - 'required': True, - 'validation': '[0-9a-z.-]{3,63}', - 'helpMessage': 'Must be a valid S3 bucket name!', + "name": "bucket", + "type": "str", + "required": True, + "validation": "[0-9a-z.-]{3,63}", + "helpMessage": "Must be a valid S3 bucket name!", }, { - 'name': 'accountNumber', - 'type': 'str', - 'required': True, - 'validation': '[0-9]{12}', - 'helpMessage': 'A valid AWS account number with permission to access S3', + "name": "accountNumber", + "type": "str", + "required": True, + "validation": "[0-9]{12}", + "helpMessage": "A valid AWS account number with permission to access S3", }, { - 'name': 'region', - 'type': 'str', - 'default': 'us-east-1', - 'required': False, - 'helpMessage': 'Region bucket exists', - 'available': ['us-east-1', 'us-west-2', 'eu-west-1'] + "name": "region", + "type": "str", + "default": "us-east-1", + "required": False, + "helpMessage": "Region bucket exists", + "available": ["us-east-1", "us-west-2", "eu-west-1"], }, { - 'name': 'encrypt', - 'type': 'bool', - 'required': False, - 'helpMessage': 'Enable server side encryption', - 'default': True + "name": "encrypt", + "type": "bool", + "required": False, + "helpMessage": "Enable server side encryption", + "default": True, }, { - 'name': 'prefix', - 'type': 'str', - 'required': False, - 'helpMessage': 'Must be a valid S3 object prefix!', - } + "name": "prefix", + "type": "str", + "required": False, + "helpMessage": "Must be a valid S3 object prefix!", + }, ] def __init__(self, *args, **kwargs): @@ -316,13 +362,12 @@ class S3DestinationPlugin(ExportDestinationPlugin): for ext, passphrase, 
data in files: s3.put( - self.get_option('bucket', options), - self.get_option('region', options), - '{prefix}/{name}.{extension}'.format( - prefix=self.get_option('prefix', options), - name=name, - extension=ext), + self.get_option("bucket", options), + self.get_option("region", options), + "{prefix}/{name}.{extension}".format( + prefix=self.get_option("prefix", options), name=name, extension=ext + ), data, - self.get_option('encrypt', options), - account_number=self.get_option('accountNumber', options) + self.get_option("encrypt", options), + account_number=self.get_option("accountNumber", options), ) diff --git a/lemur/plugins/lemur_aws/s3.py b/lemur/plugins/lemur_aws/s3.py index 2f8983e5..43faa28f 100644 --- a/lemur/plugins/lemur_aws/s3.py +++ b/lemur/plugins/lemur_aws/s3.py @@ -10,28 +10,26 @@ from flask import current_app from .sts import sts_client -@sts_client('s3', service_type='resource') +@sts_client("s3", service_type="resource") def put(bucket_name, region, prefix, data, encrypt, **kwargs): """ Use STS to write to an S3 bucket """ - bucket = kwargs['resource'].Bucket(bucket_name) - current_app.logger.debug('Persisting data to S3. Bucket: {0} Prefix: {1}'.format(bucket_name, prefix)) + bucket = kwargs["resource"].Bucket(bucket_name) + current_app.logger.debug( + "Persisting data to S3. Bucket: {0} Prefix: {1}".format(bucket_name, prefix) + ) # get data ready for writing if isinstance(data, str): - data = data.encode('utf-8') + data = data.encode("utf-8") if encrypt: bucket.put_object( Key=prefix, Body=data, - ACL='bucket-owner-full-control', - ServerSideEncryption='AES256' + ACL="bucket-owner-full-control", + ServerSideEncryption="AES256", ) else: - bucket.put_object( - Key=prefix, - Body=data, - ACL='bucket-owner-full-control' - ) + bucket.put_object(Key=prefix, Body=data, ACL="bucket-owner-full-control") diff --git a/lemur/plugins/lemur_aws/sts.py b/lemur/plugins/lemur_aws/sts.py index 6253ad7a..c1bd562c 100644 --- a/lemur/plugins/lemur_aws/sts.py +++ b/lemur/plugins/lemur_aws/sts.py @@ -13,46 +13,42 @@ from botocore.config import Config from flask import current_app -config = Config( - retries=dict( - max_attempts=20 - ) -) +config = Config(retries=dict(max_attempts=20)) -def sts_client(service, service_type='client'): +def sts_client(service, service_type="client"): def decorator(f): @wraps(f) def decorated_function(*args, **kwargs): - sts = boto3.client('sts', config=config) - arn = 'arn:aws:iam::{0}:role/{1}'.format( - kwargs.pop('account_number'), - current_app.config.get('LEMUR_INSTANCE_PROFILE', 'Lemur') + sts = boto3.client("sts", config=config) + arn = "arn:aws:iam::{0}:role/{1}".format( + kwargs.pop("account_number"), + current_app.config.get("LEMUR_INSTANCE_PROFILE", "Lemur"), ) # TODO add user specific information to RoleSessionName - role = sts.assume_role(RoleArn=arn, RoleSessionName='lemur') + role = sts.assume_role(RoleArn=arn, RoleSessionName="lemur") - if service_type == 'client': + if service_type == "client": client = boto3.client( service, - region_name=kwargs.pop('region', 'us-east-1'), - aws_access_key_id=role['Credentials']['AccessKeyId'], - aws_secret_access_key=role['Credentials']['SecretAccessKey'], - aws_session_token=role['Credentials']['SessionToken'], - config=config + region_name=kwargs.pop("region", "us-east-1"), + aws_access_key_id=role["Credentials"]["AccessKeyId"], + aws_secret_access_key=role["Credentials"]["SecretAccessKey"], + aws_session_token=role["Credentials"]["SessionToken"], + config=config, ) - kwargs['client'] = client - elif 
service_type == 'resource': + kwargs["client"] = client + elif service_type == "resource": resource = boto3.resource( service, - region_name=kwargs.pop('region', 'us-east-1'), - aws_access_key_id=role['Credentials']['AccessKeyId'], - aws_secret_access_key=role['Credentials']['SecretAccessKey'], - aws_session_token=role['Credentials']['SessionToken'], - config=config + region_name=kwargs.pop("region", "us-east-1"), + aws_access_key_id=role["Credentials"]["AccessKeyId"], + aws_secret_access_key=role["Credentials"]["SecretAccessKey"], + aws_session_token=role["Credentials"]["SessionToken"], + config=config, ) - kwargs['resource'] = resource + kwargs["resource"] = resource return f(*args, **kwargs) return decorated_function diff --git a/lemur/plugins/lemur_aws/tests/test_elb.py b/lemur/plugins/lemur_aws/tests/test_elb.py index 7facc4dd..4571b87a 100644 --- a/lemur/plugins/lemur_aws/tests/test_elb.py +++ b/lemur/plugins/lemur_aws/tests/test_elb.py @@ -6,23 +6,24 @@ from moto import mock_sts, mock_elb @mock_elb() def test_get_all_elbs(app, aws_credentials): from lemur.plugins.lemur_aws.elb import get_all_elbs - client = boto3.client('elb', region_name='us-east-1') - elbs = get_all_elbs(account_number='123456789012', region='us-east-1') + client = boto3.client("elb", region_name="us-east-1") + + elbs = get_all_elbs(account_number="123456789012", region="us-east-1") assert not elbs client.create_load_balancer( - LoadBalancerName='example-lb', + LoadBalancerName="example-lb", Listeners=[ { - 'Protocol': 'string', - 'LoadBalancerPort': 443, - 'InstanceProtocol': 'tcp', - 'InstancePort': 5443, - 'SSLCertificateId': 'tcp' + "Protocol": "string", + "LoadBalancerPort": 443, + "InstanceProtocol": "tcp", + "InstancePort": 5443, + "SSLCertificateId": "tcp", } - ] + ], ) - elbs = get_all_elbs(account_number='123456789012', region='us-east-1') + elbs = get_all_elbs(account_number="123456789012", region="us-east-1") assert elbs diff --git a/lemur/plugins/lemur_aws/tests/test_iam.py b/lemur/plugins/lemur_aws/tests/test_iam.py index deec221e..5932d52d 100644 --- a/lemur/plugins/lemur_aws/tests/test_iam.py +++ b/lemur/plugins/lemur_aws/tests/test_iam.py @@ -6,15 +6,21 @@ from lemur.tests.vectors import EXTERNAL_VALID_STR, SAN_CERT_KEY def test_get_name_from_arn(): from lemur.plugins.lemur_aws.iam import get_name_from_arn - arn = 'arn:aws:iam::123456789012:server-certificate/tttt2.netflixtest.net-NetflixInc-20150624-20150625' - assert get_name_from_arn(arn) == 'tttt2.netflixtest.net-NetflixInc-20150624-20150625' + + arn = "arn:aws:iam::123456789012:server-certificate/tttt2.netflixtest.net-NetflixInc-20150624-20150625" + assert ( + get_name_from_arn(arn) == "tttt2.netflixtest.net-NetflixInc-20150624-20150625" + ) -@pytest.mark.skipif(True, reason="this fails because moto is not currently returning what boto does") +@pytest.mark.skipif( + True, reason="this fails because moto is not currently returning what boto does" +) @mock_sts() @mock_iam() def test_get_all_server_certs(app): from lemur.plugins.lemur_aws.iam import upload_cert, get_all_certificates - upload_cert('123456789012', 'testCert', EXTERNAL_VALID_STR, SAN_CERT_KEY) - certs = get_all_certificates('123456789012') + + upload_cert("123456789012", "testCert", EXTERNAL_VALID_STR, SAN_CERT_KEY) + certs = get_all_certificates("123456789012") assert len(certs) == 1 diff --git a/lemur/plugins/lemur_aws/tests/test_plugin.py b/lemur/plugins/lemur_aws/tests/test_plugin.py index 95e4c9a4..dbad7b02 100644 --- a/lemur/plugins/lemur_aws/tests/test_plugin.py +++ 
b/lemur/plugins/lemur_aws/tests/test_plugin.py @@ -1,6 +1,5 @@ - def test_get_certificates(app): from lemur.plugins.base import plugins - p = plugins.get('aws-s3') + p = plugins.get("aws-s3") assert p diff --git a/lemur/plugins/lemur_cfssl/__init__.py b/lemur/plugins/lemur_cfssl/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_cfssl/__init__.py +++ b/lemur/plugins/lemur_cfssl/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_cfssl/plugin.py b/lemur/plugins/lemur_cfssl/plugin.py index 4bfefc85..ae16d168 100644 --- a/lemur/plugins/lemur_cfssl/plugin.py +++ b/lemur/plugins/lemur_cfssl/plugin.py @@ -24,13 +24,13 @@ from lemur.extensions import metrics class CfsslIssuerPlugin(IssuerPlugin): - title = 'CFSSL' - slug = 'cfssl-issuer' - description = 'Enables the creation of certificates by CFSSL private CA' + title = "CFSSL" + slug = "cfssl-issuer" + description = "Enables the creation of certificates by CFSSL private CA" version = cfssl.VERSION - author = 'Charles Hendrie' - author_url = 'https://github.com/netflix/lemur.git' + author = "Charles Hendrie" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): self.session = requests.Session() @@ -44,15 +44,17 @@ class CfsslIssuerPlugin(IssuerPlugin): :param issuer_options: :return: """ - current_app.logger.info("Requesting a new cfssl certificate with csr: {0}".format(csr)) + current_app.logger.info( + "Requesting a new cfssl certificate with csr: {0}".format(csr) + ) - url = "{0}{1}".format(current_app.config.get('CFSSL_URL'), '/api/v1/cfssl/sign') + url = "{0}{1}".format(current_app.config.get("CFSSL_URL"), "/api/v1/cfssl/sign") - data = {'certificate_request': csr} + data = {"certificate_request": csr} data = json.dumps(data) try: - hex_key = current_app.config.get('CFSSL_KEY') + hex_key = current_app.config.get("CFSSL_KEY") key = bytes.fromhex(hex_key) except (ValueError, NameError): # unable to find CFSSL_KEY in config, continue using normal sign method @@ -60,22 +62,33 @@ class CfsslIssuerPlugin(IssuerPlugin): else: data = data.encode() - token = base64.b64encode(hmac.new(key, data, digestmod=hashlib.sha256).digest()) + token = base64.b64encode( + hmac.new(key, data, digestmod=hashlib.sha256).digest() + ) data = base64.b64encode(data) - data = json.dumps({'token': token.decode('utf-8'), 'request': data.decode('utf-8')}) + data = json.dumps( + {"token": token.decode("utf-8"), "request": data.decode("utf-8")} + ) - url = "{0}{1}".format(current_app.config.get('CFSSL_URL'), '/api/v1/cfssl/authsign') - response = self.session.post(url, data=data.encode(encoding='utf_8', errors='strict')) + url = "{0}{1}".format( + current_app.config.get("CFSSL_URL"), "/api/v1/cfssl/authsign" + ) + response = self.session.post( + url, data=data.encode(encoding="utf_8", errors="strict") + ) if response.status_code > 399: - metrics.send('cfssl_create_certificate_failure', 'counter', 1) - raise Exception( - "Error creating cert. Please check your CFSSL API server") - response_json = json.loads(response.content.decode('utf_8')) - cert = response_json['result']['certificate'] + metrics.send("cfssl_create_certificate_failure", "counter", 1) + raise Exception("Error creating cert. 
Please check your CFSSL API server") + response_json = json.loads(response.content.decode("utf_8")) + cert = response_json["result"]["certificate"] parsed_cert = parse_certificate(cert) - metrics.send('cfssl_create_certificate_success', 'counter', 1) - return cert, current_app.config.get('CFSSL_INTERMEDIATE'), parsed_cert.serial_number + metrics.send("cfssl_create_certificate_success", "counter", 1) + return ( + cert, + current_app.config.get("CFSSL_INTERMEDIATE"), + parsed_cert.serial_number, + ) @staticmethod def create_authority(options): @@ -86,22 +99,26 @@ class CfsslIssuerPlugin(IssuerPlugin): :param options: :return: """ - role = {'username': '', 'password': '', 'name': 'cfssl'} - return current_app.config.get('CFSSL_ROOT'), "", [role] + role = {"username": "", "password": "", "name": "cfssl"} + return current_app.config.get("CFSSL_ROOT"), "", [role] def revoke_certificate(self, certificate, comments): """Revoke a CFSSL certificate.""" - base_url = current_app.config.get('CFSSL_URL') - create_url = '{0}/api/v1/cfssl/revoke'.format(base_url) - data = '{"serial": "' + certificate.external_id + '","authority_key_id": "' + \ - get_authority_key(certificate.body) + \ - '", "reason": "superseded"}' + base_url = current_app.config.get("CFSSL_URL") + create_url = "{0}/api/v1/cfssl/revoke".format(base_url) + data = ( + '{"serial": "' + + certificate.external_id + + '","authority_key_id": "' + + get_authority_key(certificate.body) + + '", "reason": "superseded"}' + ) current_app.logger.debug("Revoking cert: {0}".format(data)) response = self.session.post( - create_url, data=data.encode(encoding='utf_8', errors='strict')) + create_url, data=data.encode(encoding="utf_8", errors="strict") + ) if response.status_code > 399: - metrics.send('cfssl_revoke_certificate_failure', 'counter', 1) - raise Exception( - "Error revoking cert. Please check your CFSSL API server") - metrics.send('cfssl_revoke_certificate_success', 'counter', 1) + metrics.send("cfssl_revoke_certificate_failure", "counter", 1) + raise Exception("Error revoking cert. 
Please check your CFSSL API server") + metrics.send("cfssl_revoke_certificate_success", "counter", 1) return response.json() diff --git a/lemur/plugins/lemur_cfssl/tests/test_cfssl.py b/lemur/plugins/lemur_cfssl/tests/test_cfssl.py index ea8f0856..10fb9963 100644 --- a/lemur/plugins/lemur_cfssl/tests/test_cfssl.py +++ b/lemur/plugins/lemur_cfssl/tests/test_cfssl.py @@ -1,6 +1,5 @@ - def test_get_certificates(app): from lemur.plugins.base import plugins - p = plugins.get('cfssl-issuer') + p = plugins.get("cfssl-issuer") assert p diff --git a/lemur/plugins/lemur_cryptography/__init__.py b/lemur/plugins/lemur_cryptography/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_cryptography/__init__.py +++ b/lemur/plugins/lemur_cryptography/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_cryptography/plugin.py b/lemur/plugins/lemur_cryptography/plugin.py index 97060391..005f36f9 100644 --- a/lemur/plugins/lemur_cryptography/plugin.py +++ b/lemur/plugins/lemur_cryptography/plugin.py @@ -22,7 +22,7 @@ from lemur.certificates.service import create_csr def build_certificate_authority(options): - options['certificate_authority'] = True + options["certificate_authority"] = True csr, private_key = create_csr(**options) cert_pem, chain_cert_pem = issue_certificate(csr, options, private_key) @@ -30,24 +30,32 @@ def build_certificate_authority(options): def issue_certificate(csr, options, private_key=None): - csr = x509.load_pem_x509_csr(csr.encode('utf-8'), default_backend()) + csr = x509.load_pem_x509_csr(csr.encode("utf-8"), default_backend()) if options.get("parent"): # creating intermediate authorities will have options['parent'] to specify the issuer # creating certificates will have options['authority'] to specify the issuer # This works around that by making sure options['authority'] can be referenced for either - options['authority'] = options['parent'] + options["authority"] = options["parent"] if options.get("authority"): # Issue certificate signed by an existing lemur_certificates authority - issuer_subject = options['authority'].authority_certificate.subject - assert private_key is None, "Private would be ignored, authority key used instead" - private_key = options['authority'].authority_certificate.private_key - chain_cert_pem = options['authority'].authority_certificate.body - authority_key_identifier_public = options['authority'].authority_certificate.public_key - authority_key_identifier_subject = x509.SubjectKeyIdentifier.from_public_key(authority_key_identifier_public) + issuer_subject = options["authority"].authority_certificate.subject + assert ( + private_key is None + ), "Private would be ignored, authority key used instead" + private_key = options["authority"].authority_certificate.private_key + chain_cert_pem = options["authority"].authority_certificate.body + authority_key_identifier_public = options[ + "authority" + ].authority_certificate.public_key + authority_key_identifier_subject = x509.SubjectKeyIdentifier.from_public_key( + authority_key_identifier_public + ) authority_key_identifier_issuer = issuer_subject - authority_key_identifier_serial = int(options['authority'].authority_certificate.serial) + authority_key_identifier_serial = int( + options["authority"].authority_certificate.serial + ) # TODO figure out a better 
way to increment serial # New authorities have a value at options['serial_number'] that is being ignored here. serial = int(uuid.uuid4()) @@ -58,7 +66,7 @@ def issue_certificate(csr, options, private_key=None): authority_key_identifier_public = csr.public_key() authority_key_identifier_subject = None authority_key_identifier_issuer = csr.subject - authority_key_identifier_serial = options['serial_number'] + authority_key_identifier_serial = options["serial_number"] # TODO figure out a better way to increment serial serial = int(uuid.uuid4()) @@ -68,19 +76,20 @@ def issue_certificate(csr, options, private_key=None): issuer_name=issuer_subject, subject_name=csr.subject, public_key=csr.public_key(), - not_valid_before=options['validity_start'], - not_valid_after=options['validity_end'], + not_valid_before=options["validity_start"], + not_valid_after=options["validity_end"], serial_number=serial, - extensions=extensions) + extensions=extensions, + ) - for k, v in options.get('extensions', {}).items(): - if k == 'authority_key_identifier': + for k, v in options.get("extensions", {}).items(): + if k == "authority_key_identifier": # One or both of these options may be present inside the aki extension (authority_key_identifier, authority_identifier) = (False, False) for k2, v2 in v.items(): - if k2 == 'use_key_identifier' and v2: + if k2 == "use_key_identifier" and v2: authority_key_identifier = True - if k2 == 'use_authority_cert' and v2: + if k2 == "use_authority_cert" and v2: authority_identifier = True if authority_key_identifier: if authority_key_identifier_subject: @@ -89,13 +98,21 @@ def issue_certificate(csr, options, private_key=None): # but the digest of the ski is at just ski.digest. Until that library is fixed, # this function won't work. The second line has the same result. 
# aki = x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(authority_key_identifier_subject) - aki = x509.AuthorityKeyIdentifier(authority_key_identifier_subject.digest, None, None) + aki = x509.AuthorityKeyIdentifier( + authority_key_identifier_subject.digest, None, None + ) else: - aki = x509.AuthorityKeyIdentifier.from_issuer_public_key(authority_key_identifier_public) + aki = x509.AuthorityKeyIdentifier.from_issuer_public_key( + authority_key_identifier_public + ) elif authority_identifier: - aki = x509.AuthorityKeyIdentifier(None, [x509.DirectoryName(authority_key_identifier_issuer)], authority_key_identifier_serial) + aki = x509.AuthorityKeyIdentifier( + None, + [x509.DirectoryName(authority_key_identifier_issuer)], + authority_key_identifier_serial, + ) builder = builder.add_extension(aki, critical=False) - if k == 'certificate_info_access': + if k == "certificate_info_access": # FIXME: Implement the AuthorityInformationAccess extension # descriptions = [ # x509.AccessDescription(x509.oid.AuthorityInformationAccessOID.OCSP, x509.UniformResourceIdentifier(u"http://FIXME")), @@ -108,7 +125,7 @@ def issue_certificate(csr, options, private_key=None): # critical=False # ) pass - if k == 'crl_distribution_points': + if k == "crl_distribution_points": # FIXME: Implement the CRLDistributionPoints extension # FIXME: Not implemented in lemur/schemas.py yet https://github.com/Netflix/lemur/issues/662 pass @@ -116,20 +133,24 @@ def issue_certificate(csr, options, private_key=None): private_key = parse_private_key(private_key) cert = builder.sign(private_key, hashes.SHA256(), default_backend()) - cert_pem = cert.public_bytes( - encoding=serialization.Encoding.PEM - ).decode('utf-8') + cert_pem = cert.public_bytes(encoding=serialization.Encoding.PEM).decode("utf-8") return cert_pem, chain_cert_pem def normalize_extensions(csr): try: - san_extension = csr.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME) + san_extension = csr.extensions.get_extension_for_oid( + x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME + ) san_dnsnames = san_extension.value.get_values_for_type(x509.DNSName) except x509.extensions.ExtensionNotFound: san_dnsnames = [] - san_extension = x509.Extension(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME, True, x509.SubjectAlternativeName(san_dnsnames)) + san_extension = x509.Extension( + x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME, + True, + x509.SubjectAlternativeName(san_dnsnames), + ) common_name = csr.subject.get_attributes_for_oid(x509.oid.NameOID.COMMON_NAME) common_name = common_name[0].value @@ -149,7 +170,11 @@ def normalize_extensions(csr): for san in san_extension.value: general_names.append(san) - san_extension = x509.Extension(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME, True, x509.SubjectAlternativeName(general_names)) + san_extension = x509.Extension( + x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME, + True, + x509.SubjectAlternativeName(general_names), + ) # Remove original san extension from CSR and add new SAN extension extensions = list(filter(filter_san_extensions, csr.extensions._extensions)) @@ -166,13 +191,13 @@ def filter_san_extensions(ext): class CryptographyIssuerPlugin(IssuerPlugin): - title = 'Cryptography' - slug = 'cryptography-issuer' - description = 'Enables the creation and signing of self-signed certificates' + title = "Cryptography" + slug = "cryptography-issuer" + description = "Enables the creation and signing of self-signed certificates" version = cryptography_issuer.VERSION - author = 'Kevin 
Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def create_certificate(self, csr, options): """ @@ -182,7 +207,9 @@ class CryptographyIssuerPlugin(IssuerPlugin): :param options: :return: :raise Exception: """ - current_app.logger.debug("Issuing new cryptography certificate with options: {0}".format(options)) + current_app.logger.debug( + "Issuing new cryptography certificate with options: {0}".format(options) + ) cert_pem, chain_cert_pem = issue_certificate(csr, options) return cert_pem, chain_cert_pem, None @@ -195,10 +222,12 @@ class CryptographyIssuerPlugin(IssuerPlugin): :param options: :return: """ - current_app.logger.debug("Issuing new cryptography authority with options: {0}".format(options)) + current_app.logger.debug( + "Issuing new cryptography authority with options: {0}".format(options) + ) cert_pem, private_key, chain_cert_pem = build_certificate_authority(options) roles = [ - {'username': '', 'password': '', 'name': options['name'] + '_admin'}, - {'username': '', 'password': '', 'name': options['name'] + '_operator'} + {"username": "", "password": "", "name": options["name"] + "_admin"}, + {"username": "", "password": "", "name": options["name"] + "_operator"}, ] return cert_pem, private_key, chain_cert_pem, roles diff --git a/lemur/plugins/lemur_cryptography/tests/test_cryptography.py b/lemur/plugins/lemur_cryptography/tests/test_cryptography.py index 8a81bf6c..7f1777fc 100644 --- a/lemur/plugins/lemur_cryptography/tests/test_cryptography.py +++ b/lemur/plugins/lemur_cryptography/tests/test_cryptography.py @@ -5,24 +5,24 @@ def test_build_certificate_authority(): from lemur.plugins.lemur_cryptography.plugin import build_certificate_authority options = { - 'key_type': 'RSA2048', - 'country': 'US', - 'state': 'CA', - 'location': 'Example place', - 'organization': 'Example, Inc.', - 'organizational_unit': 'Example Unit', - 'common_name': 'Example ROOT', - 'validity_start': arrow.get('2016-12-01').datetime, - 'validity_end': arrow.get('2016-12-02').datetime, - 'first_serial': 1, - 'serial_number': 1, - 'owner': 'owner@example.com' + "key_type": "RSA2048", + "country": "US", + "state": "CA", + "location": "Example place", + "organization": "Example, Inc.", + "organizational_unit": "Example Unit", + "common_name": "Example ROOT", + "validity_start": arrow.get("2016-12-01").datetime, + "validity_end": arrow.get("2016-12-02").datetime, + "first_serial": 1, + "serial_number": 1, + "owner": "owner@example.com", } cert_pem, private_key_pem, chain_cert_pem = build_certificate_authority(options) assert cert_pem assert private_key_pem - assert chain_cert_pem == '' + assert chain_cert_pem == "" def test_issue_certificate(authority): @@ -30,10 +30,10 @@ def test_issue_certificate(authority): from lemur.plugins.lemur_cryptography.plugin import issue_certificate options = { - 'common_name': 'Example.com', - 'authority': authority, - 'validity_start': arrow.get('2016-12-01').datetime, - 'validity_end': arrow.get('2016-12-02').datetime + "common_name": "Example.com", + "authority": authority, + "validity_start": arrow.get("2016-12-01").datetime, + "validity_end": arrow.get("2016-12-02").datetime, } cert_pem, chain_cert_pem = issue_certificate(CSR_STR, options) assert cert_pem diff --git a/lemur/plugins/lemur_csr/__init__.py b/lemur/plugins/lemur_csr/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_csr/__init__.py +++ b/lemur/plugins/lemur_csr/__init__.py @@ -1,5 +1,4 @@ try: - 
VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_csr/plugin.py b/lemur/plugins/lemur_csr/plugin.py index 13f42084..776dfce5 100644 --- a/lemur/plugins/lemur_csr/plugin.py +++ b/lemur/plugins/lemur_csr/plugin.py @@ -43,38 +43,30 @@ def create_csr(cert, chain, csr_tmp, key): assert isinstance(key, str) with mktempfile() as key_tmp: - with open(key_tmp, 'w') as f: + with open(key_tmp, "w") as f: f.write(key) with mktempfile() as cert_tmp: - with open(cert_tmp, 'w') as f: + with open(cert_tmp, "w") as f: if chain: f.writelines([cert.strip() + "\n", chain.strip() + "\n"]) else: f.writelines([cert.strip() + "\n"]) - output = subprocess.check_output([ - "openssl", - "x509", - "-x509toreq", - "-in", cert_tmp, - "-signkey", key_tmp, - ]) - subprocess.run([ - "openssl", - "req", - "-out", csr_tmp - ], input=output) + output = subprocess.check_output( + ["openssl", "x509", "-x509toreq", "-in", cert_tmp, "-signkey", key_tmp] + ) + subprocess.run(["openssl", "req", "-out", csr_tmp], input=output) class CSRExportPlugin(ExportPlugin): - title = 'CSR' - slug = 'openssl-csr' - description = 'Exports a CSR' + title = "CSR" + slug = "openssl-csr" + description = "Exports a CSR" version = csr.VERSION - author = 'jchuong' - author_url = 'https://github.com/jchuong' + author = "jchuong" + author_url = "https://github.com/jchuong" def export(self, body, chain, key, options, **kwargs): """ @@ -93,7 +85,7 @@ class CSRExportPlugin(ExportPlugin): create_csr(body, chain, output_tmp, key) extension = "csr" - with open(output_tmp, 'rb') as f: + with open(output_tmp, "rb") as f: raw = f.read() # passphrase is None return extension, None, raw diff --git a/lemur/plugins/lemur_csr/tests/test_csr_export.py b/lemur/plugins/lemur_csr/tests/test_csr_export.py index 9b233a4e..0b55aefe 100644 --- a/lemur/plugins/lemur_csr/tests/test_csr_export.py +++ b/lemur/plugins/lemur_csr/tests/test_csr_export.py @@ -4,7 +4,8 @@ from lemur.tests.vectors import INTERNAL_PRIVATE_KEY_A_STR, INTERNAL_CERTIFICATE def test_export_certificate_to_csr(app): from lemur.plugins.base import plugins - p = plugins.get('openssl-csr') + + p = plugins.get("openssl-csr") options = [] with pytest.raises(Exception): p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options) diff --git a/lemur/plugins/lemur_digicert/__init__.py b/lemur/plugins/lemur_digicert/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_digicert/__init__.py +++ b/lemur/plugins/lemur_digicert/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_digicert/plugin.py b/lemur/plugins/lemur_digicert/plugin.py index a65c02ff..c5b01cc4 100644 --- a/lemur/plugins/lemur_digicert/plugin.py +++ b/lemur/plugins/lemur_digicert/plugin.py @@ -40,7 +40,7 @@ def log_status_code(r, *args, **kwargs): :param kwargs: :return: """ - metrics.send('digicert_status_code_{}'.format(r.status_code), 'counter', 1) + metrics.send("digicert_status_code_{}".format(r.status_code), "counter", 1) def signature_hash(signing_algorithm): @@ -50,18 +50,18 @@ def signature_hash(signing_algorithm): :return: str digicert specific algorithm string """ if not signing_algorithm: - return 
current_app.config.get('DIGICERT_DEFAULT_SIGNING_ALGORITHM', 'sha256') + return current_app.config.get("DIGICERT_DEFAULT_SIGNING_ALGORITHM", "sha256") - if signing_algorithm == 'sha256WithRSA': - return 'sha256' + if signing_algorithm == "sha256WithRSA": + return "sha256" - elif signing_algorithm == 'sha384WithRSA': - return 'sha384' + elif signing_algorithm == "sha384WithRSA": + return "sha384" - elif signing_algorithm == 'sha512WithRSA': - return 'sha512' + elif signing_algorithm == "sha512WithRSA": + return "sha512" - raise Exception('Unsupported signing algorithm.') + raise Exception("Unsupported signing algorithm.") def determine_validity_years(end_date): @@ -79,8 +79,9 @@ def determine_validity_years(end_date): elif end_date < now.replace(years=+3): return 3 - raise Exception("DigiCert issued certificates cannot exceed three" - " years in validity") + raise Exception( + "DigiCert issued certificates cannot exceed three" " years in validity" + ) def get_additional_names(options): @@ -92,8 +93,8 @@ def get_additional_names(options): """ names = [] # add SANs if present - if options.get('extensions'): - for san in options['extensions']['sub_alt_names']['names']: + if options.get("extensions"): + for san in options["extensions"]["sub_alt_names"]["names"]: if isinstance(san, x509.DNSName): names.append(san.value) return names @@ -106,31 +107,33 @@ def map_fields(options, csr): :param csr: :return: dict or valid DigiCert options """ - if not options.get('validity_years'): - if not options.get('validity_end'): - options['validity_years'] = current_app.config.get('DIGICERT_DEFAULT_VALIDITY', 1) + if not options.get("validity_years"): + if not options.get("validity_end"): + options["validity_years"] = current_app.config.get( + "DIGICERT_DEFAULT_VALIDITY", 1 + ) - data = dict(certificate={ - "common_name": options['common_name'], - "csr": csr, - "signature_hash": - signature_hash(options.get('signing_algorithm')), - }, organization={ - "id": current_app.config.get("DIGICERT_ORG_ID") - }) + data = dict( + certificate={ + "common_name": options["common_name"], + "csr": csr, + "signature_hash": signature_hash(options.get("signing_algorithm")), + }, + organization={"id": current_app.config.get("DIGICERT_ORG_ID")}, + ) - data['certificate']['dns_names'] = get_additional_names(options) + data["certificate"]["dns_names"] = get_additional_names(options) - if options.get('validity_years'): - data['validity_years'] = options['validity_years'] + if options.get("validity_years"): + data["validity_years"] = options["validity_years"] else: - data['custom_expiration_date'] = options['validity_end'].format('YYYY-MM-DD') + data["custom_expiration_date"] = options["validity_end"].format("YYYY-MM-DD") - if current_app.config.get('DIGICERT_PRIVATE', False): - if 'product' in data: - data['product']['type_hint'] = 'private' + if current_app.config.get("DIGICERT_PRIVATE", False): + if "product" in data: + data["product"]["type_hint"] = "private" else: - data['product'] = dict(type_hint='private') + data["product"] = dict(type_hint="private") return data @@ -143,26 +146,30 @@ def map_cis_fields(options, csr): :param csr: :return: """ - if not options.get('validity_years'): - if not options.get('validity_end'): - options['validity_end'] = arrow.utcnow().replace(years=current_app.config.get('DIGICERT_DEFAULT_VALIDITY', 1)) - options['validity_years'] = determine_validity_years(options['validity_end']) + if not options.get("validity_years"): + if not options.get("validity_end"): + options["validity_end"] = 
arrow.utcnow().replace( + years=current_app.config.get("DIGICERT_DEFAULT_VALIDITY", 1) + ) + options["validity_years"] = determine_validity_years(options["validity_end"]) else: - options['validity_end'] = arrow.utcnow().replace(years=options['validity_years']) + options["validity_end"] = arrow.utcnow().replace( + years=options["validity_years"] + ) data = { - "profile_name": current_app.config.get('DIGICERT_CIS_PROFILE_NAME'), - "common_name": options['common_name'], + "profile_name": current_app.config.get("DIGICERT_CIS_PROFILE_NAME"), + "common_name": options["common_name"], "additional_dns_names": get_additional_names(options), "csr": csr, - "signature_hash": signature_hash(options.get('signing_algorithm')), + "signature_hash": signature_hash(options.get("signing_algorithm")), "validity": { - "valid_to": options['validity_end'].format('YYYY-MM-DDTHH:MM') + 'Z' + "valid_to": options["validity_end"].format("YYYY-MM-DDTHH:MM") + "Z" }, "organization": { - "name": options['organization'], - "units": [options['organizational_unit']] - } + "name": options["organization"], + "units": [options["organizational_unit"]], + }, } return data @@ -175,7 +182,7 @@ def handle_response(response): :return: """ if response.status_code > 399: - raise Exception(response.json()['errors'][0]['message']) + raise Exception(response.json()["errors"][0]["message"]) return response.json() @@ -197,19 +204,17 @@ def get_certificate_id(session, base_url, order_id): """Retrieve certificate order id from Digicert API.""" order_url = "{0}/services/v2/order/certificate/{1}".format(base_url, order_id) response_data = handle_response(session.get(order_url)) - if response_data['status'] != 'issued': + if response_data["status"] != "issued": raise Exception("Order not in issued state.") - return response_data['certificate']['id'] + return response_data["certificate"]["id"] @retry(stop_max_attempt_number=10, wait_fixed=10000) def get_cis_certificate(session, base_url, order_id): """Retrieve certificate order id from Digicert API.""" - certificate_url = '{0}/platform/cis/certificate/{1}'.format(base_url, order_id) - session.headers.update( - {'Accept': 'application/x-pem-file'} - ) + certificate_url = "{0}/platform/cis/certificate/{1}".format(base_url, order_id) + session.headers.update({"Accept": "application/x-pem-file"}) response = session.get(certificate_url) if response.status_code == 404: @@ -220,29 +225,30 @@ def get_cis_certificate(session, base_url, order_id): class DigiCertSourcePlugin(SourcePlugin): """Wrap the Digicert Certifcate API.""" - title = 'DigiCert' - slug = 'digicert-source' + + title = "DigiCert" + slug = "digicert-source" description = "Enables the use of Digicert as a source of existing certificates." 
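# --- Editorial aside (illustration only, not part of this patch) ---
# The @retry decorators on the two helpers above appear to come from the
# `retrying` package: raising inside the wrapped function is what drives the
# polling (up to 10 attempts, 10 seconds apart) until DigiCert reports the
# order or certificate as ready. A minimal sketch of that pattern, with a fake
# poll in place of the real API call and a short wait so it runs quickly:
from retrying import retry

state = {"attempts": 0}

@retry(stop_max_attempt_number=10, wait_fixed=10)  # wait_fixed is in milliseconds
def poll_order():
    state["attempts"] += 1
    if state["attempts"] < 3:
        # Mirrors "Order not in issued state." -- raising triggers another attempt.
        raise Exception("not ready yet")
    return "issued"

assert poll_order() == "issued"
assert state["attempts"] == 3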
version = digicert.VERSION - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): """Initialize source with appropriate details.""" required_vars = [ - 'DIGICERT_API_KEY', - 'DIGICERT_URL', - 'DIGICERT_ORG_ID', - 'DIGICERT_ROOT', + "DIGICERT_API_KEY", + "DIGICERT_URL", + "DIGICERT_ORG_ID", + "DIGICERT_ROOT", ] validate_conf(current_app, required_vars) self.session = requests.Session() self.session.headers.update( { - 'X-DC-DEVKEY': current_app.config['DIGICERT_API_KEY'], - 'Content-Type': 'application/json' + "X-DC-DEVKEY": current_app.config["DIGICERT_API_KEY"], + "Content-Type": "application/json", } ) @@ -256,22 +262,23 @@ class DigiCertSourcePlugin(SourcePlugin): class DigiCertIssuerPlugin(IssuerPlugin): """Wrap the Digicert Issuer API.""" - title = 'DigiCert' - slug = 'digicert-issuer' + + title = "DigiCert" + slug = "digicert-issuer" description = "Enables the creation of certificates by the DigiCert REST API." version = digicert.VERSION - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): """Initialize the issuer with the appropriate details.""" required_vars = [ - 'DIGICERT_API_KEY', - 'DIGICERT_URL', - 'DIGICERT_ORG_ID', - 'DIGICERT_ORDER_TYPE', - 'DIGICERT_ROOT', + "DIGICERT_API_KEY", + "DIGICERT_URL", + "DIGICERT_ORG_ID", + "DIGICERT_ORDER_TYPE", + "DIGICERT_ROOT", ] validate_conf(current_app, required_vars) @@ -279,8 +286,8 @@ class DigiCertIssuerPlugin(IssuerPlugin): self.session = requests.Session() self.session.headers.update( { - 'X-DC-DEVKEY': current_app.config['DIGICERT_API_KEY'], - 'Content-Type': 'application/json' + "X-DC-DEVKEY": current_app.config["DIGICERT_API_KEY"], + "Content-Type": "application/json", } ) @@ -295,69 +302,93 @@ class DigiCertIssuerPlugin(IssuerPlugin): :param issuer_options: :return: :raise Exception: """ - base_url = current_app.config.get('DIGICERT_URL') - cert_type = current_app.config.get('DIGICERT_ORDER_TYPE') + base_url = current_app.config.get("DIGICERT_URL") + cert_type = current_app.config.get("DIGICERT_ORDER_TYPE") # make certificate request - determinator_url = "{0}/services/v2/order/certificate/{1}".format(base_url, cert_type) + determinator_url = "{0}/services/v2/order/certificate/{1}".format( + base_url, cert_type + ) data = map_fields(issuer_options, csr) response = self.session.post(determinator_url, data=json.dumps(data)) if response.status_code > 399: - raise Exception(response.json()['errors'][0]['message']) + raise Exception(response.json()["errors"][0]["message"]) - order_id = response.json()['id'] + order_id = response.json()["id"] certificate_id = get_certificate_id(self.session, base_url, order_id) # retrieve certificate - certificate_url = "{0}/services/v2/certificate/{1}/download/format/pem_all".format(base_url, certificate_id) - end_entity, intermediate, root = pem.parse(self.session.get(certificate_url).content) - return "\n".join(str(end_entity).splitlines()), "\n".join(str(intermediate).splitlines()), certificate_id + certificate_url = "{0}/services/v2/certificate/{1}/download/format/pem_all".format( + base_url, certificate_id + ) + end_entity, intermediate, root = pem.parse( + self.session.get(certificate_url).content + ) + return ( + "\n".join(str(end_entity).splitlines()), + "\n".join(str(intermediate).splitlines()), + 
certificate_id, + ) def revoke_certificate(self, certificate, comments): """Revoke a Digicert certificate.""" - base_url = current_app.config.get('DIGICERT_URL') + base_url = current_app.config.get("DIGICERT_URL") # make certificate revoke request - create_url = '{0}/services/v2/certificate/{1}/revoke'.format(base_url, certificate.external_id) - metrics.send('digicert_revoke_certificate', 'counter', 1) - response = self.session.put(create_url, data=json.dumps({'comments': comments})) + create_url = "{0}/services/v2/certificate/{1}/revoke".format( + base_url, certificate.external_id + ) + metrics.send("digicert_revoke_certificate", "counter", 1) + response = self.session.put(create_url, data=json.dumps({"comments": comments})) return handle_response(response) def get_ordered_certificate(self, pending_cert): """ Retrieve a certificate via order id """ order_id = pending_cert.external_id - base_url = current_app.config.get('DIGICERT_URL') + base_url = current_app.config.get("DIGICERT_URL") try: certificate_id = get_certificate_id(self.session, base_url, order_id) except Exception as ex: return None - certificate_url = "{0}/services/v2/certificate/{1}/download/format/pem_all".format(base_url, certificate_id) - end_entity, intermediate, root = pem.parse(self.session.get(certificate_url).content) - cert = {'body': "\n".join(str(end_entity).splitlines()), - 'chain': "\n".join(str(intermediate).splitlines()), - 'external_id': str(certificate_id)} + certificate_url = "{0}/services/v2/certificate/{1}/download/format/pem_all".format( + base_url, certificate_id + ) + end_entity, intermediate, root = pem.parse( + self.session.get(certificate_url).content + ) + cert = { + "body": "\n".join(str(end_entity).splitlines()), + "chain": "\n".join(str(intermediate).splitlines()), + "external_id": str(certificate_id), + } return cert def cancel_ordered_certificate(self, pending_cert, **kwargs): """ Set the certificate order to canceled """ - base_url = current_app.config.get('DIGICERT_URL') - api_url = "{0}/services/v2/order/certificate/{1}/status".format(base_url, pending_cert.external_id) - payload = { - 'status': 'CANCELED', - 'note': kwargs.get('note') - } + base_url = current_app.config.get("DIGICERT_URL") + api_url = "{0}/services/v2/order/certificate/{1}/status".format( + base_url, pending_cert.external_id + ) + payload = {"status": "CANCELED", "note": kwargs.get("note")} response = self.session.put(api_url, data=json.dumps(payload)) if response.status_code == 404: # not well documented by Digicert, but either the certificate does not exist or we # don't own that order (someone else's order id!). 
Either way, we can just ignore it # and have it removed from Lemur current_app.logger.warning( - "Digicert Plugin tried to cancel pending certificate {0} but it does not exist!".format(pending_cert.name)) + "Digicert Plugin tried to cancel pending certificate {0} but it does not exist!".format( + pending_cert.name + ) + ) elif response.status_code != 204: - current_app.logger.debug("{0} code {1}".format(response.status_code, response.content)) - raise Exception("Failed to cancel pending certificate {0}".format(pending_cert.name)) + current_app.logger.debug( + "{0} code {1}".format(response.status_code, response.content) + ) + raise Exception( + "Failed to cancel pending certificate {0}".format(pending_cert.name) + ) @staticmethod def create_authority(options): @@ -370,72 +401,81 @@ class DigiCertIssuerPlugin(IssuerPlugin): :param options: :return: """ - role = {'username': '', 'password': '', 'name': 'digicert'} - return current_app.config.get('DIGICERT_ROOT'), "", [role] + role = {"username": "", "password": "", "name": "digicert"} + return current_app.config.get("DIGICERT_ROOT"), "", [role] class DigiCertCISSourcePlugin(SourcePlugin): """Wrap the Digicert CIS Certifcate API.""" - title = 'DigiCert' - slug = 'digicert-cis-source' + + title = "DigiCert" + slug = "digicert-cis-source" description = "Enables the use of Digicert as a source of existing certificates." version = digicert.VERSION - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" additional_options = [] def __init__(self, *args, **kwargs): """Initialize source with appropriate details.""" required_vars = [ - 'DIGICERT_CIS_API_KEY', - 'DIGICERT_CIS_URL', - 'DIGICERT_CIS_ROOT', - 'DIGICERT_CIS_INTERMEDIATE', - 'DIGICERT_CIS_PROFILE_NAME' + "DIGICERT_CIS_API_KEY", + "DIGICERT_CIS_URL", + "DIGICERT_CIS_ROOT", + "DIGICERT_CIS_INTERMEDIATE", + "DIGICERT_CIS_PROFILE_NAME", ] validate_conf(current_app, required_vars) self.session = requests.Session() self.session.headers.update( { - 'X-DC-DEVKEY': current_app.config['DIGICERT_CIS_API_KEY'], - 'Content-Type': 'application/json' + "X-DC-DEVKEY": current_app.config["DIGICERT_CIS_API_KEY"], + "Content-Type": "application/json", } ) self.session.hooks = dict(response=log_status_code) a = requests.adapters.HTTPAdapter(max_retries=3) - self.session.mount('https://', a) + self.session.mount("https://", a) super(DigiCertCISSourcePlugin, self).__init__(*args, **kwargs) def get_certificates(self, options, **kwargs): """Fetch all Digicert certificates.""" - base_url = current_app.config.get('DIGICERT_CIS_URL') + base_url = current_app.config.get("DIGICERT_CIS_URL") # make request - search_url = '{0}/platform/cis/certificate/search'.format(base_url) + search_url = "{0}/platform/cis/certificate/search".format(base_url) certs = [] page = 1 while True: - response = self.session.get(search_url, params={'status': ['issued'], 'page': page}) + response = self.session.get( + search_url, params={"status": ["issued"], "page": page} + ) data = handle_cis_response(response) - for c in data['certificates']: - download_url = '{0}/platform/cis/certificate/{1}'.format(base_url, c['id']) + for c in data["certificates"]: + download_url = "{0}/platform/cis/certificate/{1}".format( + base_url, c["id"] + ) certificate = self.session.get(download_url) # normalize serial - serial = str(int(c['serial_number'], 16)) - cert = {'body': certificate.content, 'serial': serial, 'external_id': c['id']} + serial = 
str(int(c["serial_number"], 16)) + cert = { + "body": certificate.content, + "serial": serial, + "external_id": c["id"], + } certs.append(cert) - if page == data['total_pages']: + if page == data["total_pages"]: break page += 1 @@ -444,22 +484,23 @@ class DigiCertCISSourcePlugin(SourcePlugin): class DigiCertCISIssuerPlugin(IssuerPlugin): """Wrap the Digicert Certificate Issuing API.""" - title = 'DigiCert CIS' - slug = 'digicert-cis-issuer' + + title = "DigiCert CIS" + slug = "digicert-cis-issuer" description = "Enables the creation of certificates by the DigiCert CIS REST API." version = digicert.VERSION - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): """Initialize the issuer with the appropriate details.""" required_vars = [ - 'DIGICERT_CIS_API_KEY', - 'DIGICERT_CIS_URL', - 'DIGICERT_CIS_ROOT', - 'DIGICERT_CIS_INTERMEDIATE', - 'DIGICERT_CIS_PROFILE_NAME' + "DIGICERT_CIS_API_KEY", + "DIGICERT_CIS_URL", + "DIGICERT_CIS_ROOT", + "DIGICERT_CIS_INTERMEDIATE", + "DIGICERT_CIS_PROFILE_NAME", ] validate_conf(current_app, required_vars) @@ -467,8 +508,8 @@ class DigiCertCISIssuerPlugin(IssuerPlugin): self.session = requests.Session() self.session.headers.update( { - 'X-DC-DEVKEY': current_app.config['DIGICERT_CIS_API_KEY'], - 'Content-Type': 'application/json' + "X-DC-DEVKEY": current_app.config["DIGICERT_CIS_API_KEY"], + "Content-Type": "application/json", } ) @@ -478,41 +519,51 @@ class DigiCertCISIssuerPlugin(IssuerPlugin): def create_certificate(self, csr, issuer_options): """Create a DigiCert certificate.""" - base_url = current_app.config.get('DIGICERT_CIS_URL') + base_url = current_app.config.get("DIGICERT_CIS_URL") # make certificate request - create_url = '{0}/platform/cis/certificate'.format(base_url) + create_url = "{0}/platform/cis/certificate".format(base_url) data = map_cis_fields(issuer_options, csr) response = self.session.post(create_url, data=json.dumps(data)) data = handle_cis_response(response) # retrieve certificate - certificate_pem = get_cis_certificate(self.session, base_url, data['id']) + certificate_pem = get_cis_certificate(self.session, base_url, data["id"]) - self.session.headers.pop('Accept') + self.session.headers.pop("Accept") end_entity = pem.parse(certificate_pem)[0] - if 'ECC' in issuer_options['key_type']: - return "\n".join(str(end_entity).splitlines()), current_app.config.get('DIGICERT_ECC_CIS_INTERMEDIATE'), data['id'] + if "ECC" in issuer_options["key_type"]: + return ( + "\n".join(str(end_entity).splitlines()), + current_app.config.get("DIGICERT_ECC_CIS_INTERMEDIATE"), + data["id"], + ) # By default return RSA - return "\n".join(str(end_entity).splitlines()), current_app.config.get('DIGICERT_CIS_INTERMEDIATE'), data['id'] + return ( + "\n".join(str(end_entity).splitlines()), + current_app.config.get("DIGICERT_CIS_INTERMEDIATE"), + data["id"], + ) def revoke_certificate(self, certificate, comments): """Revoke a Digicert certificate.""" - base_url = current_app.config.get('DIGICERT_CIS_URL') + base_url = current_app.config.get("DIGICERT_CIS_URL") # make certificate revoke request - revoke_url = '{0}/platform/cis/certificate/{1}/revoke'.format(base_url, certificate.external_id) - metrics.send('digicert_revoke_certificate_success', 'counter', 1) - response = self.session.put(revoke_url, data=json.dumps({'comments': comments})) + revoke_url = "{0}/platform/cis/certificate/{1}/revoke".format( + base_url, 
certificate.external_id + ) + metrics.send("digicert_revoke_certificate_success", "counter", 1) + response = self.session.put(revoke_url, data=json.dumps({"comments": comments})) if response.status_code != 204: - metrics.send('digicert_revoke_certificate_failure', 'counter', 1) - raise Exception('Failed to revoke certificate.') + metrics.send("digicert_revoke_certificate_failure", "counter", 1) + raise Exception("Failed to revoke certificate.") - metrics.send('digicert_revoke_certificate_success', 'counter', 1) + metrics.send("digicert_revoke_certificate_success", "counter", 1) @staticmethod def create_authority(options): @@ -525,5 +576,5 @@ class DigiCertCISIssuerPlugin(IssuerPlugin): :param options: :return: """ - role = {'username': '', 'password': '', 'name': 'digicert'} - return current_app.config.get('DIGICERT_CIS_ROOT'), "", [role] + role = {"username": "", "password": "", "name": "digicert"} + return current_app.config.get("DIGICERT_CIS_ROOT"), "", [role] diff --git a/lemur/plugins/lemur_digicert/tests/test_digicert.py b/lemur/plugins/lemur_digicert/tests/test_digicert.py index d8d1519d..71efbad4 100644 --- a/lemur/plugins/lemur_digicert/tests/test_digicert.py +++ b/lemur/plugins/lemur_digicert/tests/test_digicert.py @@ -13,144 +13,129 @@ from cryptography import x509 def test_map_fields_with_validity_end_and_start(app): from lemur.plugins.lemur_digicert.plugin import map_fields - names = [u'one.example.com', u'two.example.com', u'three.example.com'] + names = [u"one.example.com", u"two.example.com", u"three.example.com"] options = { - 'common_name': 'example.com', - 'owner': 'bob@example.com', - 'description': 'test certificate', - 'extensions': { - 'sub_alt_names': { - 'names': [x509.DNSName(x) for x in names] - } - }, - 'validity_end': arrow.get(2017, 5, 7), - 'validity_start': arrow.get(2016, 10, 30) + "common_name": "example.com", + "owner": "bob@example.com", + "description": "test certificate", + "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, + "validity_end": arrow.get(2017, 5, 7), + "validity_start": arrow.get(2016, 10, 30), } data = map_fields(options, CSR_STR) assert data == { - 'certificate': { - 'csr': CSR_STR, - 'common_name': 'example.com', - 'dns_names': names, - 'signature_hash': 'sha256' + "certificate": { + "csr": CSR_STR, + "common_name": "example.com", + "dns_names": names, + "signature_hash": "sha256", }, - 'organization': {'id': 111111}, - 'custom_expiration_date': arrow.get(2017, 5, 7).format('YYYY-MM-DD') + "organization": {"id": 111111}, + "custom_expiration_date": arrow.get(2017, 5, 7).format("YYYY-MM-DD"), } def test_map_fields_with_validity_years(app): from lemur.plugins.lemur_digicert.plugin import map_fields - names = [u'one.example.com', u'two.example.com', u'three.example.com'] + names = [u"one.example.com", u"two.example.com", u"three.example.com"] options = { - 'common_name': 'example.com', - 'owner': 'bob@example.com', - 'description': 'test certificate', - 'extensions': { - 'sub_alt_names': { - 'names': [x509.DNSName(x) for x in names] - } - }, - 'validity_years': 2, - 'validity_end': arrow.get(2017, 10, 30) + "common_name": "example.com", + "owner": "bob@example.com", + "description": "test certificate", + "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, + "validity_years": 2, + "validity_end": arrow.get(2017, 10, 30), } data = map_fields(options, CSR_STR) assert data == { - 'certificate': { - 'csr': CSR_STR, - 'common_name': 'example.com', - 'dns_names': names, - 'signature_hash': 
'sha256' + "certificate": { + "csr": CSR_STR, + "common_name": "example.com", + "dns_names": names, + "signature_hash": "sha256", }, - 'organization': {'id': 111111}, - 'validity_years': 2 + "organization": {"id": 111111}, + "validity_years": 2, } def test_map_cis_fields(app): from lemur.plugins.lemur_digicert.plugin import map_cis_fields - names = [u'one.example.com', u'two.example.com', u'three.example.com'] + names = [u"one.example.com", u"two.example.com", u"three.example.com"] options = { - 'common_name': 'example.com', - 'owner': 'bob@example.com', - 'description': 'test certificate', - 'extensions': { - 'sub_alt_names': { - 'names': [x509.DNSName(x) for x in names] - } - }, - 'organization': 'Example, Inc.', - 'organizational_unit': 'Example Org', - 'validity_end': arrow.get(2017, 5, 7), - 'validity_start': arrow.get(2016, 10, 30) + "common_name": "example.com", + "owner": "bob@example.com", + "description": "test certificate", + "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, + "organization": "Example, Inc.", + "organizational_unit": "Example Org", + "validity_end": arrow.get(2017, 5, 7), + "validity_start": arrow.get(2016, 10, 30), } data = map_cis_fields(options, CSR_STR) assert data == { - 'common_name': 'example.com', - 'csr': CSR_STR, - 'additional_dns_names': names, - 'signature_hash': 'sha256', - 'organization': {'name': 'Example, Inc.', 'units': ['Example Org']}, - 'validity': { - 'valid_to': arrow.get(2017, 5, 7).format('YYYY-MM-DDTHH:MM') + 'Z' + "common_name": "example.com", + "csr": CSR_STR, + "additional_dns_names": names, + "signature_hash": "sha256", + "organization": {"name": "Example, Inc.", "units": ["Example Org"]}, + "validity": { + "valid_to": arrow.get(2017, 5, 7).format("YYYY-MM-DDTHH:MM") + "Z" }, - 'profile_name': None + "profile_name": None, } options = { - 'common_name': 'example.com', - 'owner': 'bob@example.com', - 'description': 'test certificate', - 'extensions': { - 'sub_alt_names': { - 'names': [x509.DNSName(x) for x in names] - } - }, - 'organization': 'Example, Inc.', - 'organizational_unit': 'Example Org', - 'validity_years': 2 + "common_name": "example.com", + "owner": "bob@example.com", + "description": "test certificate", + "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, + "organization": "Example, Inc.", + "organizational_unit": "Example Org", + "validity_years": 2, } with freeze_time(time_to_freeze=arrow.get(2016, 11, 3).datetime): data = map_cis_fields(options, CSR_STR) assert data == { - 'common_name': 'example.com', - 'csr': CSR_STR, - 'additional_dns_names': names, - 'signature_hash': 'sha256', - 'organization': {'name': 'Example, Inc.', 'units': ['Example Org']}, - 'validity': { - 'valid_to': arrow.get(2018, 11, 3).format('YYYY-MM-DDTHH:MM') + 'Z' + "common_name": "example.com", + "csr": CSR_STR, + "additional_dns_names": names, + "signature_hash": "sha256", + "organization": {"name": "Example, Inc.", "units": ["Example Org"]}, + "validity": { + "valid_to": arrow.get(2018, 11, 3).format("YYYY-MM-DDTHH:MM") + "Z" }, - 'profile_name': None + "profile_name": None, } def test_signature_hash(app): from lemur.plugins.lemur_digicert.plugin import signature_hash - assert signature_hash(None) == 'sha256' - assert signature_hash('sha256WithRSA') == 'sha256' - assert signature_hash('sha384WithRSA') == 'sha384' - assert signature_hash('sha512WithRSA') == 'sha512' + assert signature_hash(None) == "sha256" + assert signature_hash("sha256WithRSA") == "sha256" + assert 
signature_hash("sha384WithRSA") == "sha384" + assert signature_hash("sha512WithRSA") == "sha512" with pytest.raises(Exception): - signature_hash('sdfdsf') + signature_hash("sdfdsf") -def test_issuer_plugin_create_certificate(certificate_="""\ +def test_issuer_plugin_create_certificate( + certificate_="""\ -----BEGIN CERTIFICATE----- abc -----END CERTIFICATE----- @@ -160,7 +145,8 @@ def -----BEGIN CERTIFICATE----- ghi -----END CERTIFICATE----- -"""): +""" +): import requests_mock from lemur.plugins.lemur_digicert.plugin import DigiCertIssuerPlugin @@ -168,12 +154,26 @@ ghi subject = DigiCertIssuerPlugin() adapter = requests_mock.Adapter() - adapter.register_uri('POST', 'mock://www.digicert.com/services/v2/order/certificate/ssl_plus', text=json.dumps({'id': 'id123'})) - adapter.register_uri('GET', 'mock://www.digicert.com/services/v2/order/certificate/id123', text=json.dumps({'status': 'issued', 'certificate': {'id': 'cert123'}})) - adapter.register_uri('GET', 'mock://www.digicert.com/services/v2/certificate/cert123/download/format/pem_all', text=pem_fixture) - subject.session.mount('mock', adapter) + adapter.register_uri( + "POST", + "mock://www.digicert.com/services/v2/order/certificate/ssl_plus", + text=json.dumps({"id": "id123"}), + ) + adapter.register_uri( + "GET", + "mock://www.digicert.com/services/v2/order/certificate/id123", + text=json.dumps({"status": "issued", "certificate": {"id": "cert123"}}), + ) + adapter.register_uri( + "GET", + "mock://www.digicert.com/services/v2/certificate/cert123/download/format/pem_all", + text=pem_fixture, + ) + subject.session.mount("mock", adapter) - cert, intermediate, external_id = subject.create_certificate("", {'common_name': 'test.com'}) + cert, intermediate, external_id = subject.create_certificate( + "", {"common_name": "test.com"} + ) assert cert == "-----BEGIN CERTIFICATE-----\nabc\n-----END CERTIFICATE-----" assert intermediate == "-----BEGIN CERTIFICATE-----\ndef\n-----END CERTIFICATE-----" @@ -187,10 +187,18 @@ def test_cancel_ordered_certificate(mock_pending_cert): mock_pending_cert.external_id = 1234 subject = DigiCertIssuerPlugin() adapter = requests_mock.Adapter() - adapter.register_uri('PUT', 'mock://www.digicert.com/services/v2/order/certificate/1234/status', status_code=204) - adapter.register_uri('PUT', 'mock://www.digicert.com/services/v2/order/certificate/111/status', status_code=404) - subject.session.mount('mock', adapter) - data = {'note': 'Test'} + adapter.register_uri( + "PUT", + "mock://www.digicert.com/services/v2/order/certificate/1234/status", + status_code=204, + ) + adapter.register_uri( + "PUT", + "mock://www.digicert.com/services/v2/order/certificate/111/status", + status_code=404, + ) + subject.session.mount("mock", adapter) + data = {"note": "Test"} subject.cancel_ordered_certificate(mock_pending_cert, **data) # A non-existing order id, does not raise exception because if it doesn't exist, then it doesn't matter diff --git a/lemur/plugins/lemur_email/__init__.py b/lemur/plugins/lemur_email/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_email/__init__.py +++ b/lemur/plugins/lemur_email/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_email/plugin.py b/lemur/plugins/lemur_email/plugin.py index 18007b99..241aa1b0 100644 --- 
a/lemur/plugins/lemur_email/plugin.py +++ b/lemur/plugins/lemur_email/plugin.py @@ -27,8 +27,10 @@ def render_html(template_name, message): :param message: :return: """ - template = env.get_template('{}.html'.format(template_name)) - return template.render(dict(message=message, hostname=current_app.config.get('LEMUR_HOSTNAME'))) + template = env.get_template("{}.html".format(template_name)) + return template.render( + dict(message=message, hostname=current_app.config.get("LEMUR_HOSTNAME")) + ) def send_via_smtp(subject, body, targets): @@ -40,7 +42,9 @@ def send_via_smtp(subject, body, targets): :param targets: :return: """ - msg = Message(subject, recipients=targets, sender=current_app.config.get("LEMUR_EMAIL")) + msg = Message( + subject, recipients=targets, sender=current_app.config.get("LEMUR_EMAIL") + ) msg.body = "" # kinda a weird api for sending html emails msg.html = body smtp_mail.send(msg) @@ -54,65 +58,55 @@ def send_via_ses(subject, body, targets): :param targets: :return: """ - client = boto3.client('ses', region_name='us-east-1') + client = boto3.client("ses", region_name="us-east-1") client.send_email( - Source=current_app.config.get('LEMUR_EMAIL'), - Destination={ - 'ToAddresses': targets - }, + Source=current_app.config.get("LEMUR_EMAIL"), + Destination={"ToAddresses": targets}, Message={ - 'Subject': { - 'Data': subject, - 'Charset': 'UTF-8' - }, - 'Body': { - 'Html': { - 'Data': body, - 'Charset': 'UTF-8' - } - } - } + "Subject": {"Data": subject, "Charset": "UTF-8"}, + "Body": {"Html": {"Data": body, "Charset": "UTF-8"}}, + }, ) class EmailNotificationPlugin(ExpirationNotificationPlugin): - title = 'Email' - slug = 'email-notification' - description = 'Sends expiration email notifications' + title = "Email" + slug = "email-notification" + description = "Sends expiration email notifications" version = email.VERSION - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur" additional_options = [ { - 'name': 'recipients', - 'type': 'str', - 'required': True, - 'validation': '^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$', - 'helpMessage': 'Comma delimited list of email addresses', - }, + "name": "recipients", + "type": "str", + "required": True, + "validation": "^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$", + "helpMessage": "Comma delimited list of email addresses", + } ] def __init__(self, *args, **kwargs): """Initialize the plugin with the appropriate details.""" - sender = current_app.config.get('LEMUR_EMAIL_SENDER', 'ses').lower() + sender = current_app.config.get("LEMUR_EMAIL_SENDER", "ses").lower() - if sender not in ['ses', 'smtp']: - raise InvalidConfiguration('Email sender type {0} is not recognized.') + if sender not in ["ses", "smtp"]: + raise InvalidConfiguration("Email sender type {0} is not recognized.") @staticmethod def send(notification_type, message, targets, options, **kwargs): - subject = 'Lemur: {0} Notification'.format(notification_type.capitalize()) + subject = "Lemur: {0} Notification".format(notification_type.capitalize()) - data = {'options': options, 'certificates': message} + data = {"options": options, "certificates": message} body = render_html(notification_type, data) - s_type = current_app.config.get("LEMUR_EMAIL_SENDER", 'ses').lower() + s_type = current_app.config.get("LEMUR_EMAIL_SENDER", "ses").lower() - if s_type == 'ses': + if s_type == "ses": send_via_ses(subject, body, targets) - elif s_type == 'smtp': + elif s_type == "smtp": 
send_via_smtp(subject, body, targets) diff --git a/lemur/plugins/lemur_email/templates/config.py b/lemur/plugins/lemur_email/templates/config.py index 2ec8a6c2..3d877fe0 100644 --- a/lemur/plugins/lemur_email/templates/config.py +++ b/lemur/plugins/lemur_email/templates/config.py @@ -5,22 +5,24 @@ from jinja2 import Environment, FileSystemLoader, select_autoescape from lemur.plugins.utils import get_plugin_option loader = FileSystemLoader(searchpath=os.path.dirname(os.path.realpath(__file__))) -env = Environment(loader=loader, # nosec: potentially dangerous types esc. - autoescape=select_autoescape(['html', 'xml'])) +env = Environment( + loader=loader, # nosec: potentially dangerous types esc. + autoescape=select_autoescape(["html", "xml"]), +) def human_time(time): - return arrow.get(time).format('dddd, MMMM D, YYYY') + return arrow.get(time).format("dddd, MMMM D, YYYY") def interval(options): - return get_plugin_option('interval', options) + return get_plugin_option("interval", options) def unit(options): - return get_plugin_option('unit', options) + return get_plugin_option("unit", options) -env.filters['time'] = human_time -env.filters['interval'] = interval -env.filters['unit'] = unit +env.filters["time"] = human_time +env.filters["interval"] = interval +env.filters["unit"] = unit diff --git a/lemur/plugins/lemur_email/tests/test_email.py b/lemur/plugins/lemur_email/tests/test_email.py index 9d58402f..43168cab 100644 --- a/lemur/plugins/lemur_email/tests/test_email.py +++ b/lemur/plugins/lemur_email/tests/test_email.py @@ -13,21 +13,24 @@ def test_render(certificate, endpoint): new_cert.replaces.append(certificate) data = { - 'certificates': [certificate_notification_output_schema.dump(certificate).data], - 'options': [{'name': 'interval', 'value': 10}, {'name': 'unit', 'value': 'days'}] + "certificates": [certificate_notification_output_schema.dump(certificate).data], + "options": [ + {"name": "interval", "value": 10}, + {"name": "unit", "value": "days"}, + ], } - template = env.get_template('{}.html'.format('expiration')) + template = env.get_template("{}.html".format("expiration")) - body = template.render(dict(message=data, hostname='lemur.test.example.com')) + body = template.render(dict(message=data, hostname="lemur.test.example.com")) - template = env.get_template('{}.html'.format('rotation')) + template = env.get_template("{}.html".format("rotation")) certificate.endpoints.append(endpoint) body = template.render( dict( certificate=certificate_notification_output_schema.dump(certificate).data, - hostname='lemur.test.example.com' + hostname="lemur.test.example.com", ) ) diff --git a/lemur/plugins/lemur_jks/__init__.py b/lemur/plugins/lemur_jks/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_jks/__init__.py +++ b/lemur/plugins/lemur_jks/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_jks/plugin.py b/lemur/plugins/lemur_jks/plugin.py index 3d456f1c..7134faeb 100644 --- a/lemur/plugins/lemur_jks/plugin.py +++ b/lemur/plugins/lemur_jks/plugin.py @@ -31,10 +31,10 @@ def create_truststore(cert, chain, alias, passphrase): entries = [] for idx, cert_bytes in enumerate(cert_chain_as_der(cert, chain)): # The original cert gets name _cert, first chain element is _cert_1, etc. 
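# --- Editorial aside (illustration only, not part of this patch) ---
# The alias scheme described in the comment above, shown standalone; the
# "azurediamond" value is just an example (it matches the lower-cased alias
# asserted in the JKS tests further down).
alias = "azurediamond"
aliases = [alias + "_cert" + ("_{}".format(idx) if idx else "") for idx in range(3)]
assert aliases == ["azurediamond_cert", "azurediamond_cert_1", "azurediamond_cert_2"]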
- cert_alias = alias + '_cert' + ('_{}'.format(idx) if idx else '') + cert_alias = alias + "_cert" + ("_{}".format(idx) if idx else "") entries.append(TrustedCertEntry.new(cert_alias, cert_bytes)) - return KeyStore.new('jks', entries).saves(passphrase) + return KeyStore.new("jks", entries).saves(passphrase) def create_keystore(cert, chain, key, alias, passphrase): @@ -42,36 +42,36 @@ def create_keystore(cert, chain, key, alias, passphrase): key_bytes = parse_private_key(key).private_bytes( encoding=serialization.Encoding.DER, format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption() + encryption_algorithm=serialization.NoEncryption(), ) entry = PrivateKeyEntry.new(alias, certs_bytes, key_bytes) - return KeyStore.new('jks', [entry]).saves(passphrase) + return KeyStore.new("jks", [entry]).saves(passphrase) class JavaTruststoreExportPlugin(ExportPlugin): - title = 'Java Truststore (JKS)' - slug = 'java-truststore-jks' - description = 'Generates a JKS truststore' + title = "Java Truststore (JKS)" + slug = "java-truststore-jks" + description = "Generates a JKS truststore" requires_key = False version = jks.VERSION - author = 'Marti Raudsepp' - author_url = 'https://github.com/intgr' + author = "Marti Raudsepp" + author_url = "https://github.com/intgr" options = [ { - 'name': 'alias', - 'type': 'str', - 'required': False, - 'helpMessage': 'Enter the alias you wish to use for the truststore.', + "name": "alias", + "type": "str", + "required": False, + "helpMessage": "Enter the alias you wish to use for the truststore.", }, { - 'name': 'passphrase', - 'type': 'str', - 'required': False, - 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.', - 'validation': '' + "name": "passphrase", + "type": "str", + "required": False, + "helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.", + "validation": "", }, ] @@ -80,44 +80,44 @@ class JavaTruststoreExportPlugin(ExportPlugin): Generates a Java Truststore """ - if self.get_option('alias', options): - alias = self.get_option('alias', options) + if self.get_option("alias", options): + alias = self.get_option("alias", options) else: alias = common_name(parse_certificate(body)) - if self.get_option('passphrase', options): - passphrase = self.get_option('passphrase', options) + if self.get_option("passphrase", options): + passphrase = self.get_option("passphrase", options) else: - passphrase = Fernet.generate_key().decode('utf-8') + passphrase = Fernet.generate_key().decode("utf-8") raw = create_truststore(body, chain, alias, passphrase) - return 'jks', passphrase, raw + return "jks", passphrase, raw class JavaKeystoreExportPlugin(ExportPlugin): - title = 'Java Keystore (JKS)' - slug = 'java-keystore-jks' - description = 'Generates a JKS keystore' + title = "Java Keystore (JKS)" + slug = "java-keystore-jks" + description = "Generates a JKS keystore" version = jks.VERSION - author = 'Marti Raudsepp' - author_url = 'https://github.com/intgr' + author = "Marti Raudsepp" + author_url = "https://github.com/intgr" options = [ { - 'name': 'passphrase', - 'type': 'str', - 'required': False, - 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.', - 'validation': '' + "name": "passphrase", + "type": "str", + "required": False, + "helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.", + "validation": "", }, { - 'name': 'alias', - 'type': 'str', - 
'required': False, - 'helpMessage': 'Enter the alias you wish to use for the keystore.', - } + "name": "alias", + "type": "str", + "required": False, + "helpMessage": "Enter the alias you wish to use for the keystore.", + }, ] def export(self, body, chain, key, options, **kwargs): @@ -125,16 +125,16 @@ class JavaKeystoreExportPlugin(ExportPlugin): Generates a Java Keystore """ - if self.get_option('passphrase', options): - passphrase = self.get_option('passphrase', options) + if self.get_option("passphrase", options): + passphrase = self.get_option("passphrase", options) else: - passphrase = Fernet.generate_key().decode('utf-8') + passphrase = Fernet.generate_key().decode("utf-8") - if self.get_option('alias', options): - alias = self.get_option('alias', options) + if self.get_option("alias", options): + alias = self.get_option("alias", options) else: alias = common_name(parse_certificate(body)) raw = create_keystore(body, chain, key, alias, passphrase) - return 'jks', passphrase, raw + return "jks", passphrase, raw diff --git a/lemur/plugins/lemur_jks/tests/test_jks.py b/lemur/plugins/lemur_jks/tests/test_jks.py index e4a5e64a..b9fe9b33 100644 --- a/lemur/plugins/lemur_jks/tests/test_jks.py +++ b/lemur/plugins/lemur_jks/tests/test_jks.py @@ -1,96 +1,105 @@ import pytest from jks import KeyStore, TrustedCertEntry, PrivateKeyEntry -from lemur.tests.vectors import INTERNAL_CERTIFICATE_A_STR, SAN_CERT_STR, INTERMEDIATE_CERT_STR, ROOTCA_CERT_STR, \ - SAN_CERT_KEY +from lemur.tests.vectors import ( + INTERNAL_CERTIFICATE_A_STR, + SAN_CERT_STR, + INTERMEDIATE_CERT_STR, + ROOTCA_CERT_STR, + SAN_CERT_KEY, +) def test_export_truststore(app): from lemur.plugins.base import plugins - p = plugins.get('java-truststore-jks') + p = plugins.get("java-truststore-jks") options = [ - {'name': 'passphrase', 'value': 'hunter2'}, - {'name': 'alias', 'value': 'AzureDiamond'}, + {"name": "passphrase", "value": "hunter2"}, + {"name": "alias", "value": "AzureDiamond"}, ] - chain = INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR + chain = INTERMEDIATE_CERT_STR + "\n" + ROOTCA_CERT_STR ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options) - assert ext == 'jks' - assert password == 'hunter2' + assert ext == "jks" + assert password == "hunter2" assert isinstance(raw, bytes) - ks = KeyStore.loads(raw, 'hunter2') - assert ks.store_type == 'jks' + ks = KeyStore.loads(raw, "hunter2") + assert ks.store_type == "jks" # JKS lower-cases alias strings - assert ks.entries.keys() == {'azurediamond_cert', 'azurediamond_cert_1', 'azurediamond_cert_2'} - assert isinstance(ks.entries['azurediamond_cert'], TrustedCertEntry) + assert ks.entries.keys() == { + "azurediamond_cert", + "azurediamond_cert_1", + "azurediamond_cert_2", + } + assert isinstance(ks.entries["azurediamond_cert"], TrustedCertEntry) def test_export_truststore_defaults(app): from lemur.plugins.base import plugins - p = plugins.get('java-truststore-jks') + p = plugins.get("java-truststore-jks") options = [] - ext, password, raw = p.export(INTERNAL_CERTIFICATE_A_STR, '', '', options) + ext, password, raw = p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options) - assert ext == 'jks' + assert ext == "jks" assert isinstance(password, str) assert isinstance(raw, bytes) ks = KeyStore.loads(raw, password) - assert ks.store_type == 'jks' + assert ks.store_type == "jks" # JKS lower-cases alias strings - assert ks.entries.keys() == {'acommonname_cert'} - assert isinstance(ks.entries['acommonname_cert'], TrustedCertEntry) + assert ks.entries.keys() == 
{"acommonname_cert"} + assert isinstance(ks.entries["acommonname_cert"], TrustedCertEntry) def test_export_keystore(app): from lemur.plugins.base import plugins - p = plugins.get('java-keystore-jks') + p = plugins.get("java-keystore-jks") options = [ - {'name': 'passphrase', 'value': 'hunter2'}, - {'name': 'alias', 'value': 'AzureDiamond'}, + {"name": "passphrase", "value": "hunter2"}, + {"name": "alias", "value": "AzureDiamond"}, ] - chain = INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR + chain = INTERMEDIATE_CERT_STR + "\n" + ROOTCA_CERT_STR with pytest.raises(Exception): - p.export(INTERNAL_CERTIFICATE_A_STR, chain, '', options) + p.export(INTERNAL_CERTIFICATE_A_STR, chain, "", options) ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options) - assert ext == 'jks' - assert password == 'hunter2' + assert ext == "jks" + assert password == "hunter2" assert isinstance(raw, bytes) ks = KeyStore.loads(raw, password) - assert ks.store_type == 'jks' + assert ks.store_type == "jks" # JKS lower-cases alias strings - assert ks.entries.keys() == {'azurediamond'} - entry = ks.entries['azurediamond'] + assert ks.entries.keys() == {"azurediamond"} + entry = ks.entries["azurediamond"] assert isinstance(entry, PrivateKeyEntry) - assert len(entry.cert_chain) == 3 # Cert and chain were provided + assert len(entry.cert_chain) == 3 # Cert and chain were provided def test_export_keystore_defaults(app): from lemur.plugins.base import plugins - p = plugins.get('java-keystore-jks') + p = plugins.get("java-keystore-jks") options = [] with pytest.raises(Exception): - p.export(INTERNAL_CERTIFICATE_A_STR, '', '', options) + p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options) - ext, password, raw = p.export(SAN_CERT_STR, '', SAN_CERT_KEY, options) + ext, password, raw = p.export(SAN_CERT_STR, "", SAN_CERT_KEY, options) - assert ext == 'jks' + assert ext == "jks" assert isinstance(password, str) assert isinstance(raw, bytes) ks = KeyStore.loads(raw, password) - assert ks.store_type == 'jks' - assert ks.entries.keys() == {'san.example.org'} - entry = ks.entries['san.example.org'] + assert ks.store_type == "jks" + assert ks.entries.keys() == {"san.example.org"} + entry = ks.entries["san.example.org"] assert isinstance(entry, PrivateKeyEntry) - assert len(entry.cert_chain) == 1 # Only cert itself, no chain was provided + assert len(entry.cert_chain) == 1 # Only cert itself, no chain was provided diff --git a/lemur/plugins/lemur_kubernetes/__init__.py b/lemur/plugins/lemur_kubernetes/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_kubernetes/__init__.py +++ b/lemur/plugins/lemur_kubernetes/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_kubernetes/plugin.py b/lemur/plugins/lemur_kubernetes/plugin.py index 30b864eb..62ffffda 100644 --- a/lemur/plugins/lemur_kubernetes/plugin.py +++ b/lemur/plugins/lemur_kubernetes/plugin.py @@ -21,7 +21,7 @@ from lemur.common.defaults import common_name from lemur.common.utils import parse_certificate from lemur.plugins.bases import DestinationPlugin -DEFAULT_API_VERSION = 'v1' +DEFAULT_API_VERSION = "v1" def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data): @@ -34,7 +34,7 @@ def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data): if 200 <= create_resp.status_code <= 299: 
return None - elif create_resp.json().get('reason', '') != 'AlreadyExists': + elif create_resp.json().get("reason", "") != "AlreadyExists": return create_resp.content url = _resolve_uri(k8s_base_uri, namespace, kind, name) @@ -50,22 +50,27 @@ def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data): def _resolve_ns(k8s_base_uri, namespace, api_ver=DEFAULT_API_VERSION): - api_group = 'api' - if '/' in api_ver: - api_group = 'apis' - return '{base}/{api_group}/{api_ver}/namespaces'.format(base=k8s_base_uri, api_group=api_group, api_ver=api_ver) + ( - '/' + namespace if namespace else '') + api_group = "api" + if "/" in api_ver: + api_group = "apis" + return "{base}/{api_group}/{api_ver}/namespaces".format( + base=k8s_base_uri, api_group=api_group, api_ver=api_ver + ) + ("/" + namespace if namespace else "") def _resolve_uri(k8s_base_uri, namespace, kind, name=None, api_ver=DEFAULT_API_VERSION): if not namespace: - namespace = 'default' + namespace = "default" - return "/".join(itertools.chain.from_iterable([ - (_resolve_ns(k8s_base_uri, namespace, api_ver=api_ver),), - ((kind + 's').lower(),), - (name,) if name else (), - ])) + return "/".join( + itertools.chain.from_iterable( + [ + (_resolve_ns(k8s_base_uri, namespace, api_ver=api_ver),), + ((kind + "s").lower(),), + (name,) if name else (), + ] + ) + ) # Performs Base64 encoding of string to string using the base64.b64encode() function @@ -76,117 +81,113 @@ def base64encode(string): def build_secret(secret_format, secret_name, body, private_key, cert_chain): secret = { - 'apiVersion': 'v1', - 'kind': 'Secret', - 'type': 'Opaque', - 'metadata': { - 'name': secret_name, - } + "apiVersion": "v1", + "kind": "Secret", + "type": "Opaque", + "metadata": {"name": secret_name}, } - if secret_format == 'Full': - secret['data'] = { - 'combined.pem': base64encode('%s\n%s' % (body, private_key)), - 'ca.crt': base64encode(cert_chain), - 'service.key': base64encode(private_key), - 'service.crt': base64encode(body), + if secret_format == "Full": + secret["data"] = { + "combined.pem": base64encode("%s\n%s" % (body, private_key)), + "ca.crt": base64encode(cert_chain), + "service.key": base64encode(private_key), + "service.crt": base64encode(body), } - if secret_format == 'TLS': - secret['type'] = 'kubernetes.io/tls' - secret['data'] = { - 'tls.crt': base64encode(cert_chain), - 'tls.key': base64encode(private_key) - } - if secret_format == 'Certificate': - secret['data'] = { - 'tls.crt': base64encode(cert_chain), + if secret_format == "TLS": + secret["type"] = "kubernetes.io/tls" + secret["data"] = { + "tls.crt": base64encode(cert_chain), + "tls.key": base64encode(private_key), } + if secret_format == "Certificate": + secret["data"] = {"tls.crt": base64encode(cert_chain)} return secret class KubernetesDestinationPlugin(DestinationPlugin): - title = 'Kubernetes' - slug = 'kubernetes-destination' - description = 'Allow the uploading of certificates to Kubernetes as secret' + title = "Kubernetes" + slug = "kubernetes-destination" + description = "Allow the uploading of certificates to Kubernetes as secret" - author = 'Mikhail Khodorovskiy' - author_url = 'https://github.com/mik373/lemur' + author = "Mikhail Khodorovskiy" + author_url = "https://github.com/mik373/lemur" options = [ { - 'name': 'secretNameFormat', - 'type': 'str', - 'required': False, + "name": "secretNameFormat", + "type": "str", + "required": False, # Validation is difficult. 
This regex is used by kubectl to validate secret names: # [a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)* # Allowing the insertion of "{common_name}" (or any other such placeholder} # at any point in the string proved very challenging and had a tendency to # cause my browser to hang. The specified expression will allow any valid string # but will also accept many invalid strings. - 'validation': '(?:[a-z0-9.-]|\\{common_name\\})+', - 'helpMessage': 'Must be a valid secret name, possibly including "{common_name}"', - 'default': '{common_name}' + "validation": "(?:[a-z0-9.-]|\\{common_name\\})+", + "helpMessage": 'Must be a valid secret name, possibly including "{common_name}"', + "default": "{common_name}", }, { - 'name': 'kubernetesURL', - 'type': 'str', - 'required': False, - 'validation': 'https?://[a-zA-Z0-9.-]+(?::[0-9]+)?', - 'helpMessage': 'Must be a valid Kubernetes server URL!', - 'default': 'https://kubernetes.default' + "name": "kubernetesURL", + "type": "str", + "required": False, + "validation": "https?://[a-zA-Z0-9.-]+(?::[0-9]+)?", + "helpMessage": "Must be a valid Kubernetes server URL!", + "default": "https://kubernetes.default", }, { - 'name': 'kubernetesAuthToken', - 'type': 'str', - 'required': False, - 'validation': '[0-9a-zA-Z-_.]+', - 'helpMessage': 'Must be a valid Kubernetes server Token!', + "name": "kubernetesAuthToken", + "type": "str", + "required": False, + "validation": "[0-9a-zA-Z-_.]+", + "helpMessage": "Must be a valid Kubernetes server Token!", }, { - 'name': 'kubernetesAuthTokenFile', - 'type': 'str', - 'required': False, - 'validation': '(/[^/]+)+', - 'helpMessage': 'Must be a valid file path!', - 'default': '/var/run/secrets/kubernetes.io/serviceaccount/token' + "name": "kubernetesAuthTokenFile", + "type": "str", + "required": False, + "validation": "(/[^/]+)+", + "helpMessage": "Must be a valid file path!", + "default": "/var/run/secrets/kubernetes.io/serviceaccount/token", }, { - 'name': 'kubernetesServerCertificate', - 'type': 'textarea', - 'required': False, - 'validation': '-----BEGIN CERTIFICATE-----[a-zA-Z0-9/+\\s\\r\\n]+-----END CERTIFICATE-----', - 'helpMessage': 'Must be a valid Kubernetes server Certificate!', + "name": "kubernetesServerCertificate", + "type": "textarea", + "required": False, + "validation": "-----BEGIN CERTIFICATE-----[a-zA-Z0-9/+\\s\\r\\n]+-----END CERTIFICATE-----", + "helpMessage": "Must be a valid Kubernetes server Certificate!", }, { - 'name': 'kubernetesServerCertificateFile', - 'type': 'str', - 'required': False, - 'validation': '(/[^/]+)+', - 'helpMessage': 'Must be a valid file path!', - 'default': '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt' + "name": "kubernetesServerCertificateFile", + "type": "str", + "required": False, + "validation": "(/[^/]+)+", + "helpMessage": "Must be a valid file path!", + "default": "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt", }, { - 'name': 'kubernetesNamespace', - 'type': 'str', - 'required': False, - 'validation': '[a-z0-9]([-a-z0-9]*[a-z0-9])?', - 'helpMessage': 'Must be a valid Kubernetes Namespace!', + "name": "kubernetesNamespace", + "type": "str", + "required": False, + "validation": "[a-z0-9]([-a-z0-9]*[a-z0-9])?", + "helpMessage": "Must be a valid Kubernetes Namespace!", }, { - 'name': 'kubernetesNamespaceFile', - 'type': 'str', - 'required': False, - 'validation': '(/[^/]+)+', - 'helpMessage': 'Must be a valid file path!', - 'default': '/var/run/secrets/kubernetes.io/serviceaccount/namespace' + "name": "kubernetesNamespaceFile", + "type": 
"str", + "required": False, + "validation": "(/[^/]+)+", + "helpMessage": "Must be a valid file path!", + "default": "/var/run/secrets/kubernetes.io/serviceaccount/namespace", }, { - 'name': 'secretFormat', - 'type': 'select', - 'required': True, - 'available': ['Full', 'TLS', 'Certificate'], - 'helpMessage': 'The type of Secret to create.', - 'default': 'Full' - } + "name": "secretFormat", + "type": "select", + "required": True, + "available": ["Full", "TLS", "Certificate"], + "helpMessage": "The type of Secret to create.", + "default": "Full", + }, ] def __init__(self, *args, **kwargs): @@ -195,27 +196,28 @@ class KubernetesDestinationPlugin(DestinationPlugin): def upload(self, name, body, private_key, cert_chain, options, **kwargs): try: - k8_base_uri = self.get_option('kubernetesURL', options) - secret_format = self.get_option('secretFormat', options) - k8s_api = K8sSession( - self.k8s_bearer(options), - self.k8s_cert(options) - ) + k8_base_uri = self.get_option("kubernetesURL", options) + secret_format = self.get_option("secretFormat", options) + k8s_api = K8sSession(self.k8s_bearer(options), self.k8s_cert(options)) cn = common_name(parse_certificate(body)) - secret_name_format = self.get_option('secretNameFormat', options) + secret_name_format = self.get_option("secretNameFormat", options) secret_name = secret_name_format.format(common_name=cn) - secret = build_secret(secret_format, secret_name, body, private_key, cert_chain) + secret = build_secret( + secret_format, secret_name, body, private_key, cert_chain + ) err = ensure_resource( k8s_api, k8s_base_uri=k8_base_uri, namespace=self.k8s_namespace(options), kind="secret", name=secret_name, - data=secret + data=secret, ) except Exception as e: - current_app.logger.exception("Exception in upload: {}".format(e), exc_info=True) + current_app.logger.exception( + "Exception in upload: {}".format(e), exc_info=True + ) raise if err is not None: @@ -223,24 +225,28 @@ class KubernetesDestinationPlugin(DestinationPlugin): raise Exception("Error uploading secret: " + err) def k8s_bearer(self, options): - bearer = self.get_option('kubernetesAuthToken', options) + bearer = self.get_option("kubernetesAuthToken", options) if not bearer: - bearer_file = self.get_option('kubernetesAuthTokenFile', options) + bearer_file = self.get_option("kubernetesAuthTokenFile", options) with open(bearer_file, "r") as file: bearer = file.readline() if bearer: current_app.logger.debug("Using token read from %s", bearer_file) else: - raise Exception("Unable to locate token in options or from %s", bearer_file) + raise Exception( + "Unable to locate token in options or from %s", bearer_file + ) else: current_app.logger.debug("Using token from options") return bearer def k8s_cert(self, options): - cert_file = self.get_option('kubernetesServerCertificateFile', options) - cert = self.get_option('kubernetesServerCertificate', options) + cert_file = self.get_option("kubernetesServerCertificateFile", options) + cert = self.get_option("kubernetesServerCertificate", options) if cert: - cert_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'k8.cert') + cert_file = os.path.join( + os.path.abspath(os.path.dirname(__file__)), "k8.cert" + ) with open(cert_file, "w") as text_file: text_file.write(cert) current_app.logger.debug("Using certificate from options") @@ -249,36 +255,69 @@ class KubernetesDestinationPlugin(DestinationPlugin): return cert_file def k8s_namespace(self, options): - namespace = self.get_option('kubernetesNamespace', options) + namespace = 
self.get_option("kubernetesNamespace", options) if not namespace: - namespace_file = self.get_option('kubernetesNamespaceFile', options) + namespace_file = self.get_option("kubernetesNamespaceFile", options) with open(namespace_file, "r") as file: namespace = file.readline() if namespace: - current_app.logger.debug("Using namespace %s from %s", namespace, namespace_file) + current_app.logger.debug( + "Using namespace %s from %s", namespace, namespace_file + ) else: - raise Exception("Unable to locate namespace in options or from %s", namespace_file) + raise Exception( + "Unable to locate namespace in options or from %s", namespace_file + ) else: current_app.logger.debug("Using namespace %s from options", namespace) return namespace class K8sSession(requests.Session): - def __init__(self, bearer, cert_file): super(K8sSession, self).__init__() - self.headers.update({ - 'Authorization': 'Bearer %s' % bearer - }) + self.headers.update({"Authorization": "Bearer %s" % bearer}) self.verify = cert_file - def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, - timeout=30, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None, - json=None): + def request( + self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=30, + allow_redirects=True, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None, + ): """ This method overrides the default timeout to be 10s. """ - return super(K8sSession, self).request(method, url, params, data, headers, cookies, files, auth, timeout, - allow_redirects, proxies, hooks, stream, verify, cert, json) + return super(K8sSession, self).request( + method, + url, + params, + data, + headers, + cookies, + files, + auth, + timeout, + allow_redirects, + proxies, + hooks, + stream, + verify, + cert, + json, + ) diff --git a/lemur/plugins/lemur_openssl/__init__.py b/lemur/plugins/lemur_openssl/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_openssl/__init__.py +++ b/lemur/plugins/lemur_openssl/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_openssl/plugin.py b/lemur/plugins/lemur_openssl/plugin.py index 6d6f89aa..02da311b 100644 --- a/lemur/plugins/lemur_openssl/plugin.py +++ b/lemur/plugins/lemur_openssl/plugin.py @@ -50,59 +50,66 @@ def create_pkcs12(cert, chain, p12_tmp, key, alias, passphrase): assert isinstance(key, str) with mktempfile() as key_tmp: - with open(key_tmp, 'w') as f: + with open(key_tmp, "w") as f: f.write(key) # Create PKCS12 keystore from private key and public certificate with mktempfile() as cert_tmp: - with open(cert_tmp, 'w') as f: + with open(cert_tmp, "w") as f: if chain: f.writelines([cert.strip() + "\n", chain.strip() + "\n"]) else: f.writelines([cert.strip() + "\n"]) - run_process([ - "openssl", - "pkcs12", - "-export", - "-name", alias, - "-in", cert_tmp, - "-inkey", key_tmp, - "-out", p12_tmp, - "-password", "pass:{}".format(passphrase) - ]) + run_process( + [ + "openssl", + "pkcs12", + "-export", + "-name", + alias, + "-in", + cert_tmp, + "-inkey", + key_tmp, + "-out", + p12_tmp, + "-password", + "pass:{}".format(passphrase), + ] + ) class OpenSSLExportPlugin(ExportPlugin): - title = 'OpenSSL' - slug = 'openssl-export' 
- description = 'Is a loose interface to openssl and support various formats' + title = "OpenSSL" + slug = "openssl-export" + description = "Is a loose interface to openssl and support various formats" version = openssl.VERSION - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur" options = [ { - 'name': 'type', - 'type': 'select', - 'required': True, - 'available': ['PKCS12 (.p12)'], - 'helpMessage': 'Choose the format you wish to export', + "name": "type", + "type": "select", + "required": True, + "available": ["PKCS12 (.p12)"], + "helpMessage": "Choose the format you wish to export", }, { - 'name': 'passphrase', - 'type': 'str', - 'required': False, - 'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.', - 'validation': '' + "name": "passphrase", + "type": "str", + "required": False, + "helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.", + "validation": "", }, { - 'name': 'alias', - 'type': 'str', - 'required': False, - 'helpMessage': 'Enter the alias you wish to use for the keystore.', - } + "name": "alias", + "type": "str", + "required": False, + "helpMessage": "Enter the alias you wish to use for the keystore.", + }, ] def export(self, body, chain, key, options, **kwargs): @@ -115,20 +122,20 @@ class OpenSSLExportPlugin(ExportPlugin): :param options: :param kwargs: """ - if self.get_option('passphrase', options): - passphrase = self.get_option('passphrase', options) + if self.get_option("passphrase", options): + passphrase = self.get_option("passphrase", options) else: passphrase = get_psuedo_random_string() - if self.get_option('alias', options): - alias = self.get_option('alias', options) + if self.get_option("alias", options): + alias = self.get_option("alias", options) else: alias = common_name(parse_certificate(body)) - type = self.get_option('type', options) + type = self.get_option("type", options) with mktemppath() as output_tmp: - if type == 'PKCS12 (.p12)': + if type == "PKCS12 (.p12)": if not key: raise Exception("Private Key required by {0}".format(type)) @@ -137,7 +144,7 @@ class OpenSSLExportPlugin(ExportPlugin): else: raise Exception("Unable to export, unsupported type: {0}".format(type)) - with open(output_tmp, 'rb') as f: + with open(output_tmp, "rb") as f: raw = f.read() return extension, passphrase, raw diff --git a/lemur/plugins/lemur_openssl/tests/test_openssl.py b/lemur/plugins/lemur_openssl/tests/test_openssl.py index e24033e8..c332f941 100644 --- a/lemur/plugins/lemur_openssl/tests/test_openssl.py +++ b/lemur/plugins/lemur_openssl/tests/test_openssl.py @@ -4,8 +4,12 @@ from lemur.tests.vectors import INTERNAL_PRIVATE_KEY_A_STR, INTERNAL_CERTIFICATE def test_export_certificate_to_pkcs12(app): from lemur.plugins.base import plugins - p = plugins.get('openssl-export') - options = [{'name': 'passphrase', 'value': 'test1234'}, {'name': 'type', 'value': 'PKCS12 (.p12)'}] + + p = plugins.get("openssl-export") + options = [ + {"name": "passphrase", "value": "test1234"}, + {"name": "type", "value": "PKCS12 (.p12)"}, + ] with pytest.raises(Exception): p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options) diff --git a/lemur/plugins/lemur_sftp/__init__.py b/lemur/plugins/lemur_sftp/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_sftp/__init__.py +++ b/lemur/plugins/lemur_sftp/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - 
.get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_sftp/plugin.py b/lemur/plugins/lemur_sftp/plugin.py index d74effc5..de8df427 100644 --- a/lemur/plugins/lemur_sftp/plugin.py +++ b/lemur/plugins/lemur_sftp/plugin.py @@ -27,107 +27,105 @@ from lemur.plugins.bases import DestinationPlugin class SFTPDestinationPlugin(DestinationPlugin): - title = 'SFTP' - slug = 'sftp-destination' - description = 'Allow the uploading of certificates to SFTP' + title = "SFTP" + slug = "sftp-destination" + description = "Allow the uploading of certificates to SFTP" version = lemur_sftp.VERSION - author = 'Dmitry Zykov' - author_url = 'https://github.com/DmitryZykov' + author = "Dmitry Zykov" + author_url = "https://github.com/DmitryZykov" options = [ { - 'name': 'host', - 'type': 'str', - 'required': True, - 'helpMessage': 'The SFTP host.' + "name": "host", + "type": "str", + "required": True, + "helpMessage": "The SFTP host.", }, { - 'name': 'port', - 'type': 'int', - 'required': True, - 'helpMessage': 'The SFTP port, default is 22.', - 'validation': '^(6553[0-5]|655[0-2][0-9]\d|65[0-4](\d){2}|6[0-4](\d){3}|[1-5](\d){4}|[1-9](\d){0,3})', - 'default': '22' + "name": "port", + "type": "int", + "required": True, + "helpMessage": "The SFTP port, default is 22.", + "validation": "^(6553[0-5]|655[0-2][0-9]\d|65[0-4](\d){2}|6[0-4](\d){3}|[1-5](\d){4}|[1-9](\d){0,3})", + "default": "22", }, { - 'name': 'user', - 'type': 'str', - 'required': True, - 'helpMessage': 'The SFTP user. Default is root.', - 'default': 'root' + "name": "user", + "type": "str", + "required": True, + "helpMessage": "The SFTP user. Default is root.", + "default": "root", }, { - 'name': 'password', - 'type': 'str', - 'required': False, - 'helpMessage': 'The SFTP password (optional when the private key is used).', - 'default': None + "name": "password", + "type": "str", + "required": False, + "helpMessage": "The SFTP password (optional when the private key is used).", + "default": None, }, { - 'name': 'privateKeyPath', - 'type': 'str', - 'required': False, - 'helpMessage': 'The path to the RSA private key on the Lemur server (optional).', - 'default': None + "name": "privateKeyPath", + "type": "str", + "required": False, + "helpMessage": "The path to the RSA private key on the Lemur server (optional).", + "default": None, }, { - 'name': 'privateKeyPass', - 'type': 'str', - 'required': False, - 'helpMessage': 'The password for the encrypted RSA private key (optional).', - 'default': None + "name": "privateKeyPass", + "type": "str", + "required": False, + "helpMessage": "The password for the encrypted RSA private key (optional).", + "default": None, }, { - 'name': 'destinationPath', - 'type': 'str', - 'required': True, - 'helpMessage': 'The SFTP path where certificates will be uploaded.', - 'default': '/etc/nginx/certs' + "name": "destinationPath", + "type": "str", + "required": True, + "helpMessage": "The SFTP path where certificates will be uploaded.", + "default": "/etc/nginx/certs", }, { - 'name': 'exportFormat', - 'required': True, - 'value': 'NGINX', - 'helpMessage': 'The export format for certificates.', - 'type': 'select', - 'available': [ - 'NGINX', - 'Apache' - ] - } + "name": "exportFormat", + "required": True, + "value": "NGINX", + "helpMessage": "The export format for certificates.", + "type": "select", + "available": ["NGINX", "Apache"], + }, ] def upload(self, name, body, 
private_key, cert_chain, options, **kwargs): - current_app.logger.debug('SFTP destination plugin is started') + current_app.logger.debug("SFTP destination plugin is started") cn = common_name(parse_certificate(body)) - host = self.get_option('host', options) - port = self.get_option('port', options) - user = self.get_option('user', options) - password = self.get_option('password', options) - ssh_priv_key = self.get_option('privateKeyPath', options) - ssh_priv_key_pass = self.get_option('privateKeyPass', options) - dst_path = self.get_option('destinationPath', options) - export_format = self.get_option('exportFormat', options) + host = self.get_option("host", options) + port = self.get_option("port", options) + user = self.get_option("user", options) + password = self.get_option("password", options) + ssh_priv_key = self.get_option("privateKeyPath", options) + ssh_priv_key_pass = self.get_option("privateKeyPass", options) + dst_path = self.get_option("destinationPath", options) + export_format = self.get_option("exportFormat", options) # prepare files for upload - files = {cn + '.key': private_key, - cn + '.pem': body} + files = {cn + ".key": private_key, cn + ".pem": body} if cert_chain: - if export_format == 'NGINX': + if export_format == "NGINX": # assemble body + chain in the single file - files[cn + '.pem'] += '\n' + cert_chain + files[cn + ".pem"] += "\n" + cert_chain - elif export_format == 'Apache': + elif export_format == "Apache": # store chain in the separate file - files[cn + '.ca.bundle.pem'] = cert_chain + files[cn + ".ca.bundle.pem"] = cert_chain # upload files try: - current_app.logger.debug('Connecting to {0}@{1}:{2}'.format(user, host, port)) + current_app.logger.debug( + "Connecting to {0}@{1}:{2}".format(user, host, port) + ) ssh = paramiko.SSHClient() # allow connection to the new unknown host @@ -135,14 +133,18 @@ class SFTPDestinationPlugin(DestinationPlugin): # open the ssh connection if password: - current_app.logger.debug('Using password') + current_app.logger.debug("Using password") ssh.connect(host, username=user, port=port, password=password) elif ssh_priv_key: - current_app.logger.debug('Using RSA private key') - pkey = paramiko.RSAKey.from_private_key_file(ssh_priv_key, ssh_priv_key_pass) + current_app.logger.debug("Using RSA private key") + pkey = paramiko.RSAKey.from_private_key_file( + ssh_priv_key, ssh_priv_key_pass + ) ssh.connect(host, username=user, port=port, pkey=pkey) else: - current_app.logger.error("No password or private key provided. Can't proceed") + current_app.logger.error( + "No password or private key provided. 
Can't proceed" + ) raise paramiko.ssh_exception.AuthenticationException # open the sftp session inside the ssh connection @@ -150,29 +152,33 @@ class SFTPDestinationPlugin(DestinationPlugin): # make sure that the destination path exist try: - current_app.logger.debug('Creating {0}'.format(dst_path)) + current_app.logger.debug("Creating {0}".format(dst_path)) sftp.mkdir(dst_path) except IOError: - current_app.logger.debug('{0} already exist, resuming'.format(dst_path)) + current_app.logger.debug("{0} already exist, resuming".format(dst_path)) try: - dst_path_cn = dst_path + '/' + cn - current_app.logger.debug('Creating {0}'.format(dst_path_cn)) + dst_path_cn = dst_path + "/" + cn + current_app.logger.debug("Creating {0}".format(dst_path_cn)) sftp.mkdir(dst_path_cn) except IOError: - current_app.logger.debug('{0} already exist, resuming'.format(dst_path_cn)) + current_app.logger.debug( + "{0} already exist, resuming".format(dst_path_cn) + ) # upload certificate files to the sftp destination for filename, data in files.items(): - current_app.logger.debug('Uploading {0} to {1}'.format(filename, dst_path_cn)) - with sftp.open(dst_path_cn + '/' + filename, 'w') as f: + current_app.logger.debug( + "Uploading {0} to {1}".format(filename, dst_path_cn) + ) + with sftp.open(dst_path_cn + "/" + filename, "w") as f: f.write(data) # read only for owner, -r-------- - sftp.chmod(dst_path_cn + '/' + filename, 0o400) + sftp.chmod(dst_path_cn + "/" + filename, 0o400) ssh.close() except Exception as e: - current_app.logger.error('ERROR in {0}: {1}'.format(e.__class__, e)) + current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e)) try: ssh.close() except BaseException: diff --git a/lemur/plugins/lemur_slack/__init__.py b/lemur/plugins/lemur_slack/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_slack/__init__.py +++ b/lemur/plugins/lemur_slack/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_slack/plugin.py b/lemur/plugins/lemur_slack/plugin.py index a986aa9a..7569d295 100644 --- a/lemur/plugins/lemur_slack/plugin.py +++ b/lemur/plugins/lemur_slack/plugin.py @@ -17,102 +17,101 @@ import requests def create_certificate_url(name): - return 'https://{hostname}/#/certificates/{name}'.format( - hostname=current_app.config.get('LEMUR_HOSTNAME'), - name=name + return "https://{hostname}/#/certificates/{name}".format( + hostname=current_app.config.get("LEMUR_HOSTNAME"), name=name ) def create_expiration_attachments(certificates): attachments = [] for certificate in certificates: - attachments.append({ - 'title': certificate['name'], - 'title_link': create_certificate_url(certificate['name']), - 'color': 'danger', - 'fallback': '', - 'fields': [ - { - 'title': 'Owner', - 'value': certificate['owner'], - 'short': True - }, - { - 'title': 'Expires', - 'value': arrow.get(certificate['validityEnd']).format('dddd, MMMM D, YYYY'), - 'short': True - }, - { - 'title': 'Endpoints Detected', - 'value': len(certificate['endpoints']), - 'short': True - } - ], - 'text': '', - 'mrkdwn_in': ['text'] - }) + attachments.append( + { + "title": certificate["name"], + "title_link": create_certificate_url(certificate["name"]), + "color": "danger", + "fallback": "", + "fields": [ + {"title": "Owner", "value": certificate["owner"], "short": True}, + { + "title": "Expires", + 
"value": arrow.get(certificate["validityEnd"]).format( + "dddd, MMMM D, YYYY" + ), + "short": True, + }, + { + "title": "Endpoints Detected", + "value": len(certificate["endpoints"]), + "short": True, + }, + ], + "text": "", + "mrkdwn_in": ["text"], + } + ) return attachments def create_rotation_attachments(certificate): return { - 'title': certificate['name'], - 'title_link': create_certificate_url(certificate['name']), - 'fields': [ + "title": certificate["name"], + "title_link": create_certificate_url(certificate["name"]), + "fields": [ { + {"title": "Owner", "value": certificate["owner"], "short": True}, { - 'title': 'Owner', - 'value': certificate['owner'], - 'short': True + "title": "Expires", + "value": arrow.get(certificate["validityEnd"]).format( + "dddd, MMMM D, YYYY" + ), + "short": True, }, { - 'title': 'Expires', - 'value': arrow.get(certificate['validityEnd']).format('dddd, MMMM D, YYYY'), - 'short': True + "title": "Replaced By", + "value": len(certificate["replaced"][0]["name"]), + "short": True, }, { - 'title': 'Replaced By', - 'value': len(certificate['replaced'][0]['name']), - 'short': True + "title": "Endpoints Rotated", + "value": len(certificate["endpoints"]), + "short": True, }, - { - 'title': 'Endpoints Rotated', - 'value': len(certificate['endpoints']), - 'short': True - } } - ] + ], } class SlackNotificationPlugin(ExpirationNotificationPlugin): - title = 'Slack' - slug = 'slack-notification' - description = 'Sends notifications to Slack' + title = "Slack" + slug = "slack-notification" + description = "Sends notifications to Slack" version = slack.VERSION - author = 'Harm Weites' - author_url = 'https://github.com/netflix/lemur' + author = "Harm Weites" + author_url = "https://github.com/netflix/lemur" additional_options = [ { - 'name': 'webhook', - 'type': 'str', - 'required': True, - 'validation': '^https:\/\/hooks\.slack\.com\/services\/.+$', - 'helpMessage': 'The url Slack told you to use for this integration', - }, { - 'name': 'username', - 'type': 'str', - 'validation': '^.+$', - 'helpMessage': 'The great storyteller', - 'default': 'Lemur' - }, { - 'name': 'recipients', - 'type': 'str', - 'required': True, - 'validation': '^(@|#).+$', - 'helpMessage': 'Where to send to, either @username or #channel', + "name": "webhook", + "type": "str", + "required": True, + "validation": "^https:\/\/hooks\.slack\.com\/services\/.+$", + "helpMessage": "The url Slack told you to use for this integration", + }, + { + "name": "username", + "type": "str", + "validation": "^.+$", + "helpMessage": "The great storyteller", + "default": "Lemur", + }, + { + "name": "recipients", + "type": "str", + "required": True, + "validation": "^(@|#).+$", + "helpMessage": "Where to send to, either @username or #channel", }, ] @@ -122,25 +121,27 @@ class SlackNotificationPlugin(ExpirationNotificationPlugin): `lemur notify` """ attachments = None - if notification_type == 'expiration': + if notification_type == "expiration": attachments = create_expiration_attachments(message) - elif notification_type == 'rotation': + elif notification_type == "rotation": attachments = create_rotation_attachments(message) if not attachments: - raise Exception('Unable to create message attachments') + raise Exception("Unable to create message attachments") body = { - 'text': 'Lemur {0} Notification'.format(notification_type.capitalize()), - 'attachments': attachments, - 'channel': self.get_option('recipients', options), - 'username': self.get_option('username', options) + "text": "Lemur {0} 
Notification".format(notification_type.capitalize()), + "attachments": attachments, + "channel": self.get_option("recipients", options), + "username": self.get_option("username", options), } - r = requests.post(self.get_option('webhook', options), json.dumps(body)) + r = requests.post(self.get_option("webhook", options), json.dumps(body)) if r.status_code not in [200]: - raise Exception('Failed to send message') + raise Exception("Failed to send message") - current_app.logger.error("Slack response: {0} Message Body: {1}".format(r.status_code, body)) + current_app.logger.error( + "Slack response: {0} Message Body: {1}".format(r.status_code, body) + ) diff --git a/lemur/plugins/lemur_slack/tests/test_slack.py b/lemur/plugins/lemur_slack/tests/test_slack.py index 701f69d9..86add25f 100644 --- a/lemur/plugins/lemur_slack/tests/test_slack.py +++ b/lemur/plugins/lemur_slack/tests/test_slack.py @@ -1,33 +1,23 @@ - - def test_formatting(certificate): from lemur.plugins.lemur_slack.plugin import create_expiration_attachments from lemur.certificates.schemas import certificate_notification_output_schema + data = [certificate_notification_output_schema.dump(certificate).data] attachment = { - 'title': certificate.name, - 'color': 'danger', - 'fields': [ - { - 'short': True, - 'value': 'joe@example.com', - 'title': 'Owner' - }, - { - 'short': True, - 'value': u'Tuesday, December 31, 2047', - 'title': 'Expires' - }, { - 'short': True, - 'value': 0, - 'title': 'Endpoints Detected' - } + "title": certificate.name, + "color": "danger", + "fields": [ + {"short": True, "value": "joe@example.com", "title": "Owner"}, + {"short": True, "value": u"Tuesday, December 31, 2047", "title": "Expires"}, + {"short": True, "value": 0, "title": "Endpoints Detected"}, ], - 'title_link': 'https://lemur.example.com/#/certificates/{name}'.format(name=certificate.name), - 'mrkdwn_in': ['text'], - 'text': '', - 'fallback': '' + "title_link": "https://lemur.example.com/#/certificates/{name}".format( + name=certificate.name + ), + "mrkdwn_in": ["text"], + "text": "", + "fallback": "", } assert attachment == create_expiration_attachments(data)[0] diff --git a/lemur/plugins/lemur_statsd/lemur_statsd/__init__.py b/lemur/plugins/lemur_statsd/lemur_statsd/__init__.py index 3a751848..b4d708ce 100644 --- a/lemur/plugins/lemur_statsd/lemur_statsd/__init__.py +++ b/lemur/plugins/lemur_statsd/lemur_statsd/__init__.py @@ -1,4 +1,4 @@ try: - VERSION = __import__('pkg_resources').get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'Unknown' + VERSION = "Unknown" diff --git a/lemur/plugins/lemur_statsd/lemur_statsd/plugin.py b/lemur/plugins/lemur_statsd/lemur_statsd/plugin.py index a6a87c66..293b4634 100644 --- a/lemur/plugins/lemur_statsd/lemur_statsd/plugin.py +++ b/lemur/plugins/lemur_statsd/lemur_statsd/plugin.py @@ -6,40 +6,44 @@ from datadog import DogStatsd class StatsdMetricPlugin(MetricPlugin): - title = 'Statsd' - slug = 'statsd-metrics' - description = 'Adds support for sending metrics to Statsd' + title = "Statsd" + slug = "statsd-metrics" + description = "Adds support for sending metrics to Statsd" version = plug.VERSION def __init__(self): - host = current_app.config.get('STATSD_HOST') - port = current_app.config.get('STATSD_PORT') - prefix = current_app.config.get('STATSD_PREFIX') + host = current_app.config.get("STATSD_HOST") + port = current_app.config.get("STATSD_PORT") + prefix = current_app.config.get("STATSD_PREFIX") self.statsd = 
DogStatsd(host=host, port=port, namespace=prefix) - def submit(self, metric_name, metric_type, metric_value, metric_tags=None, options=None): - valid_types = ['COUNTER', 'GAUGE', 'TIMER'] + def submit( + self, metric_name, metric_type, metric_value, metric_tags=None, options=None + ): + valid_types = ["COUNTER", "GAUGE", "TIMER"] tags = [] if metric_type.upper() not in valid_types: raise Exception( "Invalid Metric Type for Statsd, '{metric}' choose from: {options}".format( - metric=metric_type, options=','.join(valid_types) + metric=metric_type, options=",".join(valid_types) ) ) if metric_tags: if not isinstance(metric_tags, dict): - raise Exception("Invalid Metric Tags for Statsd: Tags must be in dict format") + raise Exception( + "Invalid Metric Tags for Statsd: Tags must be in dict format" + ) else: tags = map(lambda e: "{0}:{1}".format(*e), metric_tags.items()) - if metric_type.upper() == 'COUNTER': + if metric_type.upper() == "COUNTER": self.statsd.increment(metric_name, metric_value, tags) - elif metric_type.upper() == 'GAUGE': + elif metric_type.upper() == "GAUGE": self.statsd.gauge(metric_name, metric_value, tags) - elif metric_type.upper() == 'TIMER': + elif metric_type.upper() == "TIMER": self.statsd.timing(metric_name, metric_value, tags) return diff --git a/lemur/plugins/lemur_statsd/setup.py b/lemur/plugins/lemur_statsd/setup.py index 6c4c2dd6..9b3c5f52 100644 --- a/lemur/plugins/lemur_statsd/setup.py +++ b/lemur/plugins/lemur_statsd/setup.py @@ -2,23 +2,16 @@ from __future__ import absolute_import from setuptools import setup, find_packages -install_requires = [ - 'lemur', - 'datadog' -] +install_requires = ["lemur", "datadog"] setup( - name='lemur_statsd', - version='1.0.0', - author='Cloudflare Security Engineering', - author_email='', + name="lemur_statsd", + version="1.0.0", + author="Cloudflare Security Engineering", + author_email="", include_package_data=True, packages=find_packages(), zip_safe=False, install_requires=install_requires, - entry_points={ - 'lemur.plugins': [ - 'statsd = lemur_statsd.plugin:StatsdMetricPlugin', - ] - } + entry_points={"lemur.plugins": ["statsd = lemur_statsd.plugin:StatsdMetricPlugin"]}, ) diff --git a/lemur/plugins/lemur_vault_dest/__init__.py b/lemur/plugins/lemur_vault_dest/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_vault_dest/__init__.py +++ b/lemur/plugins/lemur_vault_dest/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 803b0a0c..c8843cf5 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -25,59 +25,57 @@ from cryptography.hazmat.backends import default_backend class VaultSourcePlugin(SourcePlugin): """ Class for importing certificates from Hashicorp Vault""" - title = 'Vault' - slug = 'vault-source' - description = 'Discovers all certificates in a given path' - author = 'Christopher Jolley' - author_url = 'https://github.com/alwaysjolley/lemur' + title = "Vault" + slug = "vault-source" + description = "Discovers all certificates in a given path" + + author = "Christopher Jolley" + author_url = "https://github.com/alwaysjolley/lemur" options = [ { - 'name': 'vaultUrl', - 'type': 'str', - 'required': True, - 'validation': '^https?://[a-zA-Z0-9.:-]+$', 
- 'helpMessage': 'Valid URL to Hashi Vault instance' + "name": "vaultUrl", + "type": "str", + "required": True, + "validation": "^https?://[a-zA-Z0-9.:-]+$", + "helpMessage": "Valid URL to Hashi Vault instance", }, { - 'name': 'vaultKvApiVersion', - 'type': 'select', - 'value': '2', - 'available': [ - '1', - '2' - ], - 'required': True, - 'helpMessage': 'Version of the Vault KV API to use' + "name": "vaultKvApiVersion", + "type": "select", + "value": "2", + "available": ["1", "2"], + "required": True, + "helpMessage": "Version of the Vault KV API to use", }, { - 'name': 'vaultAuthTokenFile', - 'type': 'str', - 'required': True, - 'validation': '(/[^/]+)+', - 'helpMessage': 'Must be a valid file path!' + "name": "vaultAuthTokenFile", + "type": "str", + "required": True, + "validation": "(/[^/]+)+", + "helpMessage": "Must be a valid file path!", }, { - 'name': 'vaultMount', - 'type': 'str', - 'required': True, - 'validation': r'^\S+$', - 'helpMessage': 'Must be a valid Vault secrets mount name!' + "name": "vaultMount", + "type": "str", + "required": True, + "validation": r"^\S+$", + "helpMessage": "Must be a valid Vault secrets mount name!", }, { - 'name': 'vaultPath', - 'type': 'str', - 'required': True, - 'validation': '^([a-zA-Z0-9_-]+/?)+$', - 'helpMessage': 'Must be a valid Vault secrets path' + "name": "vaultPath", + "type": "str", + "required": True, + "validation": "^([a-zA-Z0-9_-]+/?)+$", + "helpMessage": "Must be a valid Vault secrets path", }, { - 'name': 'objectName', - 'type': 'str', - 'required': True, - 'validation': '[0-9a-zA-Z:_-]+', - 'helpMessage': 'Object Name to search' + "name": "objectName", + "type": "str", + "required": True, + "validation": "[0-9a-zA-Z:_-]+", + "helpMessage": "Object Name to search", }, ] @@ -85,38 +83,38 @@ class VaultSourcePlugin(SourcePlugin): """Pull certificates from objects in Hashicorp Vault""" data = [] cert = [] - body = '' - url = self.get_option('vaultUrl', options) - token_file = self.get_option('vaultAuthTokenFile', options) - mount = self.get_option('vaultMount', options) - path = self.get_option('vaultPath', options) - obj_name = self.get_option('objectName', options) - api_version = self.get_option('vaultKvApiVersion', options) - cert_filter = '-----BEGIN CERTIFICATE-----' - cert_delimiter = '-----END CERTIFICATE-----' + body = "" + url = self.get_option("vaultUrl", options) + token_file = self.get_option("vaultAuthTokenFile", options) + mount = self.get_option("vaultMount", options) + path = self.get_option("vaultPath", options) + obj_name = self.get_option("objectName", options) + api_version = self.get_option("vaultKvApiVersion", options) + cert_filter = "-----BEGIN CERTIFICATE-----" + cert_delimiter = "-----END CERTIFICATE-----" - with open(token_file, 'r') as tfile: - token = tfile.readline().rstrip('\n') + with open(token_file, "r") as tfile: + token = tfile.readline().rstrip("\n") client = hvac.Client(url=url, token=token) client.secrets.kv.default_kv_version = api_version - path = '{0}/{1}'.format(path, obj_name) + path = "{0}/{1}".format(path, obj_name) secret = get_secret(client, mount, path) - for cname in secret['data']: - if 'crt' in secret['data'][cname]: - cert = secret['data'][cname]['crt'].split(cert_delimiter + '\n') - elif 'pem' in secret['data'][cname]: - cert = secret['data'][cname]['pem'].split(cert_delimiter + '\n') + for cname in secret["data"]: + if "crt" in secret["data"][cname]: + cert = secret["data"][cname]["crt"].split(cert_delimiter + "\n") + elif "pem" in secret["data"][cname]: + cert = 
secret["data"][cname]["pem"].split(cert_delimiter + "\n") else: - for key in secret['data'][cname]: - if secret['data'][cname][key].startswith(cert_filter): - cert = secret['data'][cname][key].split(cert_delimiter + '\n') + for key in secret["data"][cname]: + if secret["data"][cname][key].startswith(cert_filter): + cert = secret["data"][cname][key].split(cert_delimiter + "\n") break body = cert[0] + cert_delimiter - if 'chain' in secret['data'][cname]: - chain = secret['data'][cname]['chain'] + if "chain" in secret["data"][cname]: + chain = secret["data"][cname]["chain"] elif len(cert) > 1: if cert[1].startswith(cert_filter): chain = cert[1] + cert_delimiter @@ -124,8 +122,10 @@ class VaultSourcePlugin(SourcePlugin): chain = None else: chain = None - data.append({'body': body, 'chain': chain, 'name': cname}) - return [dict(body=c['body'], chain=c.get('chain'), name=c['name']) for c in data] + data.append({"body": body, "chain": chain, "name": cname}) + return [ + dict(body=c["body"], chain=c.get("chain"), name=c["name"]) for c in data + ] def get_endpoints(self, options, **kwargs): """ Not implemented yet """ @@ -135,81 +135,74 @@ class VaultSourcePlugin(SourcePlugin): class VaultDestinationPlugin(DestinationPlugin): """Hashicorp Vault Destination plugin for Lemur""" - title = 'Vault' - slug = 'hashi-vault-destination' - description = 'Allow the uploading of certificates to Hashi Vault as secret' - author = 'Christopher Jolley' - author_url = 'https://github.com/alwaysjolley/lemur' + title = "Vault" + slug = "hashi-vault-destination" + description = "Allow the uploading of certificates to Hashi Vault as secret" + + author = "Christopher Jolley" + author_url = "https://github.com/alwaysjolley/lemur" options = [ { - 'name': 'vaultUrl', - 'type': 'str', - 'required': True, - 'validation': '^https?://[a-zA-Z0-9.:-]+$', - 'helpMessage': 'Valid URL to Hashi Vault instance' + "name": "vaultUrl", + "type": "str", + "required": True, + "validation": "^https?://[a-zA-Z0-9.:-]+$", + "helpMessage": "Valid URL to Hashi Vault instance", }, { - 'name': 'vaultKvApiVersion', - 'type': 'select', - 'value': '2', - 'available': [ - '1', - '2' - ], - 'required': True, - 'helpMessage': 'Version of the Vault KV API to use' + "name": "vaultKvApiVersion", + "type": "select", + "value": "2", + "available": ["1", "2"], + "required": True, + "helpMessage": "Version of the Vault KV API to use", }, { - 'name': 'vaultAuthTokenFile', - 'type': 'str', - 'required': True, - 'validation': '(/[^/]+)+', - 'helpMessage': 'Must be a valid file path!' + "name": "vaultAuthTokenFile", + "type": "str", + "required": True, + "validation": "(/[^/]+)+", + "helpMessage": "Must be a valid file path!", }, { - 'name': 'vaultMount', - 'type': 'str', - 'required': True, - 'validation': r'^\S+$', - 'helpMessage': 'Must be a valid Vault secrets mount name!' 
+ "name": "vaultMount", + "type": "str", + "required": True, + "validation": r"^\S+$", + "helpMessage": "Must be a valid Vault secrets mount name!", }, { - 'name': 'vaultPath', - 'type': 'str', - 'required': True, - 'validation': '^([a-zA-Z0-9_-]+/?)+$', - 'helpMessage': 'Must be a valid Vault secrets path' + "name": "vaultPath", + "type": "str", + "required": True, + "validation": "^([a-zA-Z0-9_-]+/?)+$", + "helpMessage": "Must be a valid Vault secrets path", }, { - 'name': 'objectName', - 'type': 'str', - 'required': False, - 'validation': '[0-9a-zA-Z:_-]+', - 'helpMessage': 'Name to bundle certs under, if blank use cn' + "name": "objectName", + "type": "str", + "required": False, + "validation": "[0-9a-zA-Z:_-]+", + "helpMessage": "Name to bundle certs under, if blank use cn", }, { - 'name': 'bundleChain', - 'type': 'select', - 'value': 'cert only', - 'available': [ - 'Nginx', - 'Apache', - 'PEM', - 'no chain' - ], - 'required': True, - 'helpMessage': 'Bundle the chain into the certificate' + "name": "bundleChain", + "type": "select", + "value": "cert only", + "available": ["Nginx", "Apache", "PEM", "no chain"], + "required": True, + "helpMessage": "Bundle the chain into the certificate", }, { - 'name': 'sanFilter', - 'type': 'str', - 'value': '.*', - 'required': False, - 'validation': '.*', - 'helpMessage': 'Valid regex filter' - } + "name": "sanFilter", + "type": "str", + "value": ".*", + "required": False, + "validation": ".*", + "helpMessage": "Valid regex filter", + }, ] def __init__(self, *args, **kwargs): @@ -225,14 +218,14 @@ class VaultDestinationPlugin(DestinationPlugin): """ cname = common_name(parse_certificate(body)) - url = self.get_option('vaultUrl', options) - token_file = self.get_option('vaultAuthTokenFile', options) - mount = self.get_option('vaultMount', options) - path = self.get_option('vaultPath', options) - bundle = self.get_option('bundleChain', options) - obj_name = self.get_option('objectName', options) - api_version = self.get_option('vaultKvApiVersion', options) - san_filter = self.get_option('sanFilter', options) + url = self.get_option("vaultUrl", options) + token_file = self.get_option("vaultAuthTokenFile", options) + mount = self.get_option("vaultMount", options) + path = self.get_option("vaultPath", options) + bundle = self.get_option("bundleChain", options) + obj_name = self.get_option("objectName", options) + api_version = self.get_option("vaultKvApiVersion", options) + san_filter = self.get_option("sanFilter", options) san_list = get_san_list(body) if san_filter: @@ -240,58 +233,67 @@ class VaultDestinationPlugin(DestinationPlugin): try: if not re.match(san_filter, san, flags=re.IGNORECASE): current_app.logger.exception( - "Exception uploading secret to vault: invalid SAN: {}".format(san), - exc_info=True) + "Exception uploading secret to vault: invalid SAN: {}".format( + san + ), + exc_info=True, + ) os._exit(1) except re.error: current_app.logger.exception( "Exception compiling regex filter: invalid filter", - exc_info=True) + exc_info=True, + ) - with open(token_file, 'r') as tfile: - token = tfile.readline().rstrip('\n') + with open(token_file, "r") as tfile: + token = tfile.readline().rstrip("\n") client = hvac.Client(url=url, token=token) client.secrets.kv.default_kv_version = api_version if obj_name: - path = '{0}/{1}'.format(path, obj_name) + path = "{0}/{1}".format(path, obj_name) else: - path = '{0}/{1}'.format(path, cname) + path = "{0}/{1}".format(path, cname) secret = get_secret(client, mount, path) - secret['data'][cname] = {} + 
secret["data"][cname] = {} - if bundle == 'Nginx': - secret['data'][cname]['crt'] = '{0}\n{1}'.format(body, cert_chain) - secret['data'][cname]['key'] = private_key - elif bundle == 'Apache': - secret['data'][cname]['crt'] = body - secret['data'][cname]['chain'] = cert_chain - secret['data'][cname]['key'] = private_key - elif bundle == 'PEM': - secret['data'][cname]['pem'] = '{0}\n{1}\n{2}'.format(body, cert_chain, private_key) + if bundle == "Nginx": + secret["data"][cname]["crt"] = "{0}\n{1}".format(body, cert_chain) + secret["data"][cname]["key"] = private_key + elif bundle == "Apache": + secret["data"][cname]["crt"] = body + secret["data"][cname]["chain"] = cert_chain + secret["data"][cname]["key"] = private_key + elif bundle == "PEM": + secret["data"][cname]["pem"] = "{0}\n{1}\n{2}".format( + body, cert_chain, private_key + ) else: - secret['data'][cname]['crt'] = body - secret['data'][cname]['key'] = private_key + secret["data"][cname]["crt"] = body + secret["data"][cname]["key"] = private_key if isinstance(san_list, list): - secret['data'][cname]['san'] = san_list + secret["data"][cname]["san"] = san_list try: client.secrets.kv.create_or_update_secret( - path=path, mount_point=mount, secret=secret['data'] + path=path, mount_point=mount, secret=secret["data"] ) except ConnectionError as err: current_app.logger.exception( - "Exception uploading secret to vault: {0}".format(err), exc_info=True) + "Exception uploading secret to vault: {0}".format(err), exc_info=True + ) def get_san_list(body): """ parse certificate for SAN names and return list, return empty list on error """ san_list = [] try: - byte_body = body.encode('utf-8') + byte_body = body.encode("utf-8") cert = x509.load_pem_x509_certificate(byte_body, default_backend()) - ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME) + ext = cert.extensions.get_extension_for_oid( + x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME + ) san_list = ext.value.get_values_for_type(x509.DNSName) except x509.extensions.ExtensionNotFound: pass @@ -301,12 +303,14 @@ def get_san_list(body): def get_secret(client, mount, path): """ retreive existing data from mount path and return dictionary """ - result = {'data': {}} + result = {"data": {}} try: - if client.secrets.kv.default_kv_version == '1': + if client.secrets.kv.default_kv_version == "1": result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount) else: - result = client.secrets.kv.v2.read_secret_version(path=path, mount_point=mount) + result = client.secrets.kv.v2.read_secret_version( + path=path, mount_point=mount + ) except ConnectionError: pass finally: diff --git a/lemur/plugins/lemur_verisign/__init__.py b/lemur/plugins/lemur_verisign/__init__.py index 8ce5a7f3..f8afd7e3 100644 --- a/lemur/plugins/lemur_verisign/__init__.py +++ b/lemur/plugins/lemur_verisign/__init__.py @@ -1,5 +1,4 @@ try: - VERSION = __import__('pkg_resources') \ - .get_distribution(__name__).version + VERSION = __import__("pkg_resources").get_distribution(__name__).version except Exception as e: - VERSION = 'unknown' + VERSION = "unknown" diff --git a/lemur/plugins/lemur_verisign/plugin.py b/lemur/plugins/lemur_verisign/plugin.py index e5207def..65bd1cac 100644 --- a/lemur/plugins/lemur_verisign/plugin.py +++ b/lemur/plugins/lemur_verisign/plugin.py @@ -58,7 +58,7 @@ VERISIGN_ERRORS = { "0x300a": "Domain/SubjectAltName Mismatched -- make sure that the SANs have the proper domain suffix", "0x950e": "Invalid Common Name -- make sure the CN has a proper domain suffix", 
"0xa00e": "Pending. (Insufficient number of tokens.)", - "0x8134": "Pending. (Domain failed CAA validation.)" + "0x8134": "Pending. (Domain failed CAA validation.)", } @@ -71,7 +71,7 @@ def log_status_code(r, *args, **kwargs): :param kwargs: :return: """ - metrics.send('symantec_status_code_{}'.format(r.status_code), 'counter', 1) + metrics.send("symantec_status_code_{}".format(r.status_code), "counter", 1) def get_additional_names(options): @@ -83,8 +83,8 @@ def get_additional_names(options): """ names = [] # add SANs if present - if options.get('extensions'): - for san in options['extensions']['sub_alt_names']: + if options.get("extensions"): + for san in options["extensions"]["sub_alt_names"]: if isinstance(san, x509.DNSName): names.append(san.value) return names @@ -99,37 +99,43 @@ def process_options(options): :return: dict or valid verisign options """ data = { - 'challenge': get_psuedo_random_string(), - 'serverType': 'Apache', - 'certProductType': 'Server', - 'firstName': current_app.config.get("VERISIGN_FIRST_NAME"), - 'lastName': current_app.config.get("VERISIGN_LAST_NAME"), - 'signatureAlgorithm': 'sha256WithRSAEncryption', - 'email': current_app.config.get("VERISIGN_EMAIL"), - 'ctLogOption': current_app.config.get("VERISIGN_CS_LOG_OPTION", "public"), + "challenge": get_psuedo_random_string(), + "serverType": "Apache", + "certProductType": "Server", + "firstName": current_app.config.get("VERISIGN_FIRST_NAME"), + "lastName": current_app.config.get("VERISIGN_LAST_NAME"), + "signatureAlgorithm": "sha256WithRSAEncryption", + "email": current_app.config.get("VERISIGN_EMAIL"), + "ctLogOption": current_app.config.get("VERISIGN_CS_LOG_OPTION", "public"), } - data['subject_alt_names'] = ",".join(get_additional_names(options)) + data["subject_alt_names"] = ",".join(get_additional_names(options)) - if options.get('validity_end') > arrow.utcnow().replace(years=2): - raise Exception("Verisign issued certificates cannot exceed two years in validity") + if options.get("validity_end") > arrow.utcnow().replace(years=2): + raise Exception( + "Verisign issued certificates cannot exceed two years in validity" + ) - if options.get('validity_end'): + if options.get("validity_end"): # VeriSign (Symantec) only accepts strictly smaller than 2 year end date - if options.get('validity_end') < arrow.utcnow().replace(years=2).replace(days=-1): + if options.get("validity_end") < arrow.utcnow().replace(years=2).replace( + days=-1 + ): period = get_default_issuance(options) - data['specificEndDate'] = options['validity_end'].format("MM/DD/YYYY") - data['validityPeriod'] = period + data["specificEndDate"] = options["validity_end"].format("MM/DD/YYYY") + data["validityPeriod"] = period else: # allowing Symantec website setting the end date, given the validity period - data['validityPeriod'] = str(get_default_issuance(options)) - options.pop('validity_end', None) + data["validityPeriod"] = str(get_default_issuance(options)) + options.pop("validity_end", None) - elif options.get('validity_years'): - if options['validity_years'] in [1, 2]: - data['validityPeriod'] = str(options['validity_years']) + 'Y' + elif options.get("validity_years"): + if options["validity_years"] in [1, 2]: + data["validityPeriod"] = str(options["validity_years"]) + "Y" else: - raise Exception("Verisign issued certificates cannot exceed two years in validity") + raise Exception( + "Verisign issued certificates cannot exceed two years in validity" + ) return data @@ -143,12 +149,14 @@ def get_default_issuance(options): """ now = 
arrow.utcnow() - if options['validity_end'] < now.replace(years=+1): - validity_period = '1Y' - elif options['validity_end'] < now.replace(years=+2): - validity_period = '2Y' + if options["validity_end"] < now.replace(years=+1): + validity_period = "1Y" + elif options["validity_end"] < now.replace(years=+2): + validity_period = "2Y" else: - raise Exception("Verisign issued certificates cannot exceed two years in validity") + raise Exception( + "Verisign issued certificates cannot exceed two years in validity" + ) return validity_period @@ -161,27 +169,27 @@ def handle_response(content): """ d = xmltodict.parse(content) global VERISIGN_ERRORS - if d.get('Error'): - status_code = d['Error']['StatusCode'] - elif d.get('Response'): - status_code = d['Response']['StatusCode'] + if d.get("Error"): + status_code = d["Error"]["StatusCode"] + elif d.get("Response"): + status_code = d["Response"]["StatusCode"] if status_code in VERISIGN_ERRORS.keys(): raise Exception(VERISIGN_ERRORS[status_code]) return d class VerisignIssuerPlugin(IssuerPlugin): - title = 'Verisign' - slug = 'verisign-issuer' - description = 'Enables the creation of certificates by the VICE2.0 verisign API.' + title = "Verisign" + slug = "verisign-issuer" + description = "Enables the creation of certificates by the VICE2.0 verisign API." version = verisign.VERSION - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): self.session = requests.Session() - self.session.cert = current_app.config.get('VERISIGN_PEM_PATH') + self.session.cert = current_app.config.get("VERISIGN_PEM_PATH") self.session.hooks = dict(response=log_status_code) super(VerisignIssuerPlugin, self).__init__(*args, **kwargs) @@ -193,23 +201,31 @@ class VerisignIssuerPlugin(IssuerPlugin): :param issuer_options: :return: :raise Exception: """ - url = current_app.config.get("VERISIGN_URL") + '/rest/services/enroll' + url = current_app.config.get("VERISIGN_URL") + "/rest/services/enroll" data = process_options(issuer_options) - data['csr'] = csr + data["csr"] = csr - current_app.logger.info("Requesting a new verisign certificate: {0}".format(data)) + current_app.logger.info( + "Requesting a new verisign certificate: {0}".format(data) + ) response = self.session.post(url, data=data) try: - cert = handle_response(response.content)['Response']['Certificate'] + cert = handle_response(response.content)["Response"]["Certificate"] except KeyError: - metrics.send('verisign_create_certificate_error', 'counter', 1, - metric_tags={"common_name": issuer_options.get("common_name", "")}) - sentry.captureException(extra={"common_name": issuer_options.get("common_name", "")}) + metrics.send( + "verisign_create_certificate_error", + "counter", + 1, + metric_tags={"common_name": issuer_options.get("common_name", "")}, + ) + sentry.captureException( + extra={"common_name": issuer_options.get("common_name", "")} + ) raise Exception(f"Error with Verisign: {response.content}") # TODO add external id - return cert, current_app.config.get('VERISIGN_INTERMEDIATE'), None + return cert, current_app.config.get("VERISIGN_INTERMEDIATE"), None @staticmethod def create_authority(options): @@ -220,8 +236,8 @@ class VerisignIssuerPlugin(IssuerPlugin): :param options: :return: """ - role = {'username': '', 'password': '', 'name': 'verisign'} - return current_app.config.get('VERISIGN_ROOT'), "", [role] + role = {"username": "", "password": "", "name": "verisign"} 
+ return current_app.config.get("VERISIGN_ROOT"), "", [role] def get_available_units(self): """ @@ -230,9 +246,11 @@ class VerisignIssuerPlugin(IssuerPlugin): :return: """ - url = current_app.config.get("VERISIGN_URL") + '/rest/services/getTokens' - response = self.session.post(url, headers={'content-type': 'application/x-www-form-urlencoded'}) - return handle_response(response.content)['Response']['Order'] + url = current_app.config.get("VERISIGN_URL") + "/rest/services/getTokens" + response = self.session.post( + url, headers={"content-type": "application/x-www-form-urlencoded"} + ) + return handle_response(response.content)["Response"]["Order"] def clear_pending_certificates(self): """ @@ -240,52 +258,54 @@ class VerisignIssuerPlugin(IssuerPlugin): :return: """ - url = current_app.config.get('VERISIGN_URL') + '/reportingws' + url = current_app.config.get("VERISIGN_URL") + "/reportingws" end = arrow.now() start = end.replace(days=-7) data = { - 'reportType': 'detail', - 'certProductType': 'Server', - 'certStatus': 'Pending', - 'startDate': start.format("MM/DD/YYYY"), - 'endDate': end.format("MM/DD/YYYY") + "reportType": "detail", + "certProductType": "Server", + "certStatus": "Pending", + "startDate": start.format("MM/DD/YYYY"), + "endDate": end.format("MM/DD/YYYY"), } response = self.session.post(url, data=data) - url = current_app.config.get('VERISIGN_URL') + '/rest/services/reject' - for order_id in response.json()['orderNumber']: - response = self.session.get(url, params={'transaction_id': order_id}) + url = current_app.config.get("VERISIGN_URL") + "/rest/services/reject" + for order_id in response.json()["orderNumber"]: + response = self.session.get(url, params={"transaction_id": order_id}) if response.status_code == 200: print("Rejecting certificate. TransactionId: {}".format(order_id)) class VerisignSourcePlugin(SourcePlugin): - title = 'Verisign' - slug = 'verisign-source' - description = 'Allows for the polling of issued certificates from the VICE2.0 verisign API.' + title = "Verisign" + slug = "verisign-source" + description = ( + "Allows for the polling of issued certificates from the VICE2.0 verisign API." 
+ ) version = verisign.VERSION - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): self.session = requests.Session() - self.session.cert = current_app.config.get('VERISIGN_PEM_PATH') + self.session.cert = current_app.config.get("VERISIGN_PEM_PATH") super(VerisignSourcePlugin, self).__init__(*args, **kwargs) def get_certificates(self): - url = current_app.config.get('VERISIGN_URL') + '/reportingws' + url = current_app.config.get("VERISIGN_URL") + "/reportingws" end = arrow.now() start = end.replace(years=-5) data = { - 'reportType': 'detail', - 'startDate': start.format("MM/DD/YYYY"), - 'endDate': end.format("MM/DD/YYYY"), - 'structuredRecord': 'Y', - 'certStatus': 'Valid', + "reportType": "detail", + "startDate": start.format("MM/DD/YYYY"), + "endDate": end.format("MM/DD/YYYY"), + "structuredRecord": "Y", + "certStatus": "Valid", } current_app.logger.debug(data) response = self.session.post(url, data=data) diff --git a/lemur/plugins/lemur_verisign/tests/test_verisign.py b/lemur/plugins/lemur_verisign/tests/test_verisign.py index 8c4f1d81..42c528e8 100644 --- a/lemur/plugins/lemur_verisign/tests/test_verisign.py +++ b/lemur/plugins/lemur_verisign/tests/test_verisign.py @@ -1,4 +1,4 @@ - def test_get_certificates(app): from lemur.plugins.base import plugins - p = plugins.get('verisign-issuer') + + p = plugins.get("verisign-issuer") diff --git a/lemur/plugins/utils.py b/lemur/plugins/utils.py index e057d071..19655519 100644 --- a/lemur/plugins/utils.py +++ b/lemur/plugins/utils.py @@ -17,8 +17,8 @@ def get_plugin_option(name, options): :return: """ for o in options: - if o.get('name') == name: - return o.get('value', o.get('default')) + if o.get("name") == name: + return o.get("value", o.get("default")) def set_plugin_option(name, value, options): @@ -27,5 +27,5 @@ def set_plugin_option(name, value, options): :param options: """ for o in options: - if o.get('name') == name: - o.update({'value': value}) + if o.get("name") == name: + o.update({"value": value}) diff --git a/lemur/plugins/views.py b/lemur/plugins/views.py index dbdfccab..605b234a 100644 --- a/lemur/plugins/views.py +++ b/lemur/plugins/views.py @@ -15,12 +15,13 @@ from lemur.schemas import plugins_output_schema, plugin_output_schema from lemur.common.schema import validate_schema from lemur.plugins.base import plugins -mod = Blueprint('plugins', __name__) +mod = Blueprint("plugins", __name__) api = Api(mod) class PluginsList(AuthenticatedResource): """ Defines the 'plugins' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(PluginsList, self).__init__() @@ -69,17 +70,18 @@ class PluginsList(AuthenticatedResource): :reqheader Authorization: OAuth token to authenticate :statuscode 200: no error """ - self.reqparse.add_argument('type', type=str, location='args') + self.reqparse.add_argument("type", type=str, location="args") args = self.reqparse.parse_args() - if args['type']: - return list(plugins.all(plugin_type=args['type'])) + if args["type"]: + return list(plugins.all(plugin_type=args["type"])) return list(plugins.all()) class Plugins(AuthenticatedResource): """ Defines the 'plugins' endpoint """ + def __init__(self): super(Plugins, self).__init__() @@ -118,5 +120,5 @@ class Plugins(AuthenticatedResource): return plugins.get(name) -api.add_resource(PluginsList, '/plugins', endpoint='plugins') -api.add_resource(Plugins, '/plugins/', endpoint='pluginName') 
+api.add_resource(PluginsList, "/plugins", endpoint="plugins") +api.add_resource(Plugins, "/plugins/", endpoint="pluginName") diff --git a/lemur/policies/cli.py b/lemur/policies/cli.py index 725c1583..317f3414 100644 --- a/lemur/policies/cli.py +++ b/lemur/policies/cli.py @@ -12,8 +12,8 @@ from lemur.policies import service as policy_service manager = Manager(usage="Handles all policy related tasks.") -@manager.option('-d', '--days', dest='days', help='Number of days before expiration.') -@manager.option('-n', '--name', dest='name', help='Policy name.') +@manager.option("-d", "--days", dest="days", help="Number of days before expiration.") +@manager.option("-n", "--name", dest="name", help="Policy name.") def create(days, name): """ Create a new certificate rotation policy diff --git a/lemur/policies/models.py b/lemur/policies/models.py index 2329a347..a17d3ca1 100644 --- a/lemur/policies/models.py +++ b/lemur/policies/models.py @@ -12,10 +12,12 @@ from lemur.database import db class RotationPolicy(db.Model): - __tablename__ = 'rotation_policies' + __tablename__ = "rotation_policies" id = Column(Integer, primary_key=True) name = Column(String) days = Column(Integer) def __repr__(self): - return "RotationPolicy(days={days}, name={name})".format(days=self.days, name=self.name) + return "RotationPolicy(days={days}, name={name})".format( + days=self.days, name=self.name + ) diff --git a/lemur/policies/service.py b/lemur/policies/service.py index 10e9053b..cb43d52e 100644 --- a/lemur/policies/service.py +++ b/lemur/policies/service.py @@ -24,7 +24,7 @@ def get_by_name(policy_name): :param policy_name: :return: """ - return database.get_all(RotationPolicy, policy_name, field='name').all() + return database.get_all(RotationPolicy, policy_name, field="name").all() def delete(policy_id): diff --git a/lemur/reporting/cli.py b/lemur/reporting/cli.py index 8f797c33..c92b79cd 100644 --- a/lemur/reporting/cli.py +++ b/lemur/reporting/cli.py @@ -13,49 +13,73 @@ from lemur.reporting.service import fqdns, expiring_certificates manager = Manager(usage="Reporting related tasks.") -@manager.option('-v', '--validity', dest='validity', choices=['all', 'expired', 'valid'], default='all', help='Filter certificates by validity.') -@manager.option('-d', '--deployment', dest='deployment', choices=['all', 'deployed', 'ready'], default='all', help='Filter by deployment status.') +@manager.option( + "-v", + "--validity", + dest="validity", + choices=["all", "expired", "valid"], + default="all", + help="Filter certificates by validity.", +) +@manager.option( + "-d", + "--deployment", + dest="deployment", + choices=["all", "deployed", "ready"], + default="all", + help="Filter by deployment status.", +) def fqdn(deployment, validity): """ Generates a report in order to determine the number of FQDNs covered by Lemur issued certificates. 
""" - headers = ['FQDN', 'Root Domain', 'Issuer', 'Owner', 'Validity End', 'Total Length (days), Time Until Expiration (days)'] + headers = [ + "FQDN", + "Root Domain", + "Issuer", + "Owner", + "Validity End", + "Total Length (days), Time Until Expiration (days)", + ] rows = [] for cert in fqdns(validity=validity, deployment=deployment).all(): for domain in cert.domains: - rows.append([ - domain.name, - '.'.join(domain.name.split('.')[1:]), - cert.issuer, - cert.owner, - cert.not_after, - cert.validity_range.days, - cert.validity_remaining.days - ]) + rows.append( + [ + domain.name, + ".".join(domain.name.split(".")[1:]), + cert.issuer, + cert.owner, + cert.not_after, + cert.validity_range.days, + cert.validity_remaining.days, + ] + ) print(tabulate(rows, headers=headers)) -@manager.option('-ttl', '--ttl', dest='ttl', default=30, help='Days til expiration.') -@manager.option('-d', '--deployment', dest='deployment', choices=['all', 'deployed', 'ready'], default='all', help='Filter by deployment status.') +@manager.option("-ttl", "--ttl", dest="ttl", default=30, help="Days til expiration.") +@manager.option( + "-d", + "--deployment", + dest="deployment", + choices=["all", "deployed", "ready"], + default="all", + help="Filter by deployment status.", +) def expiring(ttl, deployment): """ Returns certificates expiring in the next n days. """ - headers = ['Common Name', 'Owner', 'Issuer', 'Validity End', 'Endpoint'] + headers = ["Common Name", "Owner", "Issuer", "Validity End", "Endpoint"] rows = [] for cert in expiring_certificates(ttl=ttl, deployment=deployment).all(): for endpoint in cert.endpoints: rows.append( - [ - cert.cn, - cert.owner, - cert.issuer, - cert.not_after, - endpoint.dnsname - ] + [cert.cn, cert.owner, cert.issuer, cert.not_after, endpoint.dnsname] ) print(tabulate(rows, headers=headers)) diff --git a/lemur/reporting/service.py b/lemur/reporting/service.py index 348cf2f4..77eb7b3e 100644 --- a/lemur/reporting/service.py +++ b/lemur/reporting/service.py @@ -9,10 +9,10 @@ from lemur.certificates.models import Certificate def filter_by_validity(query, validity=None): - if validity == 'expired': + if validity == "expired": query = query.filter(Certificate.expired == True) # noqa - elif validity == 'valid': + elif validity == "valid": query = query.filter(Certificate.expired == False) # noqa return query @@ -33,10 +33,10 @@ def filter_by_issuer(query, issuer=None): def filter_by_deployment(query, deployment=None): - if deployment == 'deployed': + if deployment == "deployed": query = query.filter(Certificate.endpoints.any()) - elif deployment == 'ready': + elif deployment == "ready": query = query.filter(not_(Certificate.endpoints.any())) return query @@ -55,8 +55,8 @@ def fqdns(**kwargs): :return: """ query = database.session_query(Certificate) - query = filter_by_deployment(query, deployment=kwargs.get('deployed')) - query = filter_by_validity(query, validity=kwargs.get('validity')) + query = filter_by_deployment(query, deployment=kwargs.get("deployed")) + query = filter_by_validity(query, validity=kwargs.get("validity")) return query @@ -65,13 +65,13 @@ def expiring_certificates(**kwargs): Returns an Expiring report. 
:return: """ - ttl = kwargs.get('ttl', 30) + ttl = kwargs.get("ttl", 30) now = arrow.utcnow() validity_end = now + timedelta(days=ttl) query = database.session_query(Certificate) - query = filter_by_deployment(query, deployment=kwargs.get('deployed')) - query = filter_by_validity(query, validity='valid') + query = filter_by_deployment(query, deployment=kwargs.get("deployed")) + query = filter_by_validity(query, validity="valid") query = filter_by_validity_end(query, validity_end=validity_end) return query diff --git a/lemur/roles/models.py b/lemur/roles/models.py index 85bf1bf1..91b5d58c 100644 --- a/lemur/roles/models.py +++ b/lemur/roles/models.py @@ -14,26 +14,42 @@ from sqlalchemy import Boolean, Column, Integer, String, Text, ForeignKey from lemur.database import db from lemur.utils import Vault -from lemur.models import roles_users, roles_authorities, roles_certificates, \ - pending_cert_role_associations +from lemur.models import ( + roles_users, + roles_authorities, + roles_certificates, + pending_cert_role_associations, +) class Role(db.Model): - __tablename__ = 'roles' + __tablename__ = "roles" id = Column(Integer, primary_key=True) name = Column(String(128), unique=True) username = Column(String(128)) password = Column(Vault) description = Column(Text) - authority_id = Column(Integer, ForeignKey('authorities.id')) - authorities = relationship("Authority", secondary=roles_authorities, passive_deletes=True, backref="role", cascade='all,delete') - user_id = Column(Integer, ForeignKey('users.id')) + authority_id = Column(Integer, ForeignKey("authorities.id")) + authorities = relationship( + "Authority", + secondary=roles_authorities, + passive_deletes=True, + backref="role", + cascade="all,delete", + ) + user_id = Column(Integer, ForeignKey("users.id")) third_party = Column(Boolean) - users = relationship("User", secondary=roles_users, passive_deletes=True, backref="role") - certificates = relationship("Certificate", secondary=roles_certificates, backref="role") - pending_certificates = relationship("PendingCertificate", secondary=pending_cert_role_associations, backref="role") + users = relationship( + "User", secondary=roles_users, passive_deletes=True, backref="role" + ) + certificates = relationship( + "Certificate", secondary=roles_certificates, backref="role" + ) + pending_certificates = relationship( + "PendingCertificate", secondary=pending_cert_role_associations, backref="role" + ) - sensitive_fields = ('password',) + sensitive_fields = ("password",) def __repr__(self): return "Role(name={name})".format(name=self.name) diff --git a/lemur/roles/service.py b/lemur/roles/service.py index bbeef1ce..51597d6e 100644 --- a/lemur/roles/service.py +++ b/lemur/roles/service.py @@ -47,7 +47,9 @@ def set_third_party(role_id, third_party_status=False): return role -def create(name, password=None, description=None, username=None, users=None, third_party=False): +def create( + name, password=None, description=None, username=None, users=None, third_party=False +): """ Create a new role @@ -58,7 +60,13 @@ def create(name, password=None, description=None, username=None, users=None, thi :param password: :return: """ - role = Role(name=name, description=description, username=username, password=password, third_party=third_party) + role = Role( + name=name, + description=description, + username=username, + password=password, + third_party=third_party, + ) if users: role.users = users @@ -83,7 +91,7 @@ def get_by_name(role_name): :param role_name: :return: """ - return database.get(Role, 
role_name, field='name') + return database.get(Role, role_name, field="name") def delete(role_id): @@ -105,9 +113,9 @@ def render(args): :return: """ query = database.session_query(Role) - filt = args.pop('filter') - user_id = args.pop('user_id', None) - authority_id = args.pop('authority_id', None) + filt = args.pop("filter") + user_id = args.pop("user_id", None) + authority_id = args.pop("authority_id", None) if user_id: query = query.filter(Role.users.any(User.id == user_id)) @@ -116,7 +124,7 @@ def render(args): query = query.filter(Role.authority_id == authority_id) if filt: - terms = filt.split(';') + terms = filt.split(";") query = database.filter(query, Role, terms) return database.sort_and_page(query, Role, args) diff --git a/lemur/roles/views.py b/lemur/roles/views.py index a635fdba..1e12f24b 100644 --- a/lemur/roles/views.py +++ b/lemur/roles/views.py @@ -17,15 +17,20 @@ from lemur.auth.permissions import RoleMemberPermission, admin_permission from lemur.common.utils import paginated_parser from lemur.common.schema import validate_schema -from lemur.roles.schemas import role_input_schema, role_output_schema, roles_output_schema +from lemur.roles.schemas import ( + role_input_schema, + role_output_schema, + roles_output_schema, +) -mod = Blueprint('roles', __name__) +mod = Blueprint("roles", __name__) api = Api(mod) class RolesList(AuthenticatedResource): """ Defines the 'roles' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(RolesList, self).__init__() @@ -79,11 +84,11 @@ class RolesList(AuthenticatedResource): :statuscode 403: unauthenticated """ parser = paginated_parser.copy() - parser.add_argument('owner', type=str, location='args') - parser.add_argument('id', type=str, location='args') + parser.add_argument("owner", type=str, location="args") + parser.add_argument("id", type=str, location="args") args = parser.parse_args() - args['user'] = g.current_user + args["user"] = g.current_user return service.render(args) @admin_permission.require(http_exception=403) @@ -135,8 +140,13 @@ class RolesList(AuthenticatedResource): :statuscode 200: no error :statuscode 403: unauthenticated """ - return service.create(data['name'], data.get('password'), data.get('description'), data.get('username'), - data.get('users')) + return service.create( + data["name"], + data.get("password"), + data.get("description"), + data.get("username"), + data.get("users"), + ) class RoleViewCredentials(AuthenticatedResource): @@ -177,11 +187,18 @@ class RoleViewCredentials(AuthenticatedResource): permission = RoleMemberPermission(role_id) if permission.can(): role = service.get(role_id) - response = make_response(jsonify(username=role.username, password=role.password), 200) - response.headers['cache-control'] = 'private, max-age=0, no-cache, no-store' - response.headers['pragma'] = 'no-cache' + response = make_response( + jsonify(username=role.username, password=role.password), 200 + ) + response.headers["cache-control"] = "private, max-age=0, no-cache, no-store" + response.headers["pragma"] = "no-cache" return response - return dict(message='You are not authorized to view the credentials for this role.'), 403 + return ( + dict( + message="You are not authorized to view the credentials for this role." 
+ ), + 403, + ) class Roles(AuthenticatedResource): @@ -227,7 +244,12 @@ class Roles(AuthenticatedResource): if permission.can(): return service.get(role_id) - return dict(message="You are not allowed to view a role which you are not a member of."), 403 + return ( + dict( + message="You are not allowed to view a role which you are not a member of." + ), + 403, + ) @validate_schema(role_input_schema, role_output_schema) def put(self, role_id, data=None): @@ -269,8 +291,10 @@ class Roles(AuthenticatedResource): """ permission = RoleMemberPermission(role_id) if permission.can(): - return service.update(role_id, data['name'], data.get('description'), data.get('users')) - return dict(message='You are not authorized to modify this role.'), 403 + return service.update( + role_id, data["name"], data.get("description"), data.get("users") + ) + return dict(message="You are not authorized to modify this role."), 403 @admin_permission.require(http_exception=403) def delete(self, role_id): @@ -304,11 +328,12 @@ class Roles(AuthenticatedResource): :statuscode 403: unauthenticated """ service.delete(role_id) - return {'message': 'ok'} + return {"message": "ok"} class UserRolesList(AuthenticatedResource): """ Defines the 'roles' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(UserRolesList, self).__init__() @@ -362,12 +387,13 @@ class UserRolesList(AuthenticatedResource): """ parser = paginated_parser.copy() args = parser.parse_args() - args['user_id'] = user_id + args["user_id"] = user_id return service.render(args) class AuthorityRolesList(AuthenticatedResource): """ Defines the 'roles' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(AuthorityRolesList, self).__init__() @@ -421,12 +447,18 @@ class AuthorityRolesList(AuthenticatedResource): """ parser = paginated_parser.copy() args = parser.parse_args() - args['authority_id'] = authority_id + args["authority_id"] = authority_id return service.render(args) -api.add_resource(RolesList, '/roles', endpoint='roles') -api.add_resource(Roles, '/roles/', endpoint='role') -api.add_resource(RoleViewCredentials, '/roles//credentials', endpoint='roleCredentials`') -api.add_resource(AuthorityRolesList, '/authorities//roles', endpoint='authorityRoles') -api.add_resource(UserRolesList, '/users//roles', endpoint='userRoles') +api.add_resource(RolesList, "/roles", endpoint="roles") +api.add_resource(Roles, "/roles/", endpoint="role") +api.add_resource( + RoleViewCredentials, "/roles//credentials", endpoint="roleCredentials`" +) +api.add_resource( + AuthorityRolesList, + "/authorities//roles", + endpoint="authorityRoles", +) +api.add_resource(UserRolesList, "/users//roles", endpoint="userRoles") diff --git a/lemur/schemas.py b/lemur/schemas.py index ffdfe66f..e7b0fd64 100644 --- a/lemur/schemas.py +++ b/lemur/schemas.py @@ -14,7 +14,12 @@ from marshmallow.exceptions import ValidationError from lemur.common import validators from lemur.common.schema import LemurSchema, LemurInputSchema, LemurOutputSchema -from lemur.common.fields import KeyUsageExtension, ExtendedKeyUsageExtension, BasicConstraintsExtension, SubjectAlternativeNameExtension +from lemur.common.fields import ( + KeyUsageExtension, + ExtendedKeyUsageExtension, + BasicConstraintsExtension, + SubjectAlternativeNameExtension, +) from lemur.plugins import plugins from lemur.plugins.utils import get_plugin_option @@ -34,40 +39,42 @@ def validate_options(options): :param options: :return: """ - interval = get_plugin_option('interval', options) - 
unit = get_plugin_option('unit', options) + interval = get_plugin_option("interval", options) + unit = get_plugin_option("unit", options) if not interval and not unit: return - if unit == 'month': + if unit == "month": interval *= 30 - elif unit == 'week': + elif unit == "week": interval *= 7 if interval > 90: - raise ValidationError('Notification cannot be more than 90 days into the future.') + raise ValidationError( + "Notification cannot be more than 90 days into the future." + ) def get_object_attribute(data, many=False): if many: - ids = [d.get('id') for d in data] - names = [d.get('name') for d in data] + ids = [d.get("id") for d in data] + names = [d.get("name") for d in data] if None in ids: if None in names: - raise ValidationError('Associated object require a name or id.') + raise ValidationError("Associated object require a name or id.") else: - return 'name' - return 'id' + return "name" + return "id" else: - if data.get('id'): - return 'id' - elif data.get('name'): - return 'name' + if data.get("id"): + return "id" + elif data.get("name"): + return "name" else: - raise ValidationError('Associated object require a name or id.') + raise ValidationError("Associated object require a name or id.") def fetch_objects(model, data, many=False): @@ -80,10 +87,11 @@ def fetch_objects(model, data, many=False): diff = set(values).symmetric_difference(set(found)) if diff: - raise ValidationError('Unable to locate {model} with {attr} {diff}'.format( - model=model, - attr=attr, - diff=",".join(list(diff)))) + raise ValidationError( + "Unable to locate {model} with {attr} {diff}".format( + model=model, attr=attr, diff=",".join(list(diff)) + ) + ) return items @@ -91,10 +99,11 @@ def fetch_objects(model, data, many=False): try: return model.query.filter(getattr(model, attr) == data[attr]).one() except NoResultFound: - raise ValidationError('Unable to find {model} with {attr}: {data}'.format( - model=model, - attr=attr, - data=data[attr])) + raise ValidationError( + "Unable to find {model} with {attr}: {data}".format( + model=model, attr=attr, data=data[attr] + ) + ) class AssociatedAuthoritySchema(LemurInputSchema): @@ -178,17 +187,19 @@ class PluginInputSchema(LemurInputSchema): @post_load def get_object(self, data, many=False): try: - data['plugin_object'] = plugins.get(data['slug']) + data["plugin_object"] = plugins.get(data["slug"]) # parse any sub-plugins - for option in data.get('plugin_options', []): - if 'plugin' in option.get('type', []): - sub_data, errors = PluginInputSchema().load(option['value']) - option['value'] = sub_data + for option in data.get("plugin_options", []): + if "plugin" in option.get("type", []): + sub_data, errors = PluginInputSchema().load(option["value"]) + option["value"] = sub_data return data except Exception as e: - raise ValidationError('Unable to find plugin. Slug: {0} Reason: {1}'.format(data['slug'], e)) + raise ValidationError( + "Unable to find plugin. 
Slug: {0} Reason: {1}".format(data["slug"], e) + ) class PluginOutputSchema(LemurOutputSchema): @@ -196,7 +207,7 @@ class PluginOutputSchema(LemurOutputSchema): label = fields.String() description = fields.String() active = fields.Boolean() - options = fields.List(fields.Dict(), dump_to='pluginOptions') + options = fields.List(fields.Dict(), dump_to="pluginOptions") slug = fields.String() title = fields.String() @@ -227,7 +238,7 @@ class CertificateInfoAccessSchema(BaseExtensionSchema): @post_dump def handle_keys(self, data): - return {'includeAIA': data['include_aia']} + return {"includeAIA": data["include_aia"]} class CRLDistributionPointsSchema(BaseExtensionSchema): @@ -235,7 +246,7 @@ class CRLDistributionPointsSchema(BaseExtensionSchema): @post_dump def handle_keys(self, data): - return {'includeCRLDP': data['include_crl_dp']} + return {"includeCRLDP": data["include_crl_dp"]} class SubjectKeyIdentifierSchema(BaseExtensionSchema): @@ -243,7 +254,7 @@ class SubjectKeyIdentifierSchema(BaseExtensionSchema): @post_dump def handle_keys(self, data): - return {'includeSKI': data['include_ski']} + return {"includeSKI": data["include_ski"]} class CustomOIDSchema(BaseExtensionSchema): @@ -258,14 +269,18 @@ class NamesSchema(BaseExtensionSchema): class ExtensionSchema(BaseExtensionSchema): - basic_constraints = BasicConstraintsExtension() # some devices balk on default basic constraints + basic_constraints = ( + BasicConstraintsExtension() + ) # some devices balk on default basic constraints key_usage = KeyUsageExtension() extended_key_usage = ExtendedKeyUsageExtension() subject_key_identifier = fields.Nested(SubjectKeyIdentifierSchema) sub_alt_names = fields.Nested(NamesSchema) authority_key_identifier = fields.Nested(AuthorityKeyIdentifierSchema) certificate_info_access = fields.Nested(CertificateInfoAccessSchema) - crl_distribution_points = fields.Nested(CRLDistributionPointsSchema, dump_to='cRL_distribution_points') + crl_distribution_points = fields.Nested( + CRLDistributionPointsSchema, dump_to="cRL_distribution_points" + ) # FIXME: Convert custom OIDs to a custom field in fields.py like other Extensions # FIXME: Remove support in UI for Critical custom extensions https://github.com/Netflix/lemur/issues/665 custom = fields.List(fields.Nested(CustomOIDSchema)) diff --git a/lemur/sources/cli.py b/lemur/sources/cli.py index 0ab8c9f8..c41a1cf7 100644 --- a/lemur/sources/cli.py +++ b/lemur/sources/cli.py @@ -35,24 +35,32 @@ def validate_sources(source_strings): table.append([source.label, source.active, source.description]) print("No source specified choose from below:") - print(tabulate(table, headers=['Label', 'Active', 'Description'])) + print(tabulate(table, headers=["Label", "Active", "Description"])) sys.exit(1) - if 'all' in source_strings: + if "all" in source_strings: sources = source_service.get_all() else: for source_str in source_strings: source = source_service.get_by_label(source_str) if not source: - print("Unable to find specified source with label: {0}".format(source_str)) + print( + "Unable to find specified source with label: {0}".format(source_str) + ) sys.exit(1) sources.append(source) return sources -@manager.option('-s', '--sources', dest='source_strings', action='append', help='Sources to operate on.') +@manager.option( + "-s", + "--sources", + dest="source_strings", + action="append", + help="Sources to operate on.", +) def sync(source_strings): sources = validate_sources(source_strings) for source in sources: @@ -61,26 +69,23 @@ def sync(source_strings): start_time = 
time.time() print("[+] Staring to sync source: {label}!\n".format(label=source.label)) - user = user_service.get_by_username('lemur') + user = user_service.get_by_username("lemur") try: data = source_service.sync(source, user) print( "[+] Certificates: New: {new} Updated: {updated}".format( - new=data['certificates'][0], - updated=data['certificates'][1] + new=data["certificates"][0], updated=data["certificates"][1] ) ) print( "[+] Endpoints: New: {new} Updated: {updated}".format( - new=data['endpoints'][0], - updated=data['endpoints'][1] + new=data["endpoints"][0], updated=data["endpoints"][1] ) ) print( "[+] Finished syncing source: {label}. Run Time: {time}".format( - label=source.label, - time=(time.time() - start_time) + label=source.label, time=(time.time() - start_time) ) ) status = SUCCESS_METRIC_STATUS @@ -88,27 +93,50 @@ def sync(source_strings): except Exception as e: current_app.logger.exception(e) - print( - "[X] Failed syncing source {label}!\n".format(label=source.label) - ) + print("[X] Failed syncing source {label}!\n".format(label=source.label)) sentry.captureException() - metrics.send('source_sync_fail', 'counter', 1, metric_tags={'source': source.label, 'status': status}) + metrics.send( + "source_sync_fail", + "counter", + 1, + metric_tags={"source": source.label, "status": status}, + ) - metrics.send('source_sync', 'counter', 1, metric_tags={'source': source.label, 'status': status}) + metrics.send( + "source_sync", + "counter", + 1, + metric_tags={"source": source.label, "status": status}, + ) -@manager.option('-s', '--sources', dest='source_strings', action='append', help='Sources to operate on.') -@manager.option('-c', '--commit', dest='commit', action='store_true', default=False, help='Persist changes.') +@manager.option( + "-s", + "--sources", + dest="source_strings", + action="append", + help="Sources to operate on.", +) +@manager.option( + "-c", + "--commit", + dest="commit", + action="store_true", + default=False, + help="Persist changes.", +) def clean(source_strings, commit): sources = validate_sources(source_strings) for source in sources: s = plugins.get(source.plugin_name) - if not hasattr(s, 'clean'): - print("Cannot clean source: {0}, source plugin does not implement 'clean()'".format( - source.label - )) + if not hasattr(s, "clean"): + print( + "Cannot clean source: {0}, source plugin does not implement 'clean()'".format( + source.label + ) + ) continue start_time = time.time() @@ -128,19 +156,23 @@ def clean(source_strings, commit): current_app.logger.exception(e) sentry.captureException() - metrics.send('clean', 'counter', 1, metric_tags={'source': source.label, 'status': status}) + metrics.send( + "clean", + "counter", + 1, + metric_tags={"source": source.label, "status": status}, + ) - current_app.logger.warning("Removed {0} from source {1} during cleaning".format( - certificate.name, - source.label - )) + current_app.logger.warning( + "Removed {0} from source {1} during cleaning".format( + certificate.name, source.label + ) + ) cleaned += 1 print( "[+] Finished cleaning source: {label}. Removed {cleaned} certificates from source. 
Run Time: {time}\n".format( - label=source.label, - time=(time.time() - start_time), - cleaned=cleaned + label=source.label, time=(time.time() - start_time), cleaned=cleaned ) ) diff --git a/lemur/sources/models.py b/lemur/sources/models.py index 071688d1..78dbb213 100644 --- a/lemur/sources/models.py +++ b/lemur/sources/models.py @@ -15,7 +15,7 @@ from sqlalchemy_utils import ArrowType class Source(db.Model): - __tablename__ = 'sources' + __tablename__ = "sources" id = Column(Integer, primary_key=True) label = Column(String(32), unique=True) options = Column(JSONType) diff --git a/lemur/sources/schemas.py b/lemur/sources/schemas.py index 028fdb32..5531293f 100644 --- a/lemur/sources/schemas.py +++ b/lemur/sources/schemas.py @@ -30,7 +30,7 @@ class SourceOutputSchema(LemurOutputSchema): @post_dump def fill_object(self, data): if data: - data['plugin']['pluginOptions'] = data['options'] + data["plugin"]["pluginOptions"] = data["options"] return data diff --git a/lemur/sources/service.py b/lemur/sources/service.py index a4d373ab..ec988623 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -29,9 +29,11 @@ def certificate_create(certificate, source): data, errors = CertificateUploadInputSchema().load(certificate) if errors: - raise Exception("Unable to import certificate: {reasons}".format(reasons=errors)) + raise Exception( + "Unable to import certificate: {reasons}".format(reasons=errors) + ) - data['creator'] = certificate['creator'] + data["creator"] = certificate["creator"] cert = certificate_service.import_certificate(**data) cert.description = "This certificate was automatically discovered by Lemur" @@ -70,33 +72,44 @@ def sync_endpoints(source): try: endpoints = s.get_endpoints(source.options) except NotImplementedError: - current_app.logger.warning("Unable to sync endpoints for source {0} plugin has not implemented 'get_endpoints'".format(source.label)) + current_app.logger.warning( + "Unable to sync endpoints for source {0} plugin has not implemented 'get_endpoints'".format( + source.label + ) + ) return new, updated for endpoint in endpoints: - exists = endpoint_service.get_by_dnsname_and_port(endpoint['dnsname'], endpoint['port']) + exists = endpoint_service.get_by_dnsname_and_port( + endpoint["dnsname"], endpoint["port"] + ) - certificate_name = endpoint.pop('certificate_name') + certificate_name = endpoint.pop("certificate_name") - endpoint['certificate'] = certificate_service.get_by_name(certificate_name) + endpoint["certificate"] = certificate_service.get_by_name(certificate_name) - if not endpoint['certificate']: + if not endpoint["certificate"]: current_app.logger.error( - "Certificate Not Found. Name: {0} Endpoint: {1}".format(certificate_name, endpoint['name'])) + "Certificate Not Found. 
Name: {0} Endpoint: {1}".format( + certificate_name, endpoint["name"] + ) + ) continue - policy = endpoint.pop('policy') + policy = endpoint.pop("policy") policy_ciphers = [] - for nc in policy['ciphers']: + for nc in policy["ciphers"]: policy_ciphers.append(endpoint_service.get_or_create_cipher(name=nc)) - policy['ciphers'] = policy_ciphers - endpoint['policy'] = endpoint_service.get_or_create_policy(**policy) - endpoint['source'] = source + policy["ciphers"] = policy_ciphers + endpoint["policy"] = endpoint_service.get_or_create_policy(**policy) + endpoint["source"] = source if not exists: - current_app.logger.debug("Endpoint Created: Name: {name}".format(name=endpoint['name'])) + current_app.logger.debug( + "Endpoint Created: Name: {name}".format(name=endpoint["name"]) + ) endpoint_service.create(**endpoint) new += 1 @@ -119,27 +132,27 @@ def sync_certificates(source, user): for certificate in certificates: exists = False - if certificate.get('search', None): - conditions = certificate.pop('search') + if certificate.get("search", None): + conditions = certificate.pop("search") exists = certificate_service.get_by_attributes(conditions) - if not exists and certificate.get('name'): - result = certificate_service.get_by_name(certificate['name']) + if not exists and certificate.get("name"): + result = certificate_service.get_by_name(certificate["name"]) if result: exists = [result] - if not exists and certificate.get('serial'): - exists = certificate_service.get_by_serial(certificate['serial']) + if not exists and certificate.get("serial"): + exists = certificate_service.get_by_serial(certificate["serial"]) if not exists: - cert = parse_certificate(certificate['body']) + cert = parse_certificate(certificate["body"]) matching_serials = certificate_service.get_by_serial(serial(cert)) exists = find_matching_certificates_by_hash(cert, matching_serials) - if not certificate.get('owner'): - certificate['owner'] = user.email + if not certificate.get("owner"): + certificate["owner"] = user.email - certificate['creator'] = user + certificate["creator"] = user exists = [x for x in exists if x] if not exists: @@ -148,10 +161,10 @@ def sync_certificates(source, user): else: for e in exists: - if certificate.get('external_id'): - e.external_id = certificate['external_id'] - if certificate.get('authority_id'): - e.authority_id = certificate['authority_id'] + if certificate.get("external_id"): + e.external_id = certificate["external_id"] + if certificate.get("authority_id"): + e.authority_id = certificate["authority_id"] certificate_update(e, source) updated += 1 @@ -165,7 +178,10 @@ def sync(source, user): source.last_run = arrow.utcnow() database.update(source) - return {'endpoints': (new_endpoints, updated_endpoints), 'certificates': (new_certs, updated_certs)} + return { + "endpoints": (new_endpoints, updated_endpoints), + "certificates": (new_certs, updated_certs), + } def create(label, plugin_name, options, description=None): @@ -179,7 +195,9 @@ def create(label, plugin_name, options, description=None): :rtype : Source :return: New source """ - source = Source(label=label, options=options, plugin_name=plugin_name, description=description) + source = Source( + label=label, options=options, plugin_name=plugin_name, description=description + ) return database.create(source) @@ -230,7 +248,7 @@ def get_by_label(label): :param label: :return: """ - return database.get(Source, label, field='label') + return database.get(Source, label, field="label") def get_all(): @@ -244,8 +262,8 @@ def get_all(): def 
render(args): - filt = args.pop('filter') - certificate_id = args.pop('certificate_id', None) + filt = args.pop("filter") + certificate_id = args.pop("certificate_id", None) if certificate_id: query = database.session_query(Source).join(Certificate, Source.certificate) @@ -254,7 +272,7 @@ def render(args): query = database.session_query(Source) if filt: - terms = filt.split(';') + terms = filt.split(";") query = database.filter(query, Source, terms) return database.sort_and_page(query, Source, args) @@ -272,21 +290,27 @@ def add_aws_destination_to_sources(dst): src_accounts = set() sources = get_all() for src in sources: - src_accounts.add(get_plugin_option('accountNumber', src.options)) + src_accounts.add(get_plugin_option("accountNumber", src.options)) # check destination_plugin = plugins.get(dst.plugin_name) - account_number = get_plugin_option('accountNumber', dst.options) - if account_number is not None and \ - destination_plugin.sync_as_source is not None and \ - destination_plugin.sync_as_source and \ - (account_number not in src_accounts): - src_options = copy.deepcopy(plugins.get(destination_plugin.sync_as_source_name).options) - set_plugin_option('accountNumber', account_number, src_options) - create(label=dst.label, - plugin_name=destination_plugin.sync_as_source_name, - options=src_options, - description=dst.description) + account_number = get_plugin_option("accountNumber", dst.options) + if ( + account_number is not None + and destination_plugin.sync_as_source is not None + and destination_plugin.sync_as_source + and (account_number not in src_accounts) + ): + src_options = copy.deepcopy( + plugins.get(destination_plugin.sync_as_source_name).options + ) + set_plugin_option("accountNumber", account_number, src_options) + create( + label=dst.label, + plugin_name=destination_plugin.sync_as_source_name, + options=src_options, + description=dst.description, + ) return True return False diff --git a/lemur/sources/views.py b/lemur/sources/views.py index abf68109..b74c4d80 100644 --- a/lemur/sources/views.py +++ b/lemur/sources/views.py @@ -11,19 +11,24 @@ from flask_restful import Api, reqparse from lemur.sources import service from lemur.common.schema import validate_schema -from lemur.sources.schemas import source_input_schema, source_output_schema, sources_output_schema +from lemur.sources.schemas import ( + source_input_schema, + source_output_schema, + sources_output_schema, +) from lemur.auth.service import AuthenticatedResource from lemur.auth.permissions import admin_permission from lemur.common.utils import paginated_parser -mod = Blueprint('sources', __name__) +mod = Blueprint("sources", __name__) api = Api(mod) class SourcesList(AuthenticatedResource): """ Defines the 'sources' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(SourcesList, self).__init__() @@ -151,7 +156,12 @@ class SourcesList(AuthenticatedResource): :reqheader Authorization: OAuth token to authenticate :statuscode 200: no error """ - return service.create(data['label'], data['plugin']['slug'], data['plugin']['plugin_options'], data['description']) + return service.create( + data["label"], + data["plugin"]["slug"], + data["plugin"]["plugin_options"], + data["description"], + ) class Sources(AuthenticatedResource): @@ -271,16 +281,22 @@ class Sources(AuthenticatedResource): :reqheader Authorization: OAuth token to authenticate :statuscode 200: no error """ - return service.update(source_id, data['label'], data['plugin']['plugin_options'], data['description']) + return 
service.update( + source_id, + data["label"], + data["plugin"]["plugin_options"], + data["description"], + ) @admin_permission.require(http_exception=403) def delete(self, source_id): service.delete(source_id) - return {'result': True} + return {"result": True} class CertificateSources(AuthenticatedResource): """ Defines the 'certificate/', endpoint='account') -api.add_resource(CertificateSources, '/certificates//sources', - endpoint='certificateSources') +api.add_resource(SourcesList, "/sources", endpoint="sources") +api.add_resource(Sources, "/sources/", endpoint="account") +api.add_resource( + CertificateSources, + "/certificates//sources", + endpoint="certificateSources", +) diff --git a/lemur/tests/conf.py b/lemur/tests/conf.py index 525200cf..6d0d6967 100644 --- a/lemur/tests/conf.py +++ b/lemur/tests/conf.py @@ -15,49 +15,51 @@ debug = False TESTING = True # this is the secret key used by flask session management -SECRET_KEY = 'I/dVhOZNSMZMqrFJa5tWli6VQccOGudKerq3eWPMSzQNmHHVhMAQfQ==' +SECRET_KEY = "I/dVhOZNSMZMqrFJa5tWli6VQccOGudKerq3eWPMSzQNmHHVhMAQfQ==" # You should consider storing these separately from your config -LEMUR_TOKEN_SECRET = 'test' -LEMUR_ENCRYPTION_KEYS = 'o61sBLNBSGtAckngtNrfVNd8xy8Hp9LBGDstTbMbqCY=' +LEMUR_TOKEN_SECRET = "test" +LEMUR_ENCRYPTION_KEYS = "o61sBLNBSGtAckngtNrfVNd8xy8Hp9LBGDstTbMbqCY=" # List of domain regular expressions that non-admin users can issue LEMUR_WHITELISTED_DOMAINS = [ - '^[a-zA-Z0-9-]+\.example\.com$', - '^[a-zA-Z0-9-]+\.example\.org$', - '^example\d+\.long\.com$', + "^[a-zA-Z0-9-]+\.example\.com$", + "^[a-zA-Z0-9-]+\.example\.org$", + "^example\d+\.long\.com$", ] # Mail Server # Lemur currently only supports SES for sending email, this address # needs to be verified -LEMUR_EMAIL = '' -LEMUR_SECURITY_TEAM_EMAIL = ['security@example.com'] +LEMUR_EMAIL = "" +LEMUR_SECURITY_TEAM_EMAIL = ["security@example.com"] -LEMUR_HOSTNAME = 'lemur.example.com' +LEMUR_HOSTNAME = "lemur.example.com" # Logging LOG_LEVEL = "DEBUG" LOG_FILE = "lemur.log" -LEMUR_DEFAULT_COUNTRY = 'US' -LEMUR_DEFAULT_STATE = 'California' -LEMUR_DEFAULT_LOCATION = 'Los Gatos' -LEMUR_DEFAULT_ORGANIZATION = 'Example, Inc.' -LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = 'Example' +LEMUR_DEFAULT_COUNTRY = "US" +LEMUR_DEFAULT_STATE = "California" +LEMUR_DEFAULT_LOCATION = "Los Gatos" +LEMUR_DEFAULT_ORGANIZATION = "Example, Inc." 
+LEMUR_DEFAULT_ORGANIZATIONAL_UNIT = "Example" LEMUR_ALLOW_WEEKEND_EXPIRATION = False # Database # modify this if you are not using a local database -SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI', 'postgresql://lemur:lemur@localhost:5432/lemur') +SQLALCHEMY_DATABASE_URI = os.getenv( + "SQLALCHEMY_DATABASE_URI", "postgresql://lemur:lemur@localhost:5432/lemur" +) SQLALCHEMY_TRACK_MODIFICATIONS = False # AWS -LEMUR_INSTANCE_PROFILE = 'Lemur' +LEMUR_INSTANCE_PROFILE = "Lemur" # Issuers @@ -72,21 +74,21 @@ LEMUR_INSTANCE_PROFILE = 'Lemur' # CLOUDCA_DEFAULT_VALIDITY = 2 -DIGICERT_URL = 'mock://www.digicert.com' -DIGICERT_ORDER_TYPE = 'ssl_plus' -DIGICERT_API_KEY = 'api-key' +DIGICERT_URL = "mock://www.digicert.com" +DIGICERT_ORDER_TYPE = "ssl_plus" +DIGICERT_API_KEY = "api-key" DIGICERT_ORG_ID = 111111 DIGICERT_ROOT = "ROOT" -VERISIGN_URL = 'http://example.com' -VERISIGN_PEM_PATH = '~/' -VERISIGN_FIRST_NAME = 'Jim' -VERISIGN_LAST_NAME = 'Bob' -VERSIGN_EMAIL = 'jim@example.com' +VERISIGN_URL = "http://example.com" +VERISIGN_PEM_PATH = "~/" +VERISIGN_FIRST_NAME = "Jim" +VERISIGN_LAST_NAME = "Bob" +VERSIGN_EMAIL = "jim@example.com" -ACME_AWS_ACCOUNT_NUMBER = '11111111111' +ACME_AWS_ACCOUNT_NUMBER = "11111111111" -ACME_PRIVATE_KEY = ''' +ACME_PRIVATE_KEY = """ -----BEGIN RSA PRIVATE KEY----- MIIJJwIBAAKCAgEA0+jySNCc1i73LwDZEuIdSkZgRYQ4ZQVIioVf38RUhDElxy51 4gdWZwp8/TDpQ8cVXMj6QhdRpTVLluOz71hdvBAjxXTISRCRlItzizTgBD9CLXRh @@ -138,7 +140,7 @@ cRe4df5/EbRiUOyx/ZBepttB1meTnsH6cGPN0JnmTMQHQvanL3jjtjrC13408ONK omsEEjDt4qVqGvSyy+V/1EhqGPzm9ri3zapnorf69rscuXYYsMBZ8M6AtSio4ldB LjCRNS1lR6/mV8AqUNR9Kn2NLQyJ76yDoEVLulKZqGUsC9STN4oGJLUeFw== -----END RSA PRIVATE KEY----- -''' +""" ACME_ROOT = """ -----BEGIN CERTIFICATE----- @@ -174,17 +176,17 @@ PB0t6JzUA81mSqM3kxl5e+IZwhYAyO0OTg3/fs8HqGTNKd9BqoUwSRBzp06JMg5b rUCGwbCUDI0mxadJ3Bz4WxR6fyNpBK2yAinWEsikxqEt -----END CERTIFICATE----- """ -ACME_URL = 'https://acme-v01.api.letsencrypt.org' -ACME_EMAIL = 'jim@example.com' -ACME_TEL = '4088675309' -ACME_DIRECTORY_URL = 'https://acme-v01.api.letsencrypt.org' +ACME_URL = "https://acme-v01.api.letsencrypt.org" +ACME_EMAIL = "jim@example.com" +ACME_TEL = "4088675309" +ACME_DIRECTORY_URL = "https://acme-v01.api.letsencrypt.org" ACME_DISABLE_AUTORESOLVE = True LDAP_AUTH = True -LDAP_BIND_URI = 'ldap://localhost' -LDAP_BASE_DN = 'dc=example,dc=com' -LDAP_EMAIL_DOMAIN = 'example.com' -LDAP_REQUIRED_GROUP = 'Lemur Access' -LDAP_DEFAULT_ROLE = 'role1' +LDAP_BIND_URI = "ldap://localhost" +LDAP_BASE_DN = "dc=example,dc=com" +LDAP_EMAIL_DOMAIN = "example.com" +LDAP_REQUIRED_GROUP = "Lemur Access" +LDAP_DEFAULT_ROLE = "role1" ALLOW_CERT_DELETION = True diff --git a/lemur/tests/conftest.py b/lemur/tests/conftest.py index 809b9a6a..2efd65d9 100644 --- a/lemur/tests/conftest.py +++ b/lemur/tests/conftest.py @@ -13,16 +13,34 @@ from lemur import create_app from lemur.common.utils import parse_private_key from lemur.database import db as _db from lemur.auth.service import create_token -from lemur.tests.vectors import SAN_CERT_KEY, INTERMEDIATE_KEY, ROOTCA_CERT_STR, ROOTCA_KEY +from lemur.tests.vectors import ( + SAN_CERT_KEY, + INTERMEDIATE_KEY, + ROOTCA_CERT_STR, + ROOTCA_KEY, +) -from .factories import ApiKeyFactory, AuthorityFactory, NotificationFactory, DestinationFactory, \ - CertificateFactory, UserFactory, RoleFactory, SourceFactory, EndpointFactory, \ - RotationPolicyFactory, PendingCertificateFactory, AsyncAuthorityFactory, InvalidCertificateFactory, \ - CryptoAuthorityFactory, CACertificateFactory +from .factories 
import ( + ApiKeyFactory, + AuthorityFactory, + NotificationFactory, + DestinationFactory, + CertificateFactory, + UserFactory, + RoleFactory, + SourceFactory, + EndpointFactory, + RotationPolicyFactory, + PendingCertificateFactory, + AsyncAuthorityFactory, + InvalidCertificateFactory, + CryptoAuthorityFactory, + CACertificateFactory, +) def pytest_runtest_setup(item): - if 'slow' in item.keywords and not item.config.getoption("--runslow"): + if "slow" in item.keywords and not item.config.getoption("--runslow"): pytest.skip("need --runslow option to run") if "incremental" in item.keywords: @@ -44,7 +62,9 @@ def app(request): Creates a new Flask application for a test duration. Uses application factory `create_app`. """ - _app = create_app(config_path=os.path.dirname(os.path.realpath(__file__)) + '/conf.py') + _app = create_app( + config_path=os.path.dirname(os.path.realpath(__file__)) + "/conf.py" + ) ctx = _app.app_context() ctx.push() @@ -56,15 +76,15 @@ def app(request): @pytest.yield_fixture(scope="session") def db(app, request): _db.drop_all() - _db.engine.execute(text('CREATE EXTENSION IF NOT EXISTS pg_trgm')) + _db.engine.execute(text("CREATE EXTENSION IF NOT EXISTS pg_trgm")) _db.create_all() _db.app = app UserFactory() - r = RoleFactory(name='admin') + r = RoleFactory(name="admin") u = UserFactory(roles=[r]) - rp = RotationPolicyFactory(name='default') + rp = RotationPolicyFactory(name="default") ApiKeyFactory(user=u) _db.session.commit() @@ -159,8 +179,8 @@ def user(session): u = UserFactory() session.commit() user_token = create_token(u) - token = {'Authorization': 'Basic ' + user_token} - return {'user': u, 'token': token} + token = {"Authorization": "Basic " + user_token} + return {"user": u, "token": token} @pytest.fixture @@ -203,18 +223,19 @@ def invalid_certificate(session): @pytest.fixture def admin_user(session): u = UserFactory() - admin_role = RoleFactory(name='admin') + admin_role = RoleFactory(name="admin") u.roles.append(admin_role) session.commit() user_token = create_token(u) - token = {'Authorization': 'Basic ' + user_token} - return {'user': u, 'token': token} + token = {"Authorization": "Basic " + user_token} + return {"user": u, "token": token} @pytest.fixture def async_issuer_plugin(): from lemur.plugins.base import register from .plugins.issuer_plugin import TestAsyncIssuerPlugin + register(TestAsyncIssuerPlugin) return TestAsyncIssuerPlugin @@ -223,6 +244,7 @@ def async_issuer_plugin(): def issuer_plugin(): from lemur.plugins.base import register from .plugins.issuer_plugin import TestIssuerPlugin + register(TestIssuerPlugin) return TestIssuerPlugin @@ -231,6 +253,7 @@ def issuer_plugin(): def notification_plugin(): from lemur.plugins.base import register from .plugins.notification_plugin import TestNotificationPlugin + register(TestNotificationPlugin) return TestNotificationPlugin @@ -239,6 +262,7 @@ def notification_plugin(): def destination_plugin(): from lemur.plugins.base import register from .plugins.destination_plugin import TestDestinationPlugin + register(TestDestinationPlugin) return TestDestinationPlugin @@ -247,6 +271,7 @@ def destination_plugin(): def source_plugin(): from lemur.plugins.base import register from .plugins.source_plugin import TestSourcePlugin + register(TestSourcePlugin) return TestSourcePlugin @@ -277,13 +302,19 @@ def issuer_private_key(): @pytest.fixture def cert_builder(private_key): - return (x509.CertificateBuilder() - .subject_name(x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, 'foo.com')])) - 
.issuer_name(x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, 'foo.com')])) - .serial_number(1) - .public_key(private_key.public_key()) - .not_valid_before(datetime.datetime(2017, 12, 22)) - .not_valid_after(datetime.datetime(2040, 1, 1))) + return ( + x509.CertificateBuilder() + .subject_name( + x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, "foo.com")]) + ) + .issuer_name( + x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, "foo.com")]) + ) + .serial_number(1) + .public_key(private_key.public_key()) + .not_valid_before(datetime.datetime(2017, 12, 22)) + .not_valid_after(datetime.datetime(2040, 1, 1)) + ) @pytest.fixture @@ -292,9 +323,9 @@ def selfsigned_cert(cert_builder, private_key): return cert_builder.sign(private_key, hashes.SHA256(), default_backend()) -@pytest.fixture(scope='function') +@pytest.fixture(scope="function") def aws_credentials(): - os.environ['AWS_ACCESS_KEY_ID'] = 'testing' - os.environ['AWS_SECRET_ACCESS_KEY'] = 'testing' - os.environ['AWS_SECURITY_TOKEN'] = 'testing' - os.environ['AWS_SESSION_TOKEN'] = 'testing' + os.environ["AWS_ACCESS_KEY_ID"] = "testing" + os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" + os.environ["AWS_SECURITY_TOKEN"] = "testing" + os.environ["AWS_SESSION_TOKEN"] = "testing" diff --git a/lemur/tests/factories.py b/lemur/tests/factories.py index de78f8a3..fea4c59a 100644 --- a/lemur/tests/factories.py +++ b/lemur/tests/factories.py @@ -1,4 +1,3 @@ - from datetime import date from factory import Sequence, post_generation, SubFactory @@ -19,8 +18,16 @@ from lemur.endpoints.models import Policy, Endpoint from lemur.policies.models import RotationPolicy from lemur.api_keys.models import ApiKey -from .vectors import SAN_CERT_STR, SAN_CERT_KEY, CSR_STR, INTERMEDIATE_CERT_STR, ROOTCA_CERT_STR, INTERMEDIATE_KEY, \ - WILDCARD_CERT_KEY, INVALID_CERT_STR +from .vectors import ( + SAN_CERT_STR, + SAN_CERT_KEY, + CSR_STR, + INTERMEDIATE_CERT_STR, + ROOTCA_CERT_STR, + INTERMEDIATE_KEY, + WILDCARD_CERT_KEY, + INVALID_CERT_STR, +) class BaseFactory(SQLAlchemyModelFactory): @@ -28,28 +35,32 @@ class BaseFactory(SQLAlchemyModelFactory): class Meta: """Factory configuration.""" + abstract = True sqlalchemy_session = db.session class RotationPolicyFactory(BaseFactory): """Rotation Factory.""" - name = Sequence(lambda n: 'policy{0}'.format(n)) + + name = Sequence(lambda n: "policy{0}".format(n)) days = 30 class Meta: """Factory configuration.""" + model = RotationPolicy class CertificateFactory(BaseFactory): """Certificate factory.""" - name = Sequence(lambda n: 'certificate{0}'.format(n)) + + name = Sequence(lambda n: "certificate{0}".format(n)) chain = INTERMEDIATE_CERT_STR body = SAN_CERT_STR private_key = SAN_CERT_KEY - owner = 'joe@example.com' - status = FuzzyChoice(['valid', 'revoked', 'unknown']) + owner = "joe@example.com" + status = FuzzyChoice(["valid", "revoked", "unknown"]) deleted = False description = FuzzyText(length=128) active = True @@ -58,6 +69,7 @@ class CertificateFactory(BaseFactory): class Meta: """Factory Configuration.""" + model = Certificate @post_generation @@ -139,20 +151,22 @@ class CACertificateFactory(CertificateFactory): class InvalidCertificateFactory(CertificateFactory): body = INVALID_CERT_STR - private_key = '' - chain = '' + private_key = "" + chain = "" class AuthorityFactory(BaseFactory): """Authority factory.""" - name = Sequence(lambda n: 'authority{0}'.format(n)) - owner = 'joe@example.com' - plugin = {'slug': 'test-issuer'} + + name = Sequence(lambda n: "authority{0}".format(n)) + owner = 
"joe@example.com" + plugin = {"slug": "test-issuer"} description = FuzzyText(length=128) authority_certificate = SubFactory(CACertificateFactory) class Meta: """Factory configuration.""" + model = Authority @post_generation @@ -167,54 +181,64 @@ class AuthorityFactory(BaseFactory): class AsyncAuthorityFactory(AuthorityFactory): """Async Authority factory.""" - name = Sequence(lambda n: 'authority{0}'.format(n)) - owner = 'joe@example.com' - plugin = {'slug': 'test-issuer-async'} + + name = Sequence(lambda n: "authority{0}".format(n)) + owner = "joe@example.com" + plugin = {"slug": "test-issuer-async"} description = FuzzyText(length=128) authority_certificate = SubFactory(CertificateFactory) class CryptoAuthorityFactory(AuthorityFactory): """Authority factory based on 'cryptography' plugin.""" - plugin = {'slug': 'cryptography-issuer'} + + plugin = {"slug": "cryptography-issuer"} class DestinationFactory(BaseFactory): """Destination factory.""" - plugin_name = 'test-destination' - label = Sequence(lambda n: 'destination{0}'.format(n)) + + plugin_name = "test-destination" + label = Sequence(lambda n: "destination{0}".format(n)) class Meta: """Factory Configuration.""" + model = Destination class SourceFactory(BaseFactory): """Source factory.""" - plugin_name = 'test-source' - label = Sequence(lambda n: 'source{0}'.format(n)) + + plugin_name = "test-source" + label = Sequence(lambda n: "source{0}".format(n)) class Meta: """Factory Configuration.""" + model = Source class NotificationFactory(BaseFactory): """Notification factory.""" - plugin_name = 'test-notification' - label = Sequence(lambda n: 'notification{0}'.format(n)) + + plugin_name = "test-notification" + label = Sequence(lambda n: "notification{0}".format(n)) class Meta: """Factory Configuration.""" + model = Notification class RoleFactory(BaseFactory): """Role factory.""" - name = Sequence(lambda n: 'role{0}'.format(n)) + + name = Sequence(lambda n: "role{0}".format(n)) class Meta: """Factory Configuration.""" + model = Role @post_generation @@ -229,14 +253,16 @@ class RoleFactory(BaseFactory): class UserFactory(BaseFactory): """User Factory.""" - username = Sequence(lambda n: 'user{0}'.format(n)) - email = Sequence(lambda n: 'user{0}@example.com'.format(n)) + + username = Sequence(lambda n: "user{0}".format(n)) + email = Sequence(lambda n: "user{0}@example.com".format(n)) active = True password = FuzzyText(length=24) certificates = [] class Meta: """Factory Configuration.""" + model = User @post_generation @@ -269,39 +295,45 @@ class UserFactory(BaseFactory): class PolicyFactory(BaseFactory): """Policy Factory.""" - name = Sequence(lambda n: 'endpoint{0}'.format(n)) + + name = Sequence(lambda n: "endpoint{0}".format(n)) class Meta: """Factory Configuration.""" + model = Policy class EndpointFactory(BaseFactory): """Endpoint Factory.""" - owner = 'joe@example.com' - name = Sequence(lambda n: 'endpoint{0}'.format(n)) - type = FuzzyChoice(['elb']) + + owner = "joe@example.com" + name = Sequence(lambda n: "endpoint{0}".format(n)) + type = FuzzyChoice(["elb"]) active = True port = FuzzyInteger(0, high=65535) - dnsname = 'endpoint.example.com' + dnsname = "endpoint.example.com" policy = SubFactory(PolicyFactory) certificate = SubFactory(CertificateFactory) source = SubFactory(SourceFactory) class Meta: """Factory Configuration.""" + model = Endpoint class ApiKeyFactory(BaseFactory): """Api Key Factory.""" - name = Sequence(lambda n: 'api_key_{0}'.format(n)) + + name = Sequence(lambda n: "api_key_{0}".format(n)) revoked = False ttl = -1 
issued_at = 1 class Meta: """Factory Configuration.""" + model = ApiKey @post_generation @@ -315,13 +347,14 @@ class ApiKeyFactory(BaseFactory): class PendingCertificateFactory(BaseFactory): """PendingCertificate factory.""" - name = Sequence(lambda n: 'pending_certificate{0}'.format(n)) + + name = Sequence(lambda n: "pending_certificate{0}".format(n)) external_id = 12345 csr = CSR_STR chain = INTERMEDIATE_CERT_STR private_key = WILDCARD_CERT_KEY - owner = 'joe@example.com' - status = FuzzyChoice(['valid', 'revoked', 'unknown']) + owner = "joe@example.com" + status = FuzzyChoice(["valid", "revoked", "unknown"]) deleted = False description = FuzzyText(length=128) date_created = FuzzyDate(date(2016, 1, 1), date(2020, 1, 1)) @@ -330,6 +363,7 @@ class PendingCertificateFactory(BaseFactory): class Meta: """Factory Configuration.""" + model = PendingCertificate @post_generation diff --git a/lemur/tests/plugins/destination_plugin.py b/lemur/tests/plugins/destination_plugin.py index f77085ec..d1eb6711 100644 --- a/lemur/tests/plugins/destination_plugin.py +++ b/lemur/tests/plugins/destination_plugin.py @@ -2,12 +2,12 @@ from lemur.plugins.bases import DestinationPlugin class TestDestinationPlugin(DestinationPlugin): - title = 'Test' - slug = 'test-destination' - description = 'Enables testing' + title = "Test" + slug = "test-destination" + description = "Enables testing" - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): super(TestDestinationPlugin, self).__init__(*args, **kwargs) diff --git a/lemur/tests/plugins/issuer_plugin.py b/lemur/tests/plugins/issuer_plugin.py index 3fda83ae..5f5c732b 100644 --- a/lemur/tests/plugins/issuer_plugin.py +++ b/lemur/tests/plugins/issuer_plugin.py @@ -4,12 +4,12 @@ from lemur.tests.vectors import SAN_CERT_STR, INTERMEDIATE_CERT_STR class TestIssuerPlugin(IssuerPlugin): - title = 'Test' - slug = 'test-issuer' - description = 'Enables testing' + title = "Test" + slug = "test-issuer" + description = "Enables testing" - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): super(TestIssuerPlugin, self).__init__(*args, **kwargs) @@ -20,17 +20,17 @@ class TestIssuerPlugin(IssuerPlugin): @staticmethod def create_authority(options): - role = {'username': '', 'password': '', 'name': 'test'} + role = {"username": "", "password": "", "name": "test"} return SAN_CERT_STR, "", [role] class TestAsyncIssuerPlugin(IssuerPlugin): - title = 'Test Async' - slug = 'test-issuer-async' - description = 'Enables testing with pending certificates' + title = "Test Async" + slug = "test-issuer-async" + description = "Enables testing with pending certificates" - author = 'James Chuong' - author_url = 'https://github.com/jchuong' + author = "James Chuong" + author_url = "https://github.com/jchuong" def __init__(self, *args, **kwargs): super(TestAsyncIssuerPlugin, self).__init__(*args, **kwargs) @@ -43,7 +43,7 @@ class TestAsyncIssuerPlugin(IssuerPlugin): @staticmethod def create_authority(options): - role = {'username': '', 'password': '', 'name': 'test'} + role = {"username": "", "password": "", "name": "test"} return SAN_CERT_STR, "", [role] def cancel_ordered_certificate(self, pending_certificate, **kwargs): diff --git a/lemur/tests/plugins/notification_plugin.py 
b/lemur/tests/plugins/notification_plugin.py index ad393d60..4ad79704 100644 --- a/lemur/tests/plugins/notification_plugin.py +++ b/lemur/tests/plugins/notification_plugin.py @@ -2,12 +2,12 @@ from lemur.plugins.bases import NotificationPlugin class TestNotificationPlugin(NotificationPlugin): - title = 'Test' - slug = 'test-notification' - description = 'Enables testing' + title = "Test" + slug = "test-notification" + description = "Enables testing" - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): super(TestNotificationPlugin, self).__init__(*args, **kwargs) diff --git a/lemur/tests/plugins/source_plugin.py b/lemur/tests/plugins/source_plugin.py index 10402576..21ce245d 100644 --- a/lemur/tests/plugins/source_plugin.py +++ b/lemur/tests/plugins/source_plugin.py @@ -2,12 +2,12 @@ from lemur.plugins.bases import SourcePlugin class TestSourcePlugin(SourcePlugin): - title = 'Test' - slug = 'test-source' - description = 'Enables testing' + title = "Test" + slug = "test-source" + description = "Enables testing" - author = 'Kevin Glisson' - author_url = 'https://github.com/netflix/lemur.git' + author = "Kevin Glisson" + author_url = "https://github.com/netflix/lemur.git" def __init__(self, *args, **kwargs): super(TestSourcePlugin, self).__init__(*args, **kwargs) diff --git a/lemur/tests/test_api_keys.py b/lemur/tests/test_api_keys.py index e60773bf..9e293be2 100644 --- a/lemur/tests/test_api_keys.py +++ b/lemur/tests/test_api_keys.py @@ -4,219 +4,398 @@ import pytest from lemur.api_keys.views import * # noqa -from .vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN +from .vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, +) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_api_key_list_get(client, token, status): assert client.get(api.url_for(ApiKeyList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_api_key_list_post_invalid(client, token, status): - assert client.post(api.url_for(ApiKeyList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(ApiKeyList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,user_id,status", [ - (VALID_USER_HEADER_TOKEN, 1, 200), - (VALID_ADMIN_HEADER_TOKEN, 2, 200), - (VALID_ADMIN_API_TOKEN, 2, 200), - ('', 0, 401) -]) +@pytest.mark.parametrize( + "token,user_id,status", + [ + (VALID_USER_HEADER_TOKEN, 1, 200), + (VALID_ADMIN_HEADER_TOKEN, 2, 200), + (VALID_ADMIN_API_TOKEN, 2, 200), + ("", 0, 401), + ], +) def test_api_key_list_post_valid_self(client, user_id, token, status): - assert client.post(api.url_for(ApiKeyList), data=json.dumps({'name': 'a test token', 'user': {'id': user_id, 'username': 'example', 'email': 'example@test.net'}, 'ttl': -1}), 
headers=token).status_code == status + assert ( + client.post( + api.url_for(ApiKeyList), + data=json.dumps( + { + "name": "a test token", + "user": { + "id": user_id, + "username": "example", + "email": "example@test.net", + }, + "ttl": -1, + } + ), + headers=token, + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_api_key_list_post_valid_no_permission(client, token, status): - assert client.post(api.url_for(ApiKeyList), data=json.dumps({'name': 'a test token', 'user': {'id': 2, 'username': 'example', 'email': 'example@test.net'}, 'ttl': -1}), headers=token).status_code == status + assert ( + client.post( + api.url_for(ApiKeyList), + data=json.dumps( + { + "name": "a test token", + "user": { + "id": 2, + "username": "example", + "email": "example@test.net", + }, + "ttl": -1, + } + ), + headers=token, + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_api_key_list_patch(client, token, status): - assert client.patch(api.url_for(ApiKeyList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(ApiKeyList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_api_key_list_delete(client, token, status): assert client.delete(api.url_for(ApiKeyList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_user_api_key_list_get(client, token, status): - assert client.get(api.url_for(ApiKeyUserList, user_id=1), headers=token).status_code == status + assert ( + client.get(api.url_for(ApiKeyUserList, user_id=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_user_api_key_list_post_invalid(client, token, status): - assert client.post(api.url_for(ApiKeyUserList, user_id=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(ApiKeyUserList, user_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,user_id,status", [ - (VALID_USER_HEADER_TOKEN, 1, 200), - (VALID_ADMIN_HEADER_TOKEN, 2, 
200), - (VALID_ADMIN_API_TOKEN, 2, 200), - ('', 0, 401) -]) +@pytest.mark.parametrize( + "token,user_id,status", + [ + (VALID_USER_HEADER_TOKEN, 1, 200), + (VALID_ADMIN_HEADER_TOKEN, 2, 200), + (VALID_ADMIN_API_TOKEN, 2, 200), + ("", 0, 401), + ], +) def test_user_api_key_list_post_valid_self(client, user_id, token, status): - assert client.post(api.url_for(ApiKeyUserList, user_id=1), data=json.dumps({'name': 'a test token', 'user': {'id': user_id}, 'ttl': -1}), headers=token).status_code == status + assert ( + client.post( + api.url_for(ApiKeyUserList, user_id=1), + data=json.dumps( + {"name": "a test token", "user": {"id": user_id}, "ttl": -1} + ), + headers=token, + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_user_api_key_list_post_valid_no_permission(client, token, status): - assert client.post(api.url_for(ApiKeyUserList, user_id=2), data=json.dumps({'name': 'a test token', 'user': {'id': 2}, 'ttl': -1}), headers=token).status_code == status + assert ( + client.post( + api.url_for(ApiKeyUserList, user_id=2), + data=json.dumps({"name": "a test token", "user": {"id": 2}, "ttl": -1}), + headers=token, + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_user_api_key_list_patch(client, token, status): - assert client.patch(api.url_for(ApiKeyUserList, user_id=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(ApiKeyUserList, user_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_user_api_key_list_delete(client, token, status): - assert client.delete(api.url_for(ApiKeyUserList, user_id=1), headers=token).status_code == status + assert ( + client.delete(api.url_for(ApiKeyUserList, user_id=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) -@pytest.mark.skip(reason="no way of getting an actual user onto the access key to generate a jwt") +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) +@pytest.mark.skip( + reason="no way of getting an actual user onto the access key to generate a jwt" +) def test_api_key_get(client, token, status): assert client.get(api.url_for(ApiKeys, aid=1), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( 
+ "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_api_key_post(client, token, status): assert client.post(api.url_for(ApiKeys, aid=1), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_api_key_patch(client, token, status): - assert client.patch(api.url_for(ApiKeys, aid=1), headers=token).status_code == status + assert ( + client.patch(api.url_for(ApiKeys, aid=1), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) -@pytest.mark.skip(reason="no way of getting an actual user onto the access key to generate a jwt") +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) +@pytest.mark.skip( + reason="no way of getting an actual user onto the access key to generate a jwt" +) def test_api_key_put_permssions(client, token, status): - assert client.put(api.url_for(ApiKeys, aid=1), data=json.dumps({'name': 'Test', 'revoked': False, 'ttl': -1}), headers=token).status_code == status + assert ( + client.put( + api.url_for(ApiKeys, aid=1), + data=json.dumps({"name": "Test", "revoked": False, "ttl": -1}), + headers=token, + ).status_code + == status + ) # This test works while the other doesn't because the schema allows user id to be null. 
-@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_api_key_described_get(client, token, status): - assert client.get(api.url_for(ApiKeysDescribed, aid=1), headers=token).status_code == status + assert ( + client.get(api.url_for(ApiKeysDescribed, aid=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) -@pytest.mark.skip(reason="no way of getting an actual user onto the access key to generate a jwt") +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) +@pytest.mark.skip( + reason="no way of getting an actual user onto the access key to generate a jwt" +) def test_user_api_key_get(client, token, status): - assert client.get(api.url_for(UserApiKeys, uid=1, aid=1), headers=token).status_code == status + assert ( + client.get(api.url_for(UserApiKeys, uid=1, aid=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_user_api_key_post(client, token, status): - assert client.post(api.url_for(UserApiKeys, uid=2, aid=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(UserApiKeys, uid=2, aid=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_user_api_key_patch(client, token, status): - assert client.patch(api.url_for(UserApiKeys, uid=2, aid=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(UserApiKeys, uid=2, aid=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) -@pytest.mark.skip(reason="no way of getting an actual user onto the access key to generate a jwt") +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) +@pytest.mark.skip( + reason="no way of getting an actual user onto the access key to generate a jwt" +) def test_user_api_key_put_permssions(client, token, status): - assert client.put(api.url_for(UserApiKeys, uid=2, aid=1), data=json.dumps({'name': 'Test', 'revoked': False, 'ttl': -1}), headers=token).status_code == status + assert ( + client.put( + api.url_for(UserApiKeys, uid=2, aid=1), + data=json.dumps({"name": "Test", "revoked": False, "ttl": -1}), + headers=token, + ).status_code + == status + ) diff 
--git a/lemur/tests/test_authorities.py b/lemur/tests/test_authorities.py index e865ab41..9649e949 100644 --- a/lemur/tests/test_authorities.py +++ b/lemur/tests/test_authorities.py @@ -4,22 +4,29 @@ import pytest from lemur.authorities.views import * # noqa from lemur.tests.factories import AuthorityFactory, RoleFactory -from lemur.tests.vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN +from lemur.tests.vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, +) def test_authority_input_schema(client, role, issuer_plugin, logged_in_user): from lemur.authorities.schemas import AuthorityInputSchema input_data = { - 'name': 'Example Authority', - 'owner': 'jim@example.com', - 'description': 'An example authority.', - 'commonName': 'An Example Authority', - 'plugin': {'slug': 'test-issuer', 'plugin_options': [{'name': 'test', 'value': 'blah'}]}, - 'type': 'root', - 'signingAlgorithm': 'sha256WithRSA', - 'keyType': 'RSA2048', - 'sensitivity': 'medium' + "name": "Example Authority", + "owner": "jim@example.com", + "description": "An example authority.", + "commonName": "An Example Authority", + "plugin": { + "slug": "test-issuer", + "plugin_options": [{"name": "test", "value": "blah"}], + }, + "type": "root", + "signingAlgorithm": "sha256WithRSA", + "keyType": "RSA2048", + "sensitivity": "medium", } data, errors = AuthorityInputSchema().load(input_data) @@ -28,179 +35,286 @@ def test_authority_input_schema(client, role, issuer_plugin, logged_in_user): def test_user_authority(session, client, authority, role, user, issuer_plugin): - u = user['user'] + u = user["user"] u.roles.append(role) authority.roles.append(role) session.commit() - assert client.get(api.url_for(AuthoritiesList), headers=user['token']).json['total'] == 1 + assert ( + client.get(api.url_for(AuthoritiesList), headers=user["token"]).json["total"] + == 1 + ) u.roles.remove(role) session.commit() - assert client.get(api.url_for(AuthoritiesList), headers=user['token']).json['total'] == 0 + assert ( + client.get(api.url_for(AuthoritiesList), headers=user["token"]).json["total"] + == 0 + ) def test_create_authority(issuer_plugin, user): from lemur.authorities.service import create - authority = create(plugin={'plugin_object': issuer_plugin, 'slug': issuer_plugin.slug}, owner='jim@example.com', type='root', creator=user['user']) + + authority = create( + plugin={"plugin_object": issuer_plugin, "slug": issuer_plugin.slug}, + owner="jim@example.com", + type="root", + creator=user["user"], + ) assert authority.authority_certificate -@pytest.mark.parametrize("token, count", [ - (VALID_USER_HEADER_TOKEN, 0), - (VALID_ADMIN_HEADER_TOKEN, 3), - (VALID_ADMIN_API_TOKEN, 3), -]) +@pytest.mark.parametrize( + "token, count", + [ + (VALID_USER_HEADER_TOKEN, 0), + (VALID_ADMIN_HEADER_TOKEN, 3), + (VALID_ADMIN_API_TOKEN, 3), + ], +) def test_admin_authority(client, authority, issuer_plugin, token, count): - assert client.get(api.url_for(AuthoritiesList), headers=token).json['total'] == count + assert ( + client.get(api.url_for(AuthoritiesList), headers=token).json["total"] == count + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_authority_get(client, token, status): - assert 
client.get(api.url_for(Authorities, authority_id=1), headers=token).status_code == status + assert ( + client.get(api.url_for(Authorities, authority_id=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_authority_post(client, token, status): - assert client.post(api.url_for(Authorities, authority_id=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(Authorities, authority_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_authority_put(client, token, status): - assert client.put(api.url_for(Authorities, authority_id=1), data={}, headers=token).status_code == status + assert ( + client.put( + api.url_for(Authorities, authority_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_authority_delete(client, token, status): - assert client.delete(api.url_for(Authorities, authority_id=1), headers=token).status_code == status + assert ( + client.delete( + api.url_for(Authorities, authority_id=1), headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_authority_patch(client, token, status): - assert client.patch(api.url_for(Authorities, authority_id=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(Authorities, authority_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_authorities_get(client, token, status): assert client.get(api.url_for(AuthoritiesList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_authorities_post(client, token, status): - assert client.post(api.url_for(AuthoritiesList), data={}, 
headers=token).status_code == status + assert ( + client.post(api.url_for(AuthoritiesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_authorities_put(client, token, status): - assert client.put(api.url_for(AuthoritiesList), data={}, headers=token).status_code == status + assert ( + client.put(api.url_for(AuthoritiesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_authorities_delete(client, token, status): - assert client.delete(api.url_for(AuthoritiesList), headers=token).status_code == status + assert ( + client.delete(api.url_for(AuthoritiesList), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_authorities_patch(client, token, status): - assert client.patch(api.url_for(AuthoritiesList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(AuthoritiesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_certificate_authorities_get(client, token, status): assert client.get(api.url_for(AuthoritiesList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_certificate_authorities_post(client, token, status): - assert client.post(api.url_for(AuthoritiesList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(AuthoritiesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificate_authorities_put(client, token, status): - assert client.put(api.url_for(AuthoritiesList), data={}, headers=token).status_code == status + assert ( + client.put(api.url_for(AuthoritiesList), data={}, headers=token).status_code + == status 
+ ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificate_authorities_delete(client, token, status): - assert client.delete(api.url_for(AuthoritiesList), headers=token).status_code == status + assert ( + client.delete(api.url_for(AuthoritiesList), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificate_authorities_patch(client, token, status): - assert client.patch(api.url_for(AuthoritiesList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(AuthoritiesList), data={}, headers=token).status_code + == status + ) def test_authority_roles(client, session, issuer_plugin): @@ -209,23 +323,29 @@ def test_authority_roles(client, session, issuer_plugin): session.flush() data = { - 'owner': auth.owner, - 'name': auth.name, - 'description': auth.description, - 'active': True, - 'roles': [ - {'id': role.id}, - ], + "owner": auth.owner, + "name": auth.name, + "description": auth.description, + "active": True, + "roles": [{"id": role.id}], } # Add role - resp = client.put(api.url_for(Authorities, authority_id=auth.id), data=json.dumps(data), headers=VALID_ADMIN_HEADER_TOKEN) + resp = client.put( + api.url_for(Authorities, authority_id=auth.id), + data=json.dumps(data), + headers=VALID_ADMIN_HEADER_TOKEN, + ) assert resp.status_code == 200 - assert len(resp.json['roles']) == 1 + assert len(resp.json["roles"]) == 1 assert set(auth.roles) == {role} # Remove role - del data['roles'][0] - resp = client.put(api.url_for(Authorities, authority_id=auth.id), data=json.dumps(data), headers=VALID_ADMIN_HEADER_TOKEN) + del data["roles"][0] + resp = client.put( + api.url_for(Authorities, authority_id=auth.id), + data=json.dumps(data), + headers=VALID_ADMIN_HEADER_TOKEN, + ) assert resp.status_code == 200 - assert len(resp.json['roles']) == 0 + assert len(resp.json["roles"]) == 0 diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index cc8a5224..07b5ee4e 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -17,32 +17,53 @@ from lemur.common import utils from lemur.domains.models import Domain -from lemur.tests.vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN, CSR_STR, \ - INTERMEDIATE_CERT_STR, SAN_CERT_STR, SAN_CERT_CSR, SAN_CERT_KEY, ROOTCA_KEY, ROOTCA_CERT_STR +from lemur.tests.vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, + CSR_STR, + INTERMEDIATE_CERT_STR, + SAN_CERT_STR, + SAN_CERT_CSR, + SAN_CERT_KEY, + ROOTCA_KEY, + ROOTCA_CERT_STR, +) def test_get_or_increase_name(session, certificate): from lemur.certificates.models import get_or_increase_name from lemur.tests.factories import CertificateFactory - serial = 'AFF2DB4F8D2D4D8E80FA382AE27C2333' + serial = "AFF2DB4F8D2D4D8E80FA382AE27C2333" - assert get_or_increase_name(certificate.name, certificate.serial) == '{0}-{1}'.format(certificate.name, 
serial) + assert get_or_increase_name( + certificate.name, certificate.serial + ) == "{0}-{1}".format(certificate.name, serial) - certificate.name = 'test-cert-11111111' - assert get_or_increase_name(certificate.name, certificate.serial) == 'test-cert-11111111-' + serial + certificate.name = "test-cert-11111111" + assert ( + get_or_increase_name(certificate.name, certificate.serial) + == "test-cert-11111111-" + serial + ) - certificate.name = 'test-cert-11111111-1' - assert get_or_increase_name('test-cert-11111111-1', certificate.serial) == 'test-cert-11111111-1-' + serial + certificate.name = "test-cert-11111111-1" + assert ( + get_or_increase_name("test-cert-11111111-1", certificate.serial) + == "test-cert-11111111-1-" + serial + ) - cert2 = CertificateFactory(name='certificate1-' + serial) + cert2 = CertificateFactory(name="certificate1-" + serial) session.commit() - assert get_or_increase_name('certificate1', int(serial, 16)) == 'certificate1-{}-1'.format(serial) + assert get_or_increase_name( + "certificate1", int(serial, 16) + ) == "certificate1-{}-1".format(serial) def test_get_all_certs(session, certificate): from lemur.certificates.service import get_all_certs + assert len(get_all_certs()) > 1 @@ -66,7 +87,7 @@ def test_delete_cert(session): from lemur.certificates.service import delete, get from lemur.tests.factories import CertificateFactory - delete_this = CertificateFactory(name='DELETEME') + delete_this = CertificateFactory(name="DELETEME") session.commit() cert_exists = get(delete_this.id) @@ -85,21 +106,24 @@ def test_get_by_attributes(session, certificate): from lemur.certificates.service import get_by_attributes # Should get one cert - certificate1 = get_by_attributes({ - 'name': 'SAN-san.example.org-LemurTrustUnittestsClass1CA2018-20171231-20471231' - }) + certificate1 = get_by_attributes( + { + "name": "SAN-san.example.org-LemurTrustUnittestsClass1CA2018-20171231-20471231" + } + ) # Should get one cert using multiple attrs - certificate2 = get_by_attributes({ - 'name': 'test-cert-11111111-1', - 'cn': 'san.example.org' - }) + certificate2 = get_by_attributes( + {"name": "test-cert-11111111-1", "cn": "san.example.org"} + ) # Should get multiple certs - multiple = get_by_attributes({ - 'cn': 'LemurTrust Unittests Class 1 CA 2018', - 'issuer': 'LemurTrustUnittestsRootCA2018' - }) + multiple = get_by_attributes( + { + "cn": "LemurTrust Unittests Class 1 CA 2018", + "issuer": "LemurTrustUnittestsRootCA2018", + } + ) assert len(certificate1) == 1 assert len(certificate2) == 1 @@ -109,14 +133,11 @@ def test_get_by_attributes(session, certificate): def test_find_duplicates(session): from lemur.certificates.service import find_duplicates - cert = { - 'body': SAN_CERT_STR, - 'chain': INTERMEDIATE_CERT_STR - } + cert = {"body": SAN_CERT_STR, "chain": INTERMEDIATE_CERT_STR} dups1 = find_duplicates(cert) - cert['chain'] = '' + cert["chain"] = "" dups2 = find_duplicates(cert) @@ -138,13 +159,15 @@ def test_certificate_output_schema(session, certificate, issuer_plugin): from lemur.certificates.schemas import CertificateOutputSchema # Clear the cached attribute first - if 'parsed_cert' in certificate.__dict__: - del certificate.__dict__['parsed_cert'] + if "parsed_cert" in certificate.__dict__: + del certificate.__dict__["parsed_cert"] # Make sure serialization parses the cert only once (uses cached 'parsed_cert' attribute) - with patch('lemur.common.utils.parse_certificate', side_effect=utils.parse_certificate) as wrapper: + with patch( + "lemur.common.utils.parse_certificate", 
side_effect=utils.parse_certificate + ) as wrapper: data, errors = CertificateOutputSchema().dump(certificate) - assert data['issuer'] == 'LemurTrustUnittestsClass1CA2018' + assert data["issuer"] == "LemurTrustUnittestsClass1CA2018" assert wrapper.call_count == 1 @@ -152,24 +175,21 @@ def test_certificate_output_schema(session, certificate, issuer_plugin): def test_certificate_edit_schema(session): from lemur.certificates.schemas import CertificateEditInputSchema - input_data = {'owner': 'bob@example.com'} + input_data = {"owner": "bob@example.com"} data, errors = CertificateEditInputSchema().load(input_data) - assert len(data['notifications']) == 3 + assert len(data["notifications"]) == 3 def test_authority_key_identifier_schema(): from lemur.schemas import AuthorityKeyIdentifierSchema - input_data = { - 'useKeyIdentifier': True, - 'useAuthorityCert': True - } + + input_data = {"useKeyIdentifier": True, "useAuthorityCert": True} data, errors = AuthorityKeyIdentifierSchema().load(input_data) - assert sorted(data) == sorted({ - 'use_key_identifier': True, - 'use_authority_cert': True - }) + assert sorted(data) == sorted( + {"use_key_identifier": True, "use_authority_cert": True} + ) assert not errors data, errors = AuthorityKeyIdentifierSchema().dumps(data) @@ -179,11 +199,12 @@ def test_authority_key_identifier_schema(): def test_certificate_info_access_schema(): from lemur.schemas import CertificateInfoAccessSchema - input_data = {'includeAIA': True} + + input_data = {"includeAIA": True} data, errors = CertificateInfoAccessSchema().load(input_data) assert not errors - assert data == {'include_aia': True} + assert data == {"include_aia": True} data, errors = CertificateInfoAccessSchema().dump(data) assert not errors @@ -193,11 +214,11 @@ def test_certificate_info_access_schema(): def test_subject_key_identifier_schema(): from lemur.schemas import SubjectKeyIdentifierSchema - input_data = {'includeSKI': True} + input_data = {"includeSKI": True} data, errors = SubjectKeyIdentifierSchema().load(input_data) assert not errors - assert data == {'include_ski': True} + assert data == {"include_ski": True} data, errors = SubjectKeyIdentifierSchema().dump(data) assert not errors assert data == input_data @@ -207,16 +228,9 @@ def test_extension_schema(client): from lemur.certificates.schemas import ExtensionSchema input_data = { - 'keyUsage': { - 'useKeyEncipherment': True, - 'useDigitalSignature': True - }, - 'extendedKeyUsage': { - 'useServerAuthentication': True - }, - 'subjectKeyIdentifier': { - 'includeSKI': True - } + "keyUsage": {"useKeyEncipherment": True, "useDigitalSignature": True}, + "extendedKeyUsage": {"useServerAuthentication": True}, + "subjectKeyIdentifier": {"includeSKI": True}, } data, errors = ExtensionSchema().load(input_data) @@ -230,24 +244,24 @@ def test_certificate_input_schema(client, authority): from lemur.certificates.schemas import CertificateInputSchema input_data = { - 'commonName': 'test.example.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'validityStart': arrow.get(2018, 11, 9).isoformat(), - 'validityEnd': arrow.get(2019, 11, 9).isoformat(), - 'dnsProvider': None, + "commonName": "test.example.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": "testtestest", + "validityStart": arrow.get(2018, 11, 9).isoformat(), + "validityEnd": arrow.get(2019, 11, 9).isoformat(), + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) assert not errors - 
assert data['authority'].id == authority.id + assert data["authority"].id == authority.id # make sure the defaults got set - assert data['common_name'] == 'test.example.com' - assert data['country'] == 'US' - assert data['location'] == 'Los Gatos' + assert data["common_name"] == "test.example.com" + assert data["country"] == "US" + assert data["location"] == "Los Gatos" assert len(data.keys()) == 19 @@ -256,28 +270,22 @@ def test_certificate_input_with_extensions(client, authority): from lemur.certificates.schemas import CertificateInputSchema input_data = { - 'commonName': 'test.example.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'extensions': { - 'keyUsage': { - 'digital_signature': True + "commonName": "test.example.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": "testtestest", + "extensions": { + "keyUsage": {"digital_signature": True}, + "extendedKeyUsage": { + "useClientAuthentication": True, + "useServerAuthentication": True, }, - 'extendedKeyUsage': { - 'useClientAuthentication': True, - 'useServerAuthentication': True + "subjectKeyIdentifier": {"includeSKI": True}, + "subAltNames": { + "names": [{"nameType": "DNSName", "value": "test.example.com"}] }, - 'subjectKeyIdentifier': { - 'includeSKI': True - }, - 'subAltNames': { - 'names': [ - {'nameType': 'DNSName', 'value': 'test.example.com'} - ] - } }, - 'dnsProvider': None, + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) @@ -287,48 +295,61 @@ def test_certificate_input_with_extensions(client, authority): def test_certificate_input_schema_parse_csr(authority): from lemur.certificates.schemas import CertificateInputSchema - test_san_dns = 'foobar.com' - extensions = {'sub_alt_names': {'names': x509.SubjectAlternativeName([x509.DNSName(test_san_dns)])}} - csr, private_key = create_csr(owner='joe@example.com', common_name='ACommonName', organization='test', - organizational_unit='Meters', country='NL', state='Noord-Holland', location='Amsterdam', - key_type='RSA2048', extensions=extensions) + test_san_dns = "foobar.com" + extensions = { + "sub_alt_names": { + "names": x509.SubjectAlternativeName([x509.DNSName(test_san_dns)]) + } + } + csr, private_key = create_csr( + owner="joe@example.com", + common_name="ACommonName", + organization="test", + organizational_unit="Meters", + country="NL", + state="Noord-Holland", + location="Amsterdam", + key_type="RSA2048", + extensions=extensions, + ) input_data = { - 'commonName': 'test.example.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'csr': csr, - 'dnsProvider': None, + "commonName": "test.example.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": "testtestest", + "csr": csr, + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) - for san in data['extensions']['sub_alt_names']['names']: + for san in data["extensions"]["sub_alt_names"]["names"]: assert san.value == test_san_dns assert not errors def test_certificate_out_of_range_date(client, authority): from lemur.certificates.schemas import CertificateInputSchema + input_data = { - 'commonName': 'test.example.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'validityYears': 100, - 'dnsProvider': None, + "commonName": "test.example.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": 
"testtestest", + "validityYears": 100, + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) assert errors - input_data['validityStart'] = '2017-04-30T00:12:34.513631' + input_data["validityStart"] = "2017-04-30T00:12:34.513631" data, errors = CertificateInputSchema().load(input_data) assert errors - input_data['validityEnd'] = '2018-04-30T00:12:34.513631' + input_data["validityEnd"] = "2018-04-30T00:12:34.513631" data, errors = CertificateInputSchema().load(input_data) assert errors @@ -336,13 +357,14 @@ def test_certificate_out_of_range_date(client, authority): def test_certificate_valid_years(client, authority): from lemur.certificates.schemas import CertificateInputSchema + input_data = { - 'commonName': 'test.example.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'validityYears': 1, - 'dnsProvider': None, + "commonName": "test.example.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": "testtestest", + "validityYears": 1, + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) @@ -351,14 +373,15 @@ def test_certificate_valid_years(client, authority): def test_certificate_valid_dates(client, authority): from lemur.certificates.schemas import CertificateInputSchema + input_data = { - 'commonName': 'test.example.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'validityStart': '2020-01-01T00:00:00', - 'validityEnd': '2020-01-01T00:00:01', - 'dnsProvider': None, + "commonName": "test.example.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": "testtestest", + "validityStart": "2020-01-01T00:00:00", + "validityEnd": "2020-01-01T00:00:01", + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) @@ -368,14 +391,15 @@ def test_certificate_valid_dates(client, authority): def test_certificate_cn_admin(client, authority, logged_in_admin): """Admin is exempt from CN/SAN domain restrictions.""" from lemur.certificates.schemas import CertificateInputSchema + input_data = { - 'commonName': '*.admin-overrides-whitelist.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'validityStart': '2020-01-01T00:00:00', - 'validityEnd': '2020-01-01T00:00:01', - 'dnsProvider': None, + "commonName": "*.admin-overrides-whitelist.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": "testtestest", + "validityStart": "2020-01-01T00:00:00", + "validityEnd": "2020-01-01T00:00:01", + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) @@ -385,22 +409,23 @@ def test_certificate_cn_admin(client, authority, logged_in_admin): def test_certificate_allowed_names(client, authority, session, logged_in_user): """Test for allowed CN and SAN values.""" from lemur.certificates.schemas import CertificateInputSchema + input_data = { - 'commonName': 'Names with spaces are not checked', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'validityStart': '2020-01-01T00:00:00', - 'validityEnd': '2020-01-01T00:00:01', - 'extensions': { - 'subAltNames': { - 'names': [ - {'nameType': 'DNSName', 'value': 'allowed.example.com'}, - {'nameType': 'IPAddress', 'value': '127.0.0.1'}, + "commonName": "Names with spaces are not checked", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + 
"description": "testtestest", + "validityStart": "2020-01-01T00:00:00", + "validityEnd": "2020-01-01T00:00:01", + "extensions": { + "subAltNames": { + "names": [ + {"nameType": "DNSName", "value": "allowed.example.com"}, + {"nameType": "IPAddress", "value": "127.0.0.1"}, ] } }, - 'dnsProvider': None, + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) @@ -415,74 +440,82 @@ def test_certificate_incative_authority(client, authority, session, logged_in_us session.add(authority) input_data = { - 'commonName': 'foo.example.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'validityStart': '2020-01-01T00:00:00', - 'validityEnd': '2020-01-01T00:00:01', - 'dnsProvider': None, + "commonName": "foo.example.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": "testtestest", + "validityStart": "2020-01-01T00:00:00", + "validityEnd": "2020-01-01T00:00:01", + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) - assert errors['authority'][0] == "The authority is inactive." + assert errors["authority"][0] == "The authority is inactive." def test_certificate_disallowed_names(client, authority, session, logged_in_user): """The CN and SAN are disallowed by LEMUR_WHITELISTED_DOMAINS.""" from lemur.certificates.schemas import CertificateInputSchema + input_data = { - 'commonName': '*.example.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'validityStart': '2020-01-01T00:00:00', - 'validityEnd': '2020-01-01T00:00:01', - 'extensions': { - 'subAltNames': { - 'names': [ - {'nameType': 'DNSName', 'value': 'allowed.example.com'}, - {'nameType': 'DNSName', 'value': 'evilhacker.org'}, + "commonName": "*.example.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": "testtestest", + "validityStart": "2020-01-01T00:00:00", + "validityEnd": "2020-01-01T00:00:01", + "extensions": { + "subAltNames": { + "names": [ + {"nameType": "DNSName", "value": "allowed.example.com"}, + {"nameType": "DNSName", "value": "evilhacker.org"}, ] } }, - 'dnsProvider': None, + "dnsProvider": None, } data, errors = CertificateInputSchema().load(input_data) - assert errors['common_name'][0].startswith("Domain *.example.com does not match whitelisted domain patterns") - assert (errors['extensions']['sub_alt_names']['names'][0] - .startswith("Domain evilhacker.org does not match whitelisted domain patterns")) + assert errors["common_name"][0].startswith( + "Domain *.example.com does not match whitelisted domain patterns" + ) + assert errors["extensions"]["sub_alt_names"]["names"][0].startswith( + "Domain evilhacker.org does not match whitelisted domain patterns" + ) def test_certificate_sensitive_name(client, authority, session, logged_in_user): """The CN is disallowed by 'sensitive' flag on Domain model.""" from lemur.certificates.schemas import CertificateInputSchema + input_data = { - 'commonName': 'sensitive.example.com', - 'owner': 'jim@example.com', - 'authority': {'id': authority.id}, - 'description': 'testtestest', - 'validityStart': '2020-01-01T00:00:00', - 'validityEnd': '2020-01-01T00:00:01', - 'dnsProvider': None, + "commonName": "sensitive.example.com", + "owner": "jim@example.com", + "authority": {"id": authority.id}, + "description": "testtestest", + "validityStart": "2020-01-01T00:00:00", + "validityEnd": "2020-01-01T00:00:01", + "dnsProvider": None, } - 
session.add(Domain(name='sensitive.example.com', sensitive=True)) + session.add(Domain(name="sensitive.example.com", sensitive=True)) data, errors = CertificateInputSchema().load(input_data) - assert errors['common_name'][0].startswith("Domain sensitive.example.com has been marked as sensitive") + assert errors["common_name"][0].startswith( + "Domain sensitive.example.com has been marked as sensitive" + ) def test_certificate_upload_schema_ok(client): from lemur.certificates.schemas import CertificateUploadInputSchema + data = { - 'name': 'Jane', - 'owner': 'pwner@example.com', - 'body': SAN_CERT_STR, - 'privateKey': SAN_CERT_KEY, - 'chain': INTERMEDIATE_CERT_STR, - 'csr': SAN_CERT_CSR, - 'external_id': '1234', + "name": "Jane", + "owner": "pwner@example.com", + "body": SAN_CERT_STR, + "privateKey": SAN_CERT_KEY, + "chain": INTERMEDIATE_CERT_STR, + "csr": SAN_CERT_CSR, + "external_id": "1234", } data, errors = CertificateUploadInputSchema().load(data) assert not errors @@ -490,20 +523,19 @@ def test_certificate_upload_schema_ok(client): def test_certificate_upload_schema_minimal(client): from lemur.certificates.schemas import CertificateUploadInputSchema - data = { - 'owner': 'pwner@example.com', - 'body': SAN_CERT_STR, - } + + data = {"owner": "pwner@example.com", "body": SAN_CERT_STR} data, errors = CertificateUploadInputSchema().load(data) assert not errors def test_certificate_upload_schema_long_chain(client): from lemur.certificates.schemas import CertificateUploadInputSchema + data = { - 'owner': 'pwner@example.com', - 'body': SAN_CERT_STR, - 'chain': INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR + "owner": "pwner@example.com", + "body": SAN_CERT_STR, + "chain": INTERMEDIATE_CERT_STR + "\n" + ROOTCA_CERT_STR, } data, errors = CertificateUploadInputSchema().load(data) assert not errors @@ -511,87 +543,106 @@ def test_certificate_upload_schema_long_chain(client): def test_certificate_upload_schema_invalid_body(client): from lemur.certificates.schemas import CertificateUploadInputSchema + data = { - 'owner': 'pwner@example.com', - 'body': 'Hereby I certify that this is a valid body', + "owner": "pwner@example.com", + "body": "Hereby I certify that this is a valid body", } data, errors = CertificateUploadInputSchema().load(data) - assert errors == {'body': ['Public certificate presented is not valid.']} + assert errors == {"body": ["Public certificate presented is not valid."]} def test_certificate_upload_schema_invalid_pkey(client): from lemur.certificates.schemas import CertificateUploadInputSchema + data = { - 'owner': 'pwner@example.com', - 'body': SAN_CERT_STR, - 'privateKey': 'Look at me Im a private key!!111', + "owner": "pwner@example.com", + "body": SAN_CERT_STR, + "privateKey": "Look at me Im a private key!!111", } data, errors = CertificateUploadInputSchema().load(data) - assert errors == {'private_key': ['Private key presented is not valid.']} + assert errors == {"private_key": ["Private key presented is not valid."]} def test_certificate_upload_schema_invalid_chain(client): from lemur.certificates.schemas import CertificateUploadInputSchema - data = { - 'body': SAN_CERT_STR, - 'chain': 'CHAINSAW', - 'owner': 'pwner@example.com', - } + + data = {"body": SAN_CERT_STR, "chain": "CHAINSAW", "owner": "pwner@example.com"} data, errors = CertificateUploadInputSchema().load(data) - assert errors == {'chain': ['Invalid certificate in certificate chain.']} + assert errors == {"chain": ["Invalid certificate in certificate chain."]} def 
test_certificate_upload_schema_wrong_pkey(client): from lemur.certificates.schemas import CertificateUploadInputSchema + data = { - 'body': SAN_CERT_STR, - 'privateKey': ROOTCA_KEY, - 'chain': INTERMEDIATE_CERT_STR, - 'owner': 'pwner@example.com', + "body": SAN_CERT_STR, + "privateKey": ROOTCA_KEY, + "chain": INTERMEDIATE_CERT_STR, + "owner": "pwner@example.com", } data, errors = CertificateUploadInputSchema().load(data) - assert errors == {'_schema': ['Private key does not match certificate.']} + assert errors == {"_schema": ["Private key does not match certificate."]} def test_certificate_upload_schema_wrong_chain(client): from lemur.certificates.schemas import CertificateUploadInputSchema + data = { - 'owner': 'pwner@example.com', - 'body': SAN_CERT_STR, - 'chain': ROOTCA_CERT_STR, + "owner": "pwner@example.com", + "body": SAN_CERT_STR, + "chain": ROOTCA_CERT_STR, } data, errors = CertificateUploadInputSchema().load(data) - assert errors == {'_schema': ["Incorrect chain certificate(s) provided: 'san.example.org' is not signed by " - "'LemurTrust Unittests Root CA 2018'"]} + assert errors == { + "_schema": [ + "Incorrect chain certificate(s) provided: 'san.example.org' is not signed by " + "'LemurTrust Unittests Root CA 2018'" + ] + } def test_certificate_upload_schema_wrong_chain_2nd(client): from lemur.certificates.schemas import CertificateUploadInputSchema + data = { - 'owner': 'pwner@example.com', - 'body': SAN_CERT_STR, - 'chain': INTERMEDIATE_CERT_STR + '\n' + SAN_CERT_STR, + "owner": "pwner@example.com", + "body": SAN_CERT_STR, + "chain": INTERMEDIATE_CERT_STR + "\n" + SAN_CERT_STR, } data, errors = CertificateUploadInputSchema().load(data) - assert errors == {'_schema': ["Incorrect chain certificate(s) provided: 'LemurTrust Unittests Class 1 CA 2018' is " - "not signed by 'san.example.org'"]} + assert errors == { + "_schema": [ + "Incorrect chain certificate(s) provided: 'LemurTrust Unittests Class 1 CA 2018' is " + "not signed by 'san.example.org'" + ] + } def test_create_basic_csr(client): csr_config = dict( - common_name='example.com', - organization='Example, Inc.', - organizational_unit='Operations', - country='US', - state='CA', - location='A place', - owner='joe@example.com', - key_type='RSA2048', - extensions=dict(names=dict(sub_alt_names=x509.SubjectAlternativeName([x509.DNSName('test.example.com'), x509.DNSName('test2.example.com')]))) + common_name="example.com", + organization="Example, Inc.", + organizational_unit="Operations", + country="US", + state="CA", + location="A place", + owner="joe@example.com", + key_type="RSA2048", + extensions=dict( + names=dict( + sub_alt_names=x509.SubjectAlternativeName( + [ + x509.DNSName("test.example.com"), + x509.DNSName("test2.example.com"), + ] + ) + ) + ), ) csr, pem = create_csr(**csr_config) - csr = x509.load_pem_x509_csr(csr.encode('utf-8'), default_backend()) + csr = x509.load_pem_x509_csr(csr.encode("utf-8"), default_backend()) for name in csr.subject: assert name.value in csr_config.values() @@ -603,13 +654,13 @@ def test_csr_empty_san(client): """ csr_text, pkey = create_csr( - common_name='daniel-san.example.com', - owner='daniel-san@example.com', - key_type='RSA2048', - extensions={'sub_alt_names': {'names': x509.SubjectAlternativeName([])}} + common_name="daniel-san.example.com", + owner="daniel-san@example.com", + key_type="RSA2048", + extensions={"sub_alt_names": {"names": x509.SubjectAlternativeName([])}}, ) - csr = x509.load_pem_x509_csr(csr_text.encode('utf-8'), default_backend()) + csr = 
x509.load_pem_x509_csr(csr_text.encode("utf-8"), default_backend()) with pytest.raises(x509.ExtensionNotFound): csr.extensions.get_extension_for_class(x509.SubjectAlternativeName) @@ -620,13 +671,13 @@ def test_csr_disallowed_cn(client, logged_in_user): from lemur.common import validators request, pkey = create_csr( - common_name='evilhacker.org', - owner='joe@example.com', - key_type='RSA2048', + common_name="evilhacker.org", owner="joe@example.com", key_type="RSA2048" ) with pytest.raises(ValidationError) as err: validators.csr(request) - assert str(err.value).startswith('Domain evilhacker.org does not match whitelisted domain patterns') + assert str(err.value).startswith( + "Domain evilhacker.org does not match whitelisted domain patterns" + ) def test_csr_disallowed_san(client, logged_in_user): @@ -635,46 +686,71 @@ def test_csr_disallowed_san(client, logged_in_user): request, pkey = create_csr( common_name="CN with spaces isn't a domain and is thus allowed", - owner='joe@example.com', - key_type='RSA2048', - extensions={'sub_alt_names': {'names': x509.SubjectAlternativeName([x509.DNSName('evilhacker.org')])}} + owner="joe@example.com", + key_type="RSA2048", + extensions={ + "sub_alt_names": { + "names": x509.SubjectAlternativeName([x509.DNSName("evilhacker.org")]) + } + }, ) with pytest.raises(ValidationError) as err: validators.csr(request) - assert str(err.value).startswith('Domain evilhacker.org does not match whitelisted domain patterns') + assert str(err.value).startswith( + "Domain evilhacker.org does not match whitelisted domain patterns" + ) def test_get_name_from_arn(client): from lemur.certificates.service import get_name_from_arn - arn = 'arn:aws:iam::11111111:server-certificate/mycertificate' - assert get_name_from_arn(arn) == 'mycertificate' + + arn = "arn:aws:iam::11111111:server-certificate/mycertificate" + assert get_name_from_arn(arn) == "mycertificate" def test_get_account_number(client): from lemur.certificates.service import get_account_number - arn = 'arn:aws:iam::11111111:server-certificate/mycertificate' - assert get_account_number(arn) == '11111111' + + arn = "arn:aws:iam::11111111:server-certificate/mycertificate" + assert get_account_number(arn) == "11111111" def test_mint_certificate(issuer_plugin, authority): from lemur.certificates.service import mint - cert_body, private_key, chain, external_id, csr = mint(authority=authority, csr=CSR_STR) + + cert_body, private_key, chain, external_id, csr = mint( + authority=authority, csr=CSR_STR + ) assert cert_body == SAN_CERT_STR def test_create_certificate(issuer_plugin, authority, user): from lemur.certificates.service import create - cert = create(authority=authority, csr=CSR_STR, owner='joe@example.com', creator=user['user']) - assert str(cert.not_after) == '2047-12-31T22:00:00+00:00' - assert str(cert.not_before) == '2017-12-31T22:00:00+00:00' - assert cert.issuer == 'LemurTrustUnittestsClass1CA2018' - assert cert.name == 'SAN-san.example.org-LemurTrustUnittestsClass1CA2018-20171231-20471231-AFF2DB4F8D2D4D8E80FA382AE27C2333' - cert = create(authority=authority, csr=CSR_STR, owner='joe@example.com', name='ACustomName1', creator=user['user']) - assert cert.name == 'ACustomName1' + cert = create( + authority=authority, csr=CSR_STR, owner="joe@example.com", creator=user["user"] + ) + assert str(cert.not_after) == "2047-12-31T22:00:00+00:00" + assert str(cert.not_before) == "2017-12-31T22:00:00+00:00" + assert cert.issuer == "LemurTrustUnittestsClass1CA2018" + assert ( + cert.name + == 
"SAN-san.example.org-LemurTrustUnittestsClass1CA2018-20171231-20471231-AFF2DB4F8D2D4D8E80FA382AE27C2333" + ) + + cert = create( + authority=authority, + csr=CSR_STR, + owner="joe@example.com", + name="ACustomName1", + creator=user["user"], + ) + assert cert.name == "ACustomName1" -def test_reissue_certificate(issuer_plugin, crypto_authority, certificate, logged_in_user): +def test_reissue_certificate( + issuer_plugin, crypto_authority, certificate, logged_in_user +): from lemur.certificates.service import reissue_certificate # test-authority would return a mismatching private key, so use 'cryptography-issuer' plugin instead. @@ -684,286 +760,511 @@ def test_reissue_certificate(issuer_plugin, crypto_authority, certificate, logge def test_create_csr(): - csr, private_key = create_csr(owner='joe@example.com', common_name='ACommonName', organization='test', organizational_unit='Meters', country='US', - state='CA', location='Here', key_type='RSA2048') + csr, private_key = create_csr( + owner="joe@example.com", + common_name="ACommonName", + organization="test", + organizational_unit="Meters", + country="US", + state="CA", + location="Here", + key_type="RSA2048", + ) assert csr assert private_key - extensions = {'sub_alt_names': {'names': x509.SubjectAlternativeName([x509.DNSName('AnotherCommonName')])}} - csr, private_key = create_csr(owner='joe@example.com', common_name='ACommonName', organization='test', organizational_unit='Meters', country='US', - state='CA', location='Here', extensions=extensions, key_type='RSA2048') + extensions = { + "sub_alt_names": { + "names": x509.SubjectAlternativeName([x509.DNSName("AnotherCommonName")]) + } + } + csr, private_key = create_csr( + owner="joe@example.com", + common_name="ACommonName", + organization="test", + organizational_unit="Meters", + country="US", + state="CA", + location="Here", + extensions=extensions, + key_type="RSA2048", + ) assert csr assert private_key def test_import(user): from lemur.certificates.service import import_certificate - cert = import_certificate(body=SAN_CERT_STR, chain=INTERMEDIATE_CERT_STR, private_key=SAN_CERT_KEY, creator=user['user']) - assert str(cert.not_after) == '2047-12-31T22:00:00+00:00' - assert str(cert.not_before) == '2017-12-31T22:00:00+00:00' - assert cert.issuer == 'LemurTrustUnittestsClass1CA2018' - assert cert.name.startswith('SAN-san.example.org-LemurTrustUnittestsClass1CA2018-20171231-20471231') - cert = import_certificate(body=SAN_CERT_STR, chain=INTERMEDIATE_CERT_STR, private_key=SAN_CERT_KEY, owner='joe@example.com', name='ACustomName2', creator=user['user']) - assert cert.name == 'ACustomName2' + cert = import_certificate( + body=SAN_CERT_STR, + chain=INTERMEDIATE_CERT_STR, + private_key=SAN_CERT_KEY, + creator=user["user"], + ) + assert str(cert.not_after) == "2047-12-31T22:00:00+00:00" + assert str(cert.not_before) == "2017-12-31T22:00:00+00:00" + assert cert.issuer == "LemurTrustUnittestsClass1CA2018" + assert cert.name.startswith( + "SAN-san.example.org-LemurTrustUnittestsClass1CA2018-20171231-20471231" + ) + + cert = import_certificate( + body=SAN_CERT_STR, + chain=INTERMEDIATE_CERT_STR, + private_key=SAN_CERT_KEY, + owner="joe@example.com", + name="ACustomName2", + creator=user["user"], + ) + assert cert.name == "ACustomName2" @pytest.mark.skip def test_upload(user): from lemur.certificates.service import upload - cert = upload(body=SAN_CERT_STR, chain=INTERMEDIATE_CERT_STR, private_key=SAN_CERT_KEY, owner='joe@example.com', creator=user['user']) - assert str(cert.not_after) == 
'2040-01-01T20:30:52+00:00' - assert str(cert.not_before) == '2015-06-26T20:30:52+00:00' - assert cert.issuer == 'Example' - assert cert.name == 'long.lived.com-Example-20150626-20400101-3' - cert = upload(body=SAN_CERT_STR, chain=INTERMEDIATE_CERT_STR, private_key=SAN_CERT_KEY, owner='joe@example.com', name='ACustomName', creator=user['user']) - assert 'ACustomName' in cert.name + cert = upload( + body=SAN_CERT_STR, + chain=INTERMEDIATE_CERT_STR, + private_key=SAN_CERT_KEY, + owner="joe@example.com", + creator=user["user"], + ) + assert str(cert.not_after) == "2040-01-01T20:30:52+00:00" + assert str(cert.not_before) == "2015-06-26T20:30:52+00:00" + assert cert.issuer == "Example" + assert cert.name == "long.lived.com-Example-20150626-20400101-3" + + cert = upload( + body=SAN_CERT_STR, + chain=INTERMEDIATE_CERT_STR, + private_key=SAN_CERT_KEY, + owner="joe@example.com", + name="ACustomName", + creator=user["user"], + ) + assert "ACustomName" in cert.name # verify upload with a private key as a str def test_upload_private_key_str(user): from lemur.certificates.service import upload - cert = upload(body=SAN_CERT_STR, chain=INTERMEDIATE_CERT_STR, private_key=SAN_CERT_KEY, owner='joe@example.com', name='ACustomName', creator=user['user']) + + cert = upload( + body=SAN_CERT_STR, + chain=INTERMEDIATE_CERT_STR, + private_key=SAN_CERT_KEY, + owner="joe@example.com", + name="ACustomName", + creator=user["user"], + ) assert cert -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_certificate_get_private_key(client, token, status): - assert client.get(api.url_for(Certificates, certificate_id=1), headers=token).status_code == status + assert ( + client.get( + api.url_for(Certificates, certificate_id=1), headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_certificate_get(client, token, status): - assert client.get(api.url_for(Certificates, certificate_id=1), headers=token).status_code == status + assert ( + client.get( + api.url_for(Certificates, certificate_id=1), headers=token + ).status_code + == status + ) def test_certificate_get_body(client): - response_body = client.get(api.url_for(Certificates, certificate_id=1), headers=VALID_USER_HEADER_TOKEN).json - assert response_body['serial'] == '211983098819107449768450703123665283596' - assert response_body['serialHex'] == '9F7A75B39DAE4C3F9524C68B06DA6A0C' - assert response_body['distinguishedName'] == ('CN=LemurTrust Unittests Class 1 CA 2018,' - 'O=LemurTrust Enterprises Ltd,' - 'OU=Unittesting Operations Center,' - 'C=EE,' - 'ST=N/A,' - 'L=Earth') + response_body = client.get( + api.url_for(Certificates, certificate_id=1), headers=VALID_USER_HEADER_TOKEN + ).json + assert response_body["serial"] == "211983098819107449768450703123665283596" + assert response_body["serialHex"] == "9F7A75B39DAE4C3F9524C68B06DA6A0C" + assert response_body["distinguishedName"] == ( + "CN=LemurTrust Unittests Class 1 CA 2018," + "O=LemurTrust Enterprises 
Ltd," + "OU=Unittesting Operations Center," + "C=EE," + "ST=N/A," + "L=Earth" + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificate_post(client, token, status): - assert client.post(api.url_for(Certificates, certificate_id=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(Certificates, certificate_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_certificate_put(client, token, status): - assert client.put(api.url_for(Certificates, certificate_id=1), data={}, headers=token).status_code == status + assert ( + client.put( + api.url_for(Certificates, certificate_id=1), data={}, headers=token + ).status_code + == status + ) def test_certificate_put_with_data(client, certificate, issuer_plugin): - resp = client.put(api.url_for(Certificates, certificate_id=certificate.id), data=json.dumps({'owner': 'bob@example.com', 'description': 'test', 'notify': True}), headers=VALID_ADMIN_HEADER_TOKEN) + resp = client.put( + api.url_for(Certificates, certificate_id=certificate.id), + data=json.dumps( + {"owner": "bob@example.com", "description": "test", "notify": True} + ), + headers=VALID_ADMIN_HEADER_TOKEN, + ) assert resp.status_code == 200 -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 204), - (VALID_ADMIN_API_TOKEN, 412), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 204), + (VALID_ADMIN_API_TOKEN, 412), + ("", 401), + ], +) def test_certificate_delete(client, token, status): - assert client.delete(api.url_for(Certificates, certificate_id=1), headers=token).status_code == status + assert ( + client.delete( + api.url_for(Certificates, certificate_id=1), headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 204), - (VALID_ADMIN_API_TOKEN, 204), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 204), + (VALID_ADMIN_API_TOKEN, 204), + ("", 401), + ], +) def test_invalid_certificate_delete(client, invalid_certificate, token, status): - assert client.delete( - api.url_for(Certificates, certificate_id=invalid_certificate.id), headers=token).status_code == status + assert ( + client.delete( + api.url_for(Certificates, certificate_id=invalid_certificate.id), + headers=token, + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificate_patch(client, token, status): - assert 
client.patch(api.url_for(Certificates, certificate_id=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(Certificates, certificate_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_certificates_get(client, token, status): - assert client.get(api.url_for(CertificatesList), headers=token).status_code == status + assert ( + client.get(api.url_for(CertificatesList), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_certificates_post(client, token, status): - assert client.post(api.url_for(CertificatesList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(CertificatesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificates_put(client, token, status): - assert client.put(api.url_for(CertificatesList), data={}, headers=token).status_code == status + assert ( + client.put(api.url_for(CertificatesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificates_delete(client, token, status): - assert client.delete(api.url_for(CertificatesList), headers=token).status_code == status + assert ( + client.delete(api.url_for(CertificatesList), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificates_patch(client, token, status): - assert client.patch(api.url_for(CertificatesList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(CertificatesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificate_credentials_post(client, token, status): - assert 
client.post(api.url_for(CertificatePrivateKey, certificate_id=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(CertificatePrivateKey, certificate_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificate_credentials_put(client, token, status): - assert client.put(api.url_for(CertificatePrivateKey, certificate_id=1), data={}, headers=token).status_code == status + assert ( + client.put( + api.url_for(CertificatePrivateKey, certificate_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificate_credentials_delete(client, token, status): - assert client.delete(api.url_for(CertificatePrivateKey, certificate_id=1), headers=token).status_code == status + assert ( + client.delete( + api.url_for(CertificatePrivateKey, certificate_id=1), headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificate_credentials_patch(client, token, status): - assert client.patch(api.url_for(CertificatePrivateKey, certificate_id=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(CertificatePrivateKey, certificate_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificates_upload_get(client, token, status): - assert client.get(api.url_for(CertificatesUpload), headers=token).status_code == status + assert ( + client.get(api.url_for(CertificatesUpload), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_certificates_upload_post(client, token, status): - assert client.post(api.url_for(CertificatesUpload), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(CertificatesUpload), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) 
+@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificates_upload_put(client, token, status): - assert client.put(api.url_for(CertificatesUpload), data={}, headers=token).status_code == status + assert ( + client.put(api.url_for(CertificatesUpload), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificates_upload_delete(client, token, status): - assert client.delete(api.url_for(CertificatesUpload), headers=token).status_code == status + assert ( + client.delete(api.url_for(CertificatesUpload), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_certificates_upload_patch(client, token, status): - assert client.patch(api.url_for(CertificatesUpload), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(CertificatesUpload), data={}, headers=token + ).status_code + == status + ) def test_sensitive_sort(client): - resp = client.get(api.url_for(CertificatesList) + '?sortBy=private_key&sortDir=asc', headers=VALID_ADMIN_HEADER_TOKEN) - assert "'private_key' is not sortable or filterable" in resp.json['message'] + resp = client.get( + api.url_for(CertificatesList) + "?sortBy=private_key&sortDir=asc", + headers=VALID_ADMIN_HEADER_TOKEN, + ) + assert "'private_key' is not sortable or filterable" in resp.json["message"] def test_boolean_filter(client): - resp = client.get(api.url_for(CertificatesList) + '?filter=notify;true', headers=VALID_ADMIN_HEADER_TOKEN) + resp = client.get( + api.url_for(CertificatesList) + "?filter=notify;true", + headers=VALID_ADMIN_HEADER_TOKEN, + ) assert resp.status_code == 200 # Also don't crash with invalid input (we currently treat that as false) - resp = client.get(api.url_for(CertificatesList) + '?filter=notify;whatisthis', headers=VALID_ADMIN_HEADER_TOKEN) + resp = client.get( + api.url_for(CertificatesList) + "?filter=notify;whatisthis", + headers=VALID_ADMIN_HEADER_TOKEN, + ) assert resp.status_code == 200 diff --git a/lemur/tests/test_defaults.py b/lemur/tests/test_defaults.py index da9d6c79..b8daa575 100644 --- a/lemur/tests/test_defaults.py +++ b/lemur/tests/test_defaults.py @@ -8,14 +8,18 @@ from .vectors import SAN_CERT, WILDCARD_CERT, INTERMEDIATE_CERT def test_cert_get_cn(client): from lemur.common.defaults import common_name - assert common_name(SAN_CERT) == 'san.example.org' + assert common_name(SAN_CERT) == "san.example.org" def test_cert_sub_alt_domains(client): from lemur.common.defaults import domains assert domains(INTERMEDIATE_CERT) == [] - assert domains(SAN_CERT) == ['san.example.org', 'san2.example.org', 'daniel-san.example.org'] + assert domains(SAN_CERT) == [ + "san.example.org", + "san2.example.org", + "daniel-san.example.org", + ] def test_cert_is_san(client): @@ -28,94 +32,119 @@ def test_cert_is_san(client): def 
test_cert_is_wildcard(client): from lemur.common.defaults import is_wildcard + assert is_wildcard(WILDCARD_CERT) assert not is_wildcard(INTERMEDIATE_CERT) def test_cert_bitstrength(client): from lemur.common.defaults import bitstrength + assert bitstrength(INTERMEDIATE_CERT) == 2048 def test_cert_issuer(client): from lemur.common.defaults import issuer - assert issuer(INTERMEDIATE_CERT) == 'LemurTrustUnittestsRootCA2018' + + assert issuer(INTERMEDIATE_CERT) == "LemurTrustUnittestsRootCA2018" def test_text_to_slug(client): from lemur.common.defaults import text_to_slug - assert text_to_slug('test - string') == 'test-string' - assert text_to_slug('test - string', '') == 'teststring' + + assert text_to_slug("test - string") == "test-string" + assert text_to_slug("test - string", "") == "teststring" # Accented characters are decomposed - assert text_to_slug('föö bär') == 'foo-bar' + assert text_to_slug("föö bär") == "foo-bar" # Melt away the Unicode Snowman - assert text_to_slug('\u2603') == '' - assert text_to_slug('\u2603test\u2603') == 'test' - assert text_to_slug('snow\u2603man') == 'snow-man' - assert text_to_slug('snow\u2603man', '') == 'snowman' + assert text_to_slug("\u2603") == "" + assert text_to_slug("\u2603test\u2603") == "test" + assert text_to_slug("snow\u2603man") == "snow-man" + assert text_to_slug("snow\u2603man", "") == "snowman" # IDNA-encoded domain names should be kept as-is - assert text_to_slug('xn--i1b6eqas.xn--xmpl-loa9b3671b.com') == 'xn--i1b6eqas.xn--xmpl-loa9b3671b.com' + assert ( + text_to_slug("xn--i1b6eqas.xn--xmpl-loa9b3671b.com") + == "xn--i1b6eqas.xn--xmpl-loa9b3671b.com" + ) def test_create_name(client): from lemur.common.defaults import certificate_name from datetime import datetime - assert certificate_name( - 'example.com', - 'Example Inc,', - datetime(2015, 5, 7, 0, 0, 0), - datetime(2015, 5, 12, 0, 0, 0), - False - ) == 'example.com-ExampleInc-20150507-20150512' - assert certificate_name( - 'example.com', - 'Example Inc,', - datetime(2015, 5, 7, 0, 0, 0), - datetime(2015, 5, 12, 0, 0, 0), - True - ) == 'SAN-example.com-ExampleInc-20150507-20150512' - assert certificate_name( - 'xn--mnchen-3ya.de', - 'Vertrauenswürdig Autorität', - datetime(2015, 5, 7, 0, 0, 0), - datetime(2015, 5, 12, 0, 0, 0), - False - ) == 'xn--mnchen-3ya.de-VertrauenswurdigAutoritat-20150507-20150512' - assert certificate_name( - 'selfie.example.org', - '', - datetime(2015, 5, 7, 0, 0, 0), - datetime(2025, 5, 12, 13, 37, 0), - False - ) == 'selfie.example.org-selfsigned-20150507-20250512' + + assert ( + certificate_name( + "example.com", + "Example Inc,", + datetime(2015, 5, 7, 0, 0, 0), + datetime(2015, 5, 12, 0, 0, 0), + False, + ) + == "example.com-ExampleInc-20150507-20150512" + ) + assert ( + certificate_name( + "example.com", + "Example Inc,", + datetime(2015, 5, 7, 0, 0, 0), + datetime(2015, 5, 12, 0, 0, 0), + True, + ) + == "SAN-example.com-ExampleInc-20150507-20150512" + ) + assert ( + certificate_name( + "xn--mnchen-3ya.de", + "Vertrauenswürdig Autorität", + datetime(2015, 5, 7, 0, 0, 0), + datetime(2015, 5, 12, 0, 0, 0), + False, + ) + == "xn--mnchen-3ya.de-VertrauenswurdigAutoritat-20150507-20150512" + ) + assert ( + certificate_name( + "selfie.example.org", + "", + datetime(2015, 5, 7, 0, 0, 0), + datetime(2025, 5, 12, 13, 37, 0), + False, + ) + == "selfie.example.org-selfsigned-20150507-20250512" + ) def test_issuer(client, cert_builder, issuer_private_key): from lemur.common.defaults import issuer - assert issuer(INTERMEDIATE_CERT) == 'LemurTrustUnittestsRootCA2018' + 
assert issuer(INTERMEDIATE_CERT) == "LemurTrustUnittestsRootCA2018" # We need to override builder's issuer name cert_builder._issuer_name = None # Unicode issuer name - cert = (cert_builder - .issuer_name(x509.Name([x509.NameAttribute(x509.NameOID.COMMON_NAME, 'Vertrauenswürdig Autorität')])) - .sign(issuer_private_key, hashes.SHA256(), default_backend())) - assert issuer(cert) == 'VertrauenswurdigAutoritat' + cert = cert_builder.issuer_name( + x509.Name( + [x509.NameAttribute(x509.NameOID.COMMON_NAME, "Vertrauenswürdig Autorität")] + ) + ).sign(issuer_private_key, hashes.SHA256(), default_backend()) + assert issuer(cert) == "VertrauenswurdigAutoritat" # Fallback to 'Organization' field when issuer CN is missing - cert = (cert_builder - .issuer_name(x509.Name([x509.NameAttribute(x509.NameOID.ORGANIZATION_NAME, 'No Such Organization')])) - .sign(issuer_private_key, hashes.SHA256(), default_backend())) - assert issuer(cert) == 'NoSuchOrganization' + cert = cert_builder.issuer_name( + x509.Name( + [x509.NameAttribute(x509.NameOID.ORGANIZATION_NAME, "No Such Organization")] + ) + ).sign(issuer_private_key, hashes.SHA256(), default_backend()) + assert issuer(cert) == "NoSuchOrganization" # Missing issuer name - cert = (cert_builder - .issuer_name(x509.Name([])) - .sign(issuer_private_key, hashes.SHA256(), default_backend())) - assert issuer(cert) == '' + cert = cert_builder.issuer_name(x509.Name([])).sign( + issuer_private_key, hashes.SHA256(), default_backend() + ) + assert issuer(cert) == "" def test_issuer_selfsigned(selfsigned_cert): from lemur.common.defaults import issuer - assert issuer(selfsigned_cert) == '' + + assert issuer(selfsigned_cert) == "" diff --git a/lemur/tests/test_destinations.py b/lemur/tests/test_destinations.py index 11f03d9e..d17c703b 100644 --- a/lemur/tests/test_destinations.py +++ b/lemur/tests/test_destinations.py @@ -3,20 +3,22 @@ import pytest from lemur.destinations.views import * # noqa -from .vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN +from .vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, +) def test_destination_input_schema(client, destination_plugin, destination): from lemur.destinations.schemas import DestinationInputSchema input_data = { - 'label': 'destination1', - 'options': {}, - 'description': 'my destination', - 'active': True, - 'plugin': { - 'slug': 'test-destination' - } + "label": "destination1", + "options": {}, + "description": "my destination", + "active": True, + "plugin": {"slug": "test-destination"}, } data, errors = DestinationInputSchema().load(input_data) @@ -24,91 +26,154 @@ def test_destination_input_schema(client, destination_plugin, destination): assert not errors -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 404), - (VALID_ADMIN_HEADER_TOKEN, 404), - (VALID_ADMIN_API_TOKEN, 404), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 404), + (VALID_ADMIN_HEADER_TOKEN, 404), + (VALID_ADMIN_API_TOKEN, 404), + ("", 401), + ], +) def test_destination_get(client, token, status): - assert client.get(api.url_for(Destinations, destination_id=1), headers=token).status_code == status + assert ( + client.get( + api.url_for(Destinations, destination_id=1), headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + 
"token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_destination_post_(client, token, status): - assert client.post(api.url_for(Destinations, destination_id=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(Destinations, destination_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_destination_put(client, token, status): - assert client.put(api.url_for(Destinations, destination_id=1), data={}, headers=token).status_code == status + assert ( + client.put( + api.url_for(Destinations, destination_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_destination_delete(client, token, status): - assert client.delete(api.url_for(Destinations, destination_id=1), headers=token).status_code == status + assert ( + client.delete( + api.url_for(Destinations, destination_id=1), headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_destination_patch(client, token, status): - assert client.patch(api.url_for(Destinations, destination_id=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(Destinations, destination_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_destination_list_post_(client, token, status): - assert client.post(api.url_for(DestinationsList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(DestinationsList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_destination_list_get(client, token, status): - assert client.get(api.url_for(DestinationsList), headers=token).status_code == status + assert ( + client.get(api.url_for(DestinationsList), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - 
(VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_destination_list_delete(client, token, status): - assert client.delete(api.url_for(DestinationsList), headers=token).status_code == status + assert ( + client.delete(api.url_for(DestinationsList), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_destination_list_patch(client, token, status): - assert client.patch(api.url_for(DestinationsList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(DestinationsList), data={}, headers=token).status_code + == status + ) diff --git a/lemur/tests/test_domains.py b/lemur/tests/test_domains.py index 873412b2..47023f8c 100644 --- a/lemur/tests/test_domains.py +++ b/lemur/tests/test_domains.py @@ -3,94 +3,152 @@ import pytest from lemur.domains.views import * # noqa -from .vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN +from .vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, +) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_domain_get(client, token, status): - assert client.get(api.url_for(Domains, domain_id=1), headers=token).status_code == status + assert ( + client.get(api.url_for(Domains, domain_id=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_domain_post_(client, token, status): - assert client.post(api.url_for(Domains, domain_id=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(Domains, domain_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_domain_put(client, token, status): - assert client.put(api.url_for(Domains, domain_id=1), data={}, headers=token).status_code == status + assert ( + client.put( + api.url_for(Domains, domain_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + 
(VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_domain_delete(client, token, status): - assert client.delete(api.url_for(Domains, domain_id=1), headers=token).status_code == status + assert ( + client.delete(api.url_for(Domains, domain_id=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_domain_patch(client, token, status): - assert client.patch(api.url_for(Domains, domain_id=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(Domains, domain_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_domain_list_post_(client, token, status): - assert client.post(api.url_for(DomainsList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(DomainsList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_domain_list_get(client, token, status): assert client.get(api.url_for(DomainsList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_domain_list_delete(client, token, status): assert client.delete(api.url_for(DomainsList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_domain_list_patch(client, token, status): - assert client.patch(api.url_for(DomainsList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(DomainsList), data={}, headers=token).status_code + == status + ) diff --git a/lemur/tests/test_endpoints.py b/lemur/tests/test_endpoints.py index 4ea0a4aa..af073e53 100644 --- a/lemur/tests/test_endpoints.py +++ b/lemur/tests/test_endpoints.py @@ -4,11 +4,16 @@ from lemur.endpoints.views import * # noqa from lemur.tests.factories import EndpointFactory, CertificateFactory -from .vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN +from .vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, +) def 
test_rotate_certificate(client, source_plugin): from lemur.deployment.service import rotate_certificate + new_certificate = CertificateFactory() endpoint = EndpointFactory() @@ -16,91 +21,147 @@ def test_rotate_certificate(client, source_plugin): assert endpoint.certificate == new_certificate -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 404), - (VALID_ADMIN_HEADER_TOKEN, 404), - (VALID_ADMIN_API_TOKEN, 404), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 404), + (VALID_ADMIN_HEADER_TOKEN, 404), + (VALID_ADMIN_API_TOKEN, 404), + ("", 401), + ], +) def test_endpoint_get(client, token, status): - assert client.get(api.url_for(Endpoints, endpoint_id=1), headers=token).status_code == status + assert ( + client.get(api.url_for(Endpoints, endpoint_id=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_endpoint_post_(client, token, status): - assert client.post(api.url_for(Endpoints, endpoint_id=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(Endpoints, endpoint_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_endpoint_put(client, token, status): - assert client.put(api.url_for(Endpoints, endpoint_id=1), data={}, headers=token).status_code == status + assert ( + client.put( + api.url_for(Endpoints, endpoint_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_endpoint_delete(client, token, status): - assert client.delete(api.url_for(Endpoints, endpoint_id=1), headers=token).status_code == status + assert ( + client.delete(api.url_for(Endpoints, endpoint_id=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_endpoint_patch(client, token, status): - assert client.patch(api.url_for(Endpoints, endpoint_id=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(Endpoints, endpoint_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + 
(VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_endpoint_list_post_(client, token, status): - assert client.post(api.url_for(EndpointsList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(EndpointsList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_endpoint_list_get(client, token, status): assert client.get(api.url_for(EndpointsList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_endpoint_list_delete(client, token, status): - assert client.delete(api.url_for(EndpointsList), headers=token).status_code == status + assert ( + client.delete(api.url_for(EndpointsList), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_endpoint_list_patch(client, token, status): - assert client.patch(api.url_for(EndpointsList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(EndpointsList), data={}, headers=token).status_code + == status + ) diff --git a/lemur/tests/test_ldap.py b/lemur/tests/test_ldap.py index a636afdc..8e4027a9 100644 --- a/lemur/tests/test_ldap.py +++ b/lemur/tests/test_ldap.py @@ -1,51 +1,69 @@ import pytest -from lemur.auth.ldap import * # noqa +from lemur.auth.ldap import * # noqa from mock import patch, MagicMock class LdapPrincipalTester(LdapPrincipal): - def __init__(self, args): super().__init__(args) - self.ldap_server = 'ldap://localhost' + self.ldap_server = "ldap://localhost" def bind_test(self): - groups = [('user', {'memberOf': ['CN=Lemur Access,OU=Groups,DC=example,DC=com'.encode('utf-8'), - 'CN=Pen Pushers,OU=Groups,DC=example,DC=com'.encode('utf-8')]})] + groups = [ + ( + "user", + { + "memberOf": [ + "CN=Lemur Access,OU=Groups,DC=example,DC=com".encode("utf-8"), + "CN=Pen Pushers,OU=Groups,DC=example,DC=com".encode("utf-8"), + ] + }, + ) + ] self.ldap_client = MagicMock() self.ldap_client.search_s.return_value = groups self._bind() def authorize_test_groups_to_roles_admin(self): - self.ldap_groups = ''.join(['CN=Pen Pushers,OU=Groups,DC=example,DC=com', - 'CN=Lemur Admins,OU=Groups,DC=example,DC=com', - 'CN=Lemur Read Only,OU=Groups,DC=example,DC=com']) + self.ldap_groups = "".join( + [ + "CN=Pen Pushers,OU=Groups,DC=example,DC=com", + "CN=Lemur Admins,OU=Groups,DC=example,DC=com", + "CN=Lemur Read Only,OU=Groups,DC=example,DC=com", + ] + ) self.ldap_required_group = None - self.ldap_groups_to_roles = {'Lemur Admins': 'admin', 'Lemur Read Only': 'read-only'} + self.ldap_groups_to_roles = { + "Lemur Admins": "admin", + "Lemur Read Only": "read-only", + } return 
self._authorize() def authorize_test_required_group(self, group): - self.ldap_groups = ''.join(['CN=Lemur Access,OU=Groups,DC=example,DC=com', - 'CN=Pen Pushers,OU=Groups,DC=example,DC=com']) + self.ldap_groups = "".join( + [ + "CN=Lemur Access,OU=Groups,DC=example,DC=com", + "CN=Pen Pushers,OU=Groups,DC=example,DC=com", + ] + ) self.ldap_required_group = group return self._authorize() @pytest.fixture() def principal(session): - args = {'username': 'user', 'password': 'p4ssw0rd'} + args = {"username": "user", "password": "p4ssw0rd"} yield LdapPrincipalTester(args) class TestLdapPrincipal: - - @patch('ldap.initialize') + @patch("ldap.initialize") def test_bind(self, app, principal): self.test_ldap_user = principal self.test_ldap_user.bind_test() - group = 'Pen Pushers' + group = "Pen Pushers" assert group in self.test_ldap_user.ldap_groups - assert self.test_ldap_user.ldap_principal == 'user@example.com' + assert self.test_ldap_user.ldap_principal == "user@example.com" def test_authorize_groups_to_roles_admin(self, app, principal): self.test_ldap_user = principal @@ -54,11 +72,11 @@ class TestLdapPrincipal: def test_authorize_required_group_missing(self, app, principal): self.test_ldap_user = principal - roles = self.test_ldap_user.authorize_test_required_group('Not Allowed') + roles = self.test_ldap_user.authorize_test_required_group("Not Allowed") assert not roles def test_authorize_required_group_access(self, session, principal): self.test_ldap_user = principal - roles = self.test_ldap_user.authorize_test_required_group('Lemur Access') + roles = self.test_ldap_user.authorize_test_required_group("Lemur Access") assert len(roles) >= 1 assert any(x.name == "user@example.com" for x in roles) diff --git a/lemur/tests/test_logs.py b/lemur/tests/test_logs.py index 516f5bb7..6705ffca 100644 --- a/lemur/tests/test_logs.py +++ b/lemur/tests/test_logs.py @@ -1,21 +1,32 @@ import pytest -from lemur.tests.vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN +from lemur.tests.vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, +) from lemur.logs.views import * # noqa def test_private_key_audit(client, certificate): from lemur.certificates.views import CertificatePrivateKey, api + assert len(certificate.logs) == 0 - client.get(api.url_for(CertificatePrivateKey, certificate_id=certificate.id), headers=VALID_ADMIN_HEADER_TOKEN) + client.get( + api.url_for(CertificatePrivateKey, certificate_id=certificate.id), + headers=VALID_ADMIN_HEADER_TOKEN, + ) assert len(certificate.logs) == 1 -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_get_logs(client, token, status): assert client.get(api.url_for(LogsList), headers=token).status_code == status diff --git a/lemur/tests/test_messaging.py b/lemur/tests/test_messaging.py index fc0e62da..98e9ebf3 100644 --- a/lemur/tests/test_messaging.py +++ b/lemur/tests/test_messaging.py @@ -8,14 +8,21 @@ from moto import mock_ses def test_needs_notification(app, certificate, notification): from lemur.notifications.messaging import needs_notification + assert not needs_notification(certificate) with pytest.raises(Exception): - notification.options = [{'name': 'interval', 'value': 10}, {'name': 'unit', 'value': 
'min'}] + notification.options = [ + {"name": "interval", "value": 10}, + {"name": "unit", "value": "min"}, + ] certificate.notifications.append(notification) needs_notification(certificate) - certificate.notifications[0].options = [{'name': 'interval', 'value': 10}, {'name': 'unit', 'value': 'days'}] + certificate.notifications[0].options = [ + {"name": "interval", "value": 10}, + {"name": "unit", "value": "days"}, + ] assert not needs_notification(certificate) delta = certificate.not_after - timedelta(days=10) @@ -30,7 +37,8 @@ def test_get_certificates(app, certificate, notification): delta = certificate.not_after - timedelta(days=2) notification.options = [ - {'name': 'interval', 'value': 2}, {'name': 'unit', 'value': 'days'} + {"name": "interval", "value": 2}, + {"name": "unit", "value": "days"}, ] with freeze_time(delta.datetime): @@ -55,11 +63,16 @@ def test_get_eligible_certificates(app, certificate, notification): from lemur.notifications.messaging import get_eligible_certificates certificate.notifications.append(notification) - certificate.notifications[0].options = [{'name': 'interval', 'value': 10}, {'name': 'unit', 'value': 'days'}] + certificate.notifications[0].options = [ + {"name": "interval", "value": 10}, + {"name": "unit", "value": "days"}, + ] delta = certificate.not_after - timedelta(days=10) with freeze_time(delta.datetime): - assert get_eligible_certificates() == {certificate.owner: {notification.label: [(notification, certificate)]}} + assert get_eligible_certificates() == { + certificate.owner: {notification.label: [(notification, certificate)]} + } @mock_ses @@ -67,7 +80,10 @@ def test_send_expiration_notification(certificate, notification, notification_pl from lemur.notifications.messaging import send_expiration_notifications certificate.notifications.append(notification) - certificate.notifications[0].options = [{'name': 'interval', 'value': 10}, {'name': 'unit', 'value': 'days'}] + certificate.notifications[0].options = [ + {"name": "interval", "value": 10}, + {"name": "unit", "value": "days"}, + ] delta = certificate.not_after - timedelta(days=10) with freeze_time(delta.datetime): @@ -75,7 +91,9 @@ def test_send_expiration_notification(certificate, notification, notification_pl @mock_ses -def test_send_expiration_notification_with_no_notifications(certificate, notification, notification_plugin): +def test_send_expiration_notification_with_no_notifications( + certificate, notification, notification_plugin +): from lemur.notifications.messaging import send_expiration_notifications delta = certificate.not_after - timedelta(days=10) @@ -86,4 +104,5 @@ def test_send_expiration_notification_with_no_notifications(certificate, notific @mock_ses def test_send_rotation_notification(notification_plugin, certificate): from lemur.notifications.messaging import send_rotation_notification + send_rotation_notification(certificate, notification_plugin=notification_plugin) diff --git a/lemur/tests/test_missing.py b/lemur/tests/test_missing.py index 4f2c20c6..be615ced 100644 --- a/lemur/tests/test_missing.py +++ b/lemur/tests/test_missing.py @@ -9,9 +9,12 @@ def test_convert_validity_years(session): with freeze_time("2016-01-01"): data = convert_validity_years(dict(validity_years=2)) - assert data['validity_start'] == arrow.utcnow().isoformat() - assert data['validity_end'] == arrow.utcnow().replace(years=+2).isoformat() + assert data["validity_start"] == arrow.utcnow().isoformat() + assert data["validity_end"] == arrow.utcnow().replace(years=+2).isoformat() with 
freeze_time("2015-01-10"): data = convert_validity_years(dict(validity_years=1)) - assert data['validity_end'] == arrow.utcnow().replace(years=+1, days=-2).isoformat() + assert ( + data["validity_end"] + == arrow.utcnow().replace(years=+1, days=-2).isoformat() + ) diff --git a/lemur/tests/test_notifications.py b/lemur/tests/test_notifications.py index 6daee0a8..20079f97 100644 --- a/lemur/tests/test_notifications.py +++ b/lemur/tests/test_notifications.py @@ -3,20 +3,22 @@ import pytest from lemur.notifications.views import * # noqa -from .vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN +from .vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, +) def test_notification_input_schema(client, notification_plugin, notification): from lemur.notifications.schemas import NotificationInputSchema input_data = { - 'label': 'notification1', - 'options': {}, - 'description': 'my notification', - 'active': True, - 'plugin': { - 'slug': 'test-notification' - } + "label": "notification1", + "options": {}, + "description": "my notification", + "active": True, + "plugin": {"slug": "test-notification"}, } data, errors = NotificationInputSchema().load(input_data) @@ -24,91 +26,156 @@ def test_notification_input_schema(client, notification_plugin, notification): assert not errors -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_notification_get(client, notification_plugin, notification, token, status): - assert client.get(api.url_for(Notifications, notification_id=notification.id), headers=token).status_code == status + assert ( + client.get( + api.url_for(Notifications, notification_id=notification.id), headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_notification_post_(client, token, status): - assert client.post(api.url_for(Notifications, notification_id=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(Notifications, notification_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_notification_put(client, token, status): - assert client.put(api.url_for(Notifications, notification_id=1), data={}, headers=token).status_code == status + assert ( + client.put( + api.url_for(Notifications, notification_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + 
(VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_notification_delete(client, token, status): - assert client.delete(api.url_for(Notifications, notification_id=1), headers=token).status_code == status + assert ( + client.delete( + api.url_for(Notifications, notification_id=1), headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_notification_patch(client, token, status): - assert client.patch(api.url_for(Notifications, notification_id=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(Notifications, notification_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_notification_list_post_(client, token, status): - assert client.post(api.url_for(NotificationsList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(NotificationsList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) -def test_notification_list_get(client, notification_plugin, notification, token, status): - assert client.get(api.url_for(NotificationsList), headers=token).status_code == status +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) +def test_notification_list_get( + client, notification_plugin, notification, token, status +): + assert ( + client.get(api.url_for(NotificationsList), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_notification_list_delete(client, token, status): - assert client.delete(api.url_for(NotificationsList), headers=token).status_code == status + assert ( + client.delete(api.url_for(NotificationsList), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_notification_list_patch(client, token, status): - assert client.patch(api.url_for(NotificationsList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(NotificationsList), data={}, headers=token).status_code + == status + ) diff --git a/lemur/tests/test_pending_certificates.py 
b/lemur/tests/test_pending_certificates.py index 043002d3..3e755574 100644 --- a/lemur/tests/test_pending_certificates.py +++ b/lemur/tests/test_pending_certificates.py @@ -4,12 +4,19 @@ import pytest from marshmallow import ValidationError from lemur.pending_certificates.views import * # noqa -from .vectors import CSR_STR, INTERMEDIATE_CERT_STR, VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, \ - VALID_USER_HEADER_TOKEN, WILDCARD_CERT_STR +from .vectors import ( + CSR_STR, + INTERMEDIATE_CERT_STR, + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, + WILDCARD_CERT_STR, +) def test_increment_attempt(pending_certificate): from lemur.pending_certificates.service import increment_attempt + initial_attempt = pending_certificate.number_attempts attempts = increment_attempt(pending_certificate) assert attempts == initial_attempt + 1 @@ -17,50 +24,66 @@ def test_increment_attempt(pending_certificate): def test_create_pending_certificate(async_issuer_plugin, async_authority, user): from lemur.certificates.service import create - pending_cert = create(authority=async_authority, csr=CSR_STR, owner='joe@example.com', creator=user['user'], - common_name='ACommonName') - assert pending_cert.external_id == '12345' + + pending_cert = create( + authority=async_authority, + csr=CSR_STR, + owner="joe@example.com", + creator=user["user"], + common_name="ACommonName", + ) + assert pending_cert.external_id == "12345" def test_create_pending(pending_certificate, user, session): import copy from lemur.pending_certificates.service import create_certificate, get - cert = {'body': WILDCARD_CERT_STR, - 'chain': INTERMEDIATE_CERT_STR, - 'external_id': '54321'} + + cert = { + "body": WILDCARD_CERT_STR, + "chain": INTERMEDIATE_CERT_STR, + "external_id": "54321", + } # Weird copy because the session behavior. pending_certificate is a valid object but the # return of vars(pending_certificate) is a sessionobject, and so nothing from the pending_cert # is used to create the certificate. Maybe a bug due to using vars(), and should copy every # field explicitly. 
pending_certificate = copy.copy(get(pending_certificate.id)) - real_cert = create_certificate(pending_certificate, cert, user['user']) + real_cert = create_certificate(pending_certificate, cert, user["user"]) assert real_cert.owner == pending_certificate.owner assert real_cert.notify == pending_certificate.notify assert real_cert.private_key == pending_certificate.private_key - assert real_cert.external_id == '54321' + assert real_cert.external_id == "54321" -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 204), - (VALID_ADMIN_API_TOKEN, 204), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 204), + (VALID_ADMIN_API_TOKEN, 204), + ("", 401), + ], +) def test_pending_cancel(client, pending_certificate, token, status): - assert client.delete(api.url_for(PendingCertificates, pending_certificate_id=pending_certificate.id), - data=json.dumps({'note': "unit test", 'send_email': False}), - headers=token).status_code == status + assert ( + client.delete( + api.url_for( + PendingCertificates, pending_certificate_id=pending_certificate.id + ), + data=json.dumps({"note": "unit test", "send_email": False}), + headers=token, + ).status_code + == status + ) def test_pending_upload(pending_certificate_from_full_chain_ca): from lemur.pending_certificates.service import upload from lemur.certificates.service import get - cert = {'body': WILDCARD_CERT_STR, - 'chain': None, - 'external_id': None - } + cert = {"body": WILDCARD_CERT_STR, "chain": None, "external_id": None} pending_cert = upload(pending_certificate_from_full_chain_ca.id, **cert) assert pending_cert.resolved @@ -71,9 +94,10 @@ def test_pending_upload_with_chain(pending_certificate_from_partial_chain_ca): from lemur.pending_certificates.service import upload from lemur.certificates.service import get - cert = {'body': WILDCARD_CERT_STR, - 'chain': INTERMEDIATE_CERT_STR, - 'external_id': None + cert = { + "body": WILDCARD_CERT_STR, + "chain": INTERMEDIATE_CERT_STR, + "external_id": None, } pending_cert = upload(pending_certificate_from_partial_chain_ca.id, **cert) @@ -84,11 +108,9 @@ def test_pending_upload_with_chain(pending_certificate_from_partial_chain_ca): def test_invalid_pending_upload_with_chain(pending_certificate_from_partial_chain_ca): from lemur.pending_certificates.service import upload - cert = {'body': WILDCARD_CERT_STR, - 'chain': None, - 'external_id': None - } + cert = {"body": WILDCARD_CERT_STR, "chain": None, "external_id": None} with pytest.raises(ValidationError) as err: upload(pending_certificate_from_partial_chain_ca.id, **cert) assert str(err.value).startswith( - 'Incorrect chain certificate(s) provided: \'*.wild.example.org\' is not signed by \'LemurTrust Unittests Root CA 2018') + "Incorrect chain certificate(s) provided: '*.wild.example.org' is not signed by 'LemurTrust Unittests Root CA 2018" + ) diff --git a/lemur/tests/test_roles.py b/lemur/tests/test_roles.py index e5483e00..6e612062 100644 --- a/lemur/tests/test_roles.py +++ b/lemur/tests/test_roles.py @@ -3,16 +3,23 @@ import json import pytest from lemur.roles.views import * # noqa -from lemur.tests.factories import RoleFactory, AuthorityFactory, CertificateFactory, UserFactory -from .vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN +from lemur.tests.factories import ( + RoleFactory, + AuthorityFactory, + CertificateFactory, + UserFactory, +) +from .vectors import ( + VALID_ADMIN_API_TOKEN, 
+ VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, +) def test_role_input_schema(client): from lemur.roles.schemas import RoleInputSchema - input_data = { - 'name': 'myRole' - } + input_data = {"name": "myRole"} data, errors = RoleInputSchema().load(input_data) @@ -38,60 +45,80 @@ def test_multiple_authority_certificate_association(session, client): assert role.certificates[1].name == certificate1.name -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_role_get(client, token, status): - assert client.get(api.url_for(Roles, role_id=1), headers=token).status_code == status + assert ( + client.get(api.url_for(Roles, role_id=1), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_role_post_(client, token, status): - assert client.post(api.url_for(Roles, role_id=1), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(Roles, role_id=1), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 400), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 400), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_role_put(client, token, status): - assert client.put(api.url_for(Roles, role_id=1), data={}, headers=token).status_code == status + assert ( + client.put(api.url_for(Roles, role_id=1), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_role_put_with_data(client, session, token, status): user = UserFactory() role = RoleFactory() session.commit() - data = { - 'users': [ - {'id': user.id} - ], - 'id': role.id, - 'name': role.name - } + data = {"users": [{"id": user.id}], "id": role.id, "name": role.name} - assert client.put(api.url_for(Roles, role_id=role.id), data=json.dumps(data), headers=token).status_code == status + assert ( + client.put( + api.url_for(Roles, role_id=role.id), data=json.dumps(data), headers=token + ).status_code + == status + ) def test_role_put_with_data_and_user(client, session): from lemur.auth.service import create_token + user = UserFactory() role = RoleFactory(users=[user]) role1 = RoleFactory() @@ -99,83 +126,119 @@ def test_role_put_with_data_and_user(client, session): session.commit() headers = { - 'Authorization': 'Basic ' + create_token(user), - 'Content-Type': 'application/json' + "Authorization": "Basic " + create_token(user), + "Content-Type": "application/json", } data = { - 'users': [ - {'id': user1.id}, - {'id': user.id} - ], - 'id': role.id, - 'name': role.name + 
"users": [{"id": user1.id}, {"id": user.id}], + "id": role.id, + "name": role.name, } - assert client.put(api.url_for(Roles, role_id=role.id), data=json.dumps(data), headers=headers).status_code == 200 - assert client.get(api.url_for(RolesList), data={}, headers=headers).json['total'] > 1 + assert ( + client.put( + api.url_for(Roles, role_id=role.id), data=json.dumps(data), headers=headers + ).status_code + == 200 + ) + assert ( + client.get(api.url_for(RolesList), data={}, headers=headers).json["total"] > 1 + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_role_delete(client, token, status, role): - assert client.delete(api.url_for(Roles, role_id=role.id), headers=token).status_code == status + assert ( + client.delete(api.url_for(Roles, role_id=role.id), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_role_patch(client, token, status): - assert client.patch(api.url_for(Roles, role_id=1), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(Roles, role_id=1), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_role_list_post_(client, token, status): - assert client.post(api.url_for(RolesList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(RolesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_role_list_get(client, token, status): assert client.get(api.url_for(RolesList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_role_list_delete(client, token, status): assert client.delete(api.url_for(RolesList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_role_list_patch(client, 
token, status): - assert client.patch(api.url_for(RolesList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(RolesList), data={}, headers=token).status_code + == status + ) def test_sensitive_filter(client): - resp = client.get(api.url_for(RolesList) + '?filter=password;a', headers=VALID_ADMIN_HEADER_TOKEN) - assert "'password' is not sortable or filterable" in resp.json['message'] + resp = client.get( + api.url_for(RolesList) + "?filter=password;a", headers=VALID_ADMIN_HEADER_TOKEN + ) + assert "'password' is not sortable or filterable" in resp.json["message"] diff --git a/lemur/tests/test_schemas.py b/lemur/tests/test_schemas.py index e2a05213..2c085849 100644 --- a/lemur/tests/test_schemas.py +++ b/lemur/tests/test_schemas.py @@ -14,15 +14,15 @@ def test_get_object_attribute(): get_object_attribute([{}], many=True) with pytest.raises(ValidationError): - get_object_attribute([{}, {'id': 1}], many=True) + get_object_attribute([{}, {"id": 1}], many=True) with pytest.raises(ValidationError): - get_object_attribute([{}, {'name': 'test'}], many=True) + get_object_attribute([{}, {"name": "test"}], many=True) - assert get_object_attribute({'name': 'test'}) == 'name' - assert get_object_attribute({'id': 1}) == 'id' - assert get_object_attribute([{'name': 'test'}], many=True) == 'name' - assert get_object_attribute([{'id': 1}], many=True) == 'id' + assert get_object_attribute({"name": "test"}) == "name" + assert get_object_attribute({"id": 1}) == "id" + assert get_object_attribute([{"name": "test"}], many=True) == "name" + assert get_object_attribute([{"id": 1}], many=True) == "id" def test_fetch_objects(session): @@ -33,26 +33,26 @@ def test_fetch_objects(session): role1 = RoleFactory() session.commit() - data = {'id': role.id} + data = {"id": role.id} found_role = fetch_objects(Role, data) assert found_role == role - data = {'name': role.name} + data = {"name": role.name} found_role = fetch_objects(Role, data) assert found_role == role - data = [{'id': role.id}, {'id': role1.id}] + data = [{"id": role.id}, {"id": role1.id}] found_roles = fetch_objects(Role, data, many=True) assert found_roles == [role, role1] - data = [{'name': role.name}, {'name': role1.name}] + data = [{"name": role.name}, {"name": role1.name}] found_roles = fetch_objects(Role, data, many=True) assert found_roles == [role, role1] with pytest.raises(ValidationError): - data = [{'name': 'blah'}, {'name': role1.name}] + data = [{"name": "blah"}, {"name": role1.name}] fetch_objects(Role, data, many=True) with pytest.raises(ValidationError): - data = {'name': 'nah'} + data = {"name": "nah"} fetch_objects(Role, data) diff --git a/lemur/tests/test_sources.py b/lemur/tests/test_sources.py index 1ce0d9ba..312c008f 100644 --- a/lemur/tests/test_sources.py +++ b/lemur/tests/test_sources.py @@ -2,17 +2,22 @@ import pytest from lemur.sources.views import * # noqa -from .vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN, WILDCARD_CERT_STR, \ - WILDCARD_CERT_KEY +from .vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, + WILDCARD_CERT_STR, + WILDCARD_CERT_KEY, +) def validate_source_schema(client): from lemur.sources.schemas import SourceInputSchema input_data = { - 'label': 'exampleSource', - 'options': {}, - 'plugin': {'slug': 'aws-source'} + "label": "exampleSource", + "options": {}, + "plugin": {"slug": "aws-source"}, } data, errors = SourceInputSchema().load(input_data) @@ -26,111 +31,171 @@ def 
test_create_certificate(user, source): certificate_create({}, source) data = { - 'body': WILDCARD_CERT_STR, - 'private_key': WILDCARD_CERT_KEY, - 'owner': 'bob@example.com', - 'creator': user['user'] + "body": WILDCARD_CERT_STR, + "private_key": WILDCARD_CERT_KEY, + "owner": "bob@example.com", + "creator": user["user"], } cert = certificate_create(data, source) assert cert.notifications -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 404), - (VALID_ADMIN_HEADER_TOKEN, 404), - (VALID_ADMIN_API_TOKEN, 404), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 404), + (VALID_ADMIN_HEADER_TOKEN, 404), + (VALID_ADMIN_API_TOKEN, 404), + ("", 401), + ], +) def test_source_get(client, source_plugin, token, status): - assert client.get(api.url_for(Sources, source_id=43543), headers=token).status_code == status + assert ( + client.get(api.url_for(Sources, source_id=43543), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_source_post_(client, token, status): - assert client.post(api.url_for(Sources, source_id=1), data={}, headers=token).status_code == status + assert ( + client.post( + api.url_for(Sources, source_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_source_put(client, token, status): - assert client.put(api.url_for(Sources, source_id=1), data={}, headers=token).status_code == status + assert ( + client.put( + api.url_for(Sources, source_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_source_delete(client, token, status): - assert client.delete(api.url_for(Sources, source_id=1), headers=token).status_code == status + assert ( + client.delete(api.url_for(Sources, source_id=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_source_patch(client, token, status): - assert client.patch(api.url_for(Sources, source_id=1), data={}, headers=token).status_code == status + assert ( + client.patch( + api.url_for(Sources, source_id=1), data={}, headers=token + ).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + 
"token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_sources_list_get(client, source_plugin, token, status): assert client.get(api.url_for(SourcesList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_sources_list_post(client, token, status): - assert client.post(api.url_for(SourcesList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(SourcesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_sources_list_put(client, token, status): - assert client.put(api.url_for(SourcesList), data={}, headers=token).status_code == status + assert ( + client.put(api.url_for(SourcesList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_sources_list_delete(client, token, status): assert client.delete(api.url_for(SourcesList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_sources_list_patch(client, token, status): - assert client.patch(api.url_for(SourcesList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(SourcesList), data={}, headers=token).status_code + == status + ) diff --git a/lemur/tests/test_users.py b/lemur/tests/test_users.py index 61db93bf..9e67f868 100644 --- a/lemur/tests/test_users.py +++ b/lemur/tests/test_users.py @@ -4,16 +4,20 @@ import pytest from lemur.tests.factories import UserFactory, RoleFactory from lemur.users.views import * # noqa -from .vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN +from .vectors import ( + VALID_ADMIN_API_TOKEN, + VALID_ADMIN_HEADER_TOKEN, + VALID_USER_HEADER_TOKEN, +) def test_user_input_schema(client): from lemur.users.schemas import UserInputSchema input_data = { - 'username': 'example', - 'password': '1233432', - 'email': 'example@example.com' + "username": "example", + "password": "1233432", + "email": "example@example.com", } data, errors = UserInputSchema().load(input_data) @@ -21,104 +25,156 @@ def test_user_input_schema(client): assert not errors -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) 
+@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + (VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_user_get(client, token, status): - assert client.get(api.url_for(Users, user_id=1), headers=token).status_code == status + assert ( + client.get(api.url_for(Users, user_id=1), headers=token).status_code == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_user_post_(client, token, status): - assert client.post(api.url_for(Users, user_id=1), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(Users, user_id=1), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_user_put(client, token, status): - assert client.put(api.url_for(Users, user_id=1), data={}, headers=token).status_code == status + assert ( + client.put(api.url_for(Users, user_id=1), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_user_delete(client, token, status): - assert client.delete(api.url_for(Users, user_id=1), headers=token).status_code == status + assert ( + client.delete(api.url_for(Users, user_id=1), headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_user_patch(client, token, status): - assert client.patch(api.url_for(Users, user_id=1), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(Users, user_id=1), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 403), - (VALID_ADMIN_HEADER_TOKEN, 400), - (VALID_ADMIN_API_TOKEN, 400), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 403), + (VALID_ADMIN_HEADER_TOKEN, 400), + (VALID_ADMIN_API_TOKEN, 400), + ("", 401), + ], +) def test_user_list_post_(client, token, status): - assert client.post(api.url_for(UsersList), data={}, headers=token).status_code == status + assert ( + client.post(api.url_for(UsersList), data={}, headers=token).status_code + == status + ) -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 200), - (VALID_ADMIN_HEADER_TOKEN, 200), - (VALID_ADMIN_API_TOKEN, 200), - ('', 401) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 200), + 
(VALID_ADMIN_HEADER_TOKEN, 200), + (VALID_ADMIN_API_TOKEN, 200), + ("", 401), + ], +) def test_user_list_get(client, token, status): assert client.get(api.url_for(UsersList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_user_list_delete(client, token, status): assert client.delete(api.url_for(UsersList), headers=token).status_code == status -@pytest.mark.parametrize("token,status", [ - (VALID_USER_HEADER_TOKEN, 405), - (VALID_ADMIN_HEADER_TOKEN, 405), - (VALID_ADMIN_API_TOKEN, 405), - ('', 405) -]) +@pytest.mark.parametrize( + "token,status", + [ + (VALID_USER_HEADER_TOKEN, 405), + (VALID_ADMIN_HEADER_TOKEN, 405), + (VALID_ADMIN_API_TOKEN, 405), + ("", 405), + ], +) def test_user_list_patch(client, token, status): - assert client.patch(api.url_for(UsersList), data={}, headers=token).status_code == status + assert ( + client.patch(api.url_for(UsersList), data={}, headers=token).status_code + == status + ) def test_sensitive_filter(client): - resp = client.get(api.url_for(UsersList) + '?filter=password;a', headers=VALID_ADMIN_HEADER_TOKEN) - assert "'password' is not sortable or filterable" in resp.json['message'] + resp = client.get( + api.url_for(UsersList) + "?filter=password;a", headers=VALID_ADMIN_HEADER_TOKEN + ) + assert "'password' is not sortable or filterable" in resp.json["message"] def test_sensitive_sort(client): - resp = client.get(api.url_for(UsersList) + '?sortBy=password&sortDir=asc', headers=VALID_ADMIN_HEADER_TOKEN) - assert "'password' is not sortable or filterable" in resp.json['message'] + resp = client.get( + api.url_for(UsersList) + "?sortBy=password&sortDir=asc", + headers=VALID_ADMIN_HEADER_TOKEN, + ) + assert "'password' is not sortable or filterable" in resp.json["message"] def test_user_role_changes(client, session): @@ -128,25 +184,30 @@ def test_user_role_changes(client, session): session.flush() data = { - 'active': True, - 'id': user.id, - 'username': user.username, - 'email': user.email, - 'roles': [ - {'id': role1.id}, - {'id': role2.id}, - ], + "active": True, + "id": user.id, + "username": user.username, + "email": user.email, + "roles": [{"id": role1.id}, {"id": role2.id}], } # PUT two roles - resp = client.put(api.url_for(Users, user_id=user.id), data=json.dumps(data), headers=VALID_ADMIN_HEADER_TOKEN) + resp = client.put( + api.url_for(Users, user_id=user.id), + data=json.dumps(data), + headers=VALID_ADMIN_HEADER_TOKEN, + ) assert resp.status_code == 200 - assert len(resp.json['roles']) == 2 + assert len(resp.json["roles"]) == 2 assert set(user.roles) == {role1, role2} # Remove one role and PUT again - del data['roles'][1] - resp = client.put(api.url_for(Users, user_id=user.id), data=json.dumps(data), headers=VALID_ADMIN_HEADER_TOKEN) + del data["roles"][1] + resp = client.put( + api.url_for(Users, user_id=user.id), + data=json.dumps(data), + headers=VALID_ADMIN_HEADER_TOKEN, + ) assert resp.status_code == 200 - assert len(resp.json['roles']) == 1 + assert len(resp.json["roles"]) == 1 assert set(user.roles) == {role1} diff --git a/lemur/tests/test_utils.py b/lemur/tests/test_utils.py index 74c11643..2e117d25 100644 --- a/lemur/tests/test_utils.py +++ b/lemur/tests/test_utils.py @@ -1,40 +1,49 @@ import pytest -from 
lemur.tests.vectors import SAN_CERT, INTERMEDIATE_CERT, ROOTCA_CERT, EC_CERT_EXAMPLE, ECDSA_PRIME256V1_CERT, ECDSA_SECP384r1_CERT, DSA_CERT +from lemur.tests.vectors import ( + SAN_CERT, + INTERMEDIATE_CERT, + ROOTCA_CERT, + EC_CERT_EXAMPLE, + ECDSA_PRIME256V1_CERT, + ECDSA_SECP384r1_CERT, + DSA_CERT, +) def test_generate_private_key(): from lemur.common.utils import generate_private_key - assert generate_private_key('RSA2048') - assert generate_private_key('RSA4096') - assert generate_private_key('ECCPRIME192V1') - assert generate_private_key('ECCPRIME256V1') - assert generate_private_key('ECCSECP192R1') - assert generate_private_key('ECCSECP224R1') - assert generate_private_key('ECCSECP256R1') - assert generate_private_key('ECCSECP384R1') - assert generate_private_key('ECCSECP521R1') - assert generate_private_key('ECCSECP256K1') - assert generate_private_key('ECCSECT163K1') - assert generate_private_key('ECCSECT233K1') - assert generate_private_key('ECCSECT283K1') - assert generate_private_key('ECCSECT409K1') - assert generate_private_key('ECCSECT571K1') - assert generate_private_key('ECCSECT163R2') - assert generate_private_key('ECCSECT233R1') - assert generate_private_key('ECCSECT283R1') - assert generate_private_key('ECCSECT409R1') - assert generate_private_key('ECCSECT571R2') + assert generate_private_key("RSA2048") + assert generate_private_key("RSA4096") + assert generate_private_key("ECCPRIME192V1") + assert generate_private_key("ECCPRIME256V1") + assert generate_private_key("ECCSECP192R1") + assert generate_private_key("ECCSECP224R1") + assert generate_private_key("ECCSECP256R1") + assert generate_private_key("ECCSECP384R1") + assert generate_private_key("ECCSECP521R1") + assert generate_private_key("ECCSECP256K1") + assert generate_private_key("ECCSECT163K1") + assert generate_private_key("ECCSECT233K1") + assert generate_private_key("ECCSECT283K1") + assert generate_private_key("ECCSECT409K1") + assert generate_private_key("ECCSECT571K1") + assert generate_private_key("ECCSECT163R2") + assert generate_private_key("ECCSECT233R1") + assert generate_private_key("ECCSECT283R1") + assert generate_private_key("ECCSECT409R1") + assert generate_private_key("ECCSECT571R2") with pytest.raises(Exception): - generate_private_key('LEMUR') + generate_private_key("LEMUR") def test_get_authority_key(): - '''test get authority key function''' + """test get authority key function""" from lemur.common.utils import get_authority_key - test_cert = '''-----BEGIN CERTIFICATE----- + + test_cert = """-----BEGIN CERTIFICATE----- MIIGYjCCBEqgAwIBAgIUVS7mn6LR5XlQyEGxQ4w9YAWL/XIwDQYJKoZIhvcNAQEN BQAweTELMAkGA1UEBhMCREUxDTALBgNVBAgTBEJvbm4xEDAOBgNVBAcTB0dlcm1h bnkxITAfBgNVBAoTGFRlbGVrb20gRGV1dHNjaGxhbmQgR21iSDELMAkGA1UECxMC @@ -70,9 +79,9 @@ zc75IDsn5wP6A3KflduWW7ri0bYUiKe5higMcbUM0aXzTEAVxsxPk8aEsR9dazF7 y4L/msew3UjFE3ovDHgStjWM1NBMxuIvJEbWOsiB2WA2l3FiT8HvFi0eX/0hbkGi 5LL+oz7nvm9Of7te/BV6Rq0rXWN4d6asO+QlLkTqbmAH6rwunmPCY7MbLXXtP/qM KFfxwrO1 ------END CERTIFICATE-----''' +-----END CERTIFICATE-----""" authority_key = get_authority_key(test_cert) - assert authority_key == 'feacb541be81771293affa412d8dc9f66a3ebb80' + assert authority_key == "feacb541be81771293affa412d8dc9f66a3ebb80" def test_is_selfsigned(selfsigned_cert): diff --git a/lemur/tests/test_validators.py b/lemur/tests/test_validators.py index c3d5357d..77148079 100644 --- a/lemur/tests/test_validators.py +++ b/lemur/tests/test_validators.py @@ -12,7 +12,7 @@ def test_private_key(session): parse_private_key(SAN_CERT_KEY) with 
pytest.raises(ValueError): - parse_private_key('invalid_private_key') + parse_private_key("invalid_private_key") def test_validate_private_key(session): @@ -29,7 +29,7 @@ def test_sub_alt_type(session): from lemur.common.validators import sub_alt_type with pytest.raises(ValidationError): - sub_alt_type('CNAME') + sub_alt_type("CNAME") def test_dates(session): @@ -44,7 +44,13 @@ def test_dates(session): dates(dict(validity_end=datetime(2016, 1, 1))) with pytest.raises(ValidationError): - dates(dict(validity_start=datetime(2016, 1, 5), validity_end=datetime(2016, 1, 1))) + dates( + dict(validity_start=datetime(2016, 1, 5), validity_end=datetime(2016, 1, 1)) + ) with pytest.raises(ValidationError): - dates(dict(validity_start=datetime(2016, 1, 1), validity_end=datetime(2016, 1, 10))) + dates( + dict( + validity_start=datetime(2016, 1, 1), validity_end=datetime(2016, 1, 10) + ) + ) diff --git a/lemur/tests/test_verify.py b/lemur/tests/test_verify.py index a1f0f5eb..348f6559 100644 --- a/lemur/tests/test_verify.py +++ b/lemur/tests/test_verify.py @@ -13,20 +13,24 @@ from .vectors import INTERMEDIATE_CERT_STR def test_verify_simple_cert(): """Simple certificate without CRL or OCSP.""" # Verification returns None if there are no means to verify a cert - assert verify_string(INTERMEDIATE_CERT_STR, '') is None + assert verify_string(INTERMEDIATE_CERT_STR, "") is None def test_verify_crl_unknown_scheme(cert_builder, private_key): """Unknown distribution point URI schemes should be ignored.""" - ldap_uri = 'ldap://ldap.example.org/cn=Example%20Certificate%20Authority?certificateRevocationList;binary' - crl_dp = x509.DistributionPoint([UniformResourceIdentifier(ldap_uri)], - relative_name=None, reasons=None, crl_issuer=None) - cert = (cert_builder - .add_extension(x509.CRLDistributionPoints([crl_dp]), critical=False) - .sign(private_key, hashes.SHA256(), default_backend())) + ldap_uri = "ldap://ldap.example.org/cn=Example%20Certificate%20Authority?certificateRevocationList;binary" + crl_dp = x509.DistributionPoint( + [UniformResourceIdentifier(ldap_uri)], + relative_name=None, + reasons=None, + crl_issuer=None, + ) + cert = cert_builder.add_extension( + x509.CRLDistributionPoints([crl_dp]), critical=False + ).sign(private_key, hashes.SHA256(), default_backend()) with mktempfile() as cert_tmp: - with open(cert_tmp, 'wb') as f: + with open(cert_tmp, "wb") as f: f.write(cert.public_bytes(serialization.Encoding.PEM)) # Must not raise exception @@ -35,15 +39,19 @@ def test_verify_crl_unknown_scheme(cert_builder, private_key): def test_verify_crl_unreachable(cert_builder, private_key): """Unreachable CRL distribution point results in error.""" - ldap_uri = 'http://invalid.example.org/crl/foobar.crl' - crl_dp = x509.DistributionPoint([UniformResourceIdentifier(ldap_uri)], - relative_name=None, reasons=None, crl_issuer=None) - cert = (cert_builder - .add_extension(x509.CRLDistributionPoints([crl_dp]), critical=False) - .sign(private_key, hashes.SHA256(), default_backend())) + ldap_uri = "http://invalid.example.org/crl/foobar.crl" + crl_dp = x509.DistributionPoint( + [UniformResourceIdentifier(ldap_uri)], + relative_name=None, + reasons=None, + crl_issuer=None, + ) + cert = cert_builder.add_extension( + x509.CRLDistributionPoints([crl_dp]), critical=False + ).sign(private_key, hashes.SHA256(), default_backend()) with mktempfile() as cert_tmp: - with open(cert_tmp, 'wb') as f: + with open(cert_tmp, "wb") as f: f.write(cert.public_bytes(serialization.Encoding.PEM)) with pytest.raises(Exception, match="Unable to 
retrieve CRL:"): diff --git a/lemur/tests/vectors.py b/lemur/tests/vectors.py index cb5800a1..0768cdac 100644 --- a/lemur/tests/vectors.py +++ b/lemur/tests/vectors.py @@ -1,20 +1,23 @@ from lemur.common.utils import parse_certificate VALID_USER_HEADER_TOKEN = { - 'Authorization': 'Basic ' + 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE1MjE2NTIwMjIsImV4cCI6MjM4NTY1MjAyMiwic3ViIjoxfQ.uK4PZjVAs0gt6_9h2EkYkKd64nFXdOq-rHsJZzeQicc', - 'Content-Type': 'application/json' + "Authorization": "Basic " + + "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOjE1MjE2NTIwMjIsImV4cCI6MjM4NTY1MjAyMiwic3ViIjoxfQ.uK4PZjVAs0gt6_9h2EkYkKd64nFXdOq-rHsJZzeQicc", + "Content-Type": "application/json", } VALID_ADMIN_HEADER_TOKEN = { - 'Authorization': 'Basic ' + 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpYXQiOjE1MjE2NTE2NjMsInN1YiI6MiwiYWlkIjoxfQ.wyf5PkQNcggLrMFqxDfzjY-GWPw_XsuWvU2GmQaC5sg', - 'Content-Type': 'application/json' + "Authorization": "Basic " + + "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpYXQiOjE1MjE2NTE2NjMsInN1YiI6MiwiYWlkIjoxfQ.wyf5PkQNcggLrMFqxDfzjY-GWPw_XsuWvU2GmQaC5sg", + "Content-Type": "application/json", } VALID_ADMIN_API_TOKEN = { - 'Authorization': 'Basic ' + 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOjIsImFpZCI6MSwiaWF0IjoxNDM1MjMzMzY5fQ.umW0I_oh4MVZ2qrClzj9SfYnQl6cd0HGzh9EwkDW60I', - 'Content-Type': 'application/json' + "Authorization": "Basic " + + "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOjIsImFpZCI6MSwiaWF0IjoxNDM1MjMzMzY5fQ.umW0I_oh4MVZ2qrClzj9SfYnQl6cd0HGzh9EwkDW60I", + "Content-Type": "application/json", } diff --git a/lemur/users/models.py b/lemur/users/models.py index 79125b9c..d7b900dc 100644 --- a/lemur/users/models.py +++ b/lemur/users/models.py @@ -33,7 +33,7 @@ def hash_password(mapper, connect, target): class User(db.Model): - __tablename__ = 'users' + __tablename__ = "users" id = Column(Integer, primary_key=True) password = Column(String(128)) active = Column(Boolean()) @@ -41,14 +41,24 @@ class User(db.Model): username = Column(String(255), nullable=False, unique=True) email = Column(String(128), unique=True) profile_picture = Column(String(255)) - roles = relationship('Role', secondary=roles_users, passive_deletes=True, backref=db.backref('user'), lazy='dynamic') - certificates = relationship('Certificate', backref=db.backref('user'), lazy='dynamic') - pending_certificates = relationship('PendingCertificate', backref=db.backref('user'), lazy='dynamic') - authorities = relationship('Authority', backref=db.backref('user'), lazy='dynamic') - keys = relationship('ApiKey', backref=db.backref('user'), lazy='dynamic') - logs = relationship('Log', backref=db.backref('user'), lazy='dynamic') + roles = relationship( + "Role", + secondary=roles_users, + passive_deletes=True, + backref=db.backref("user"), + lazy="dynamic", + ) + certificates = relationship( + "Certificate", backref=db.backref("user"), lazy="dynamic" + ) + pending_certificates = relationship( + "PendingCertificate", backref=db.backref("user"), lazy="dynamic" + ) + authorities = relationship("Authority", backref=db.backref("user"), lazy="dynamic") + keys = relationship("ApiKey", backref=db.backref("user"), lazy="dynamic") + logs = relationship("Log", backref=db.backref("user"), lazy="dynamic") - sensitive_fields = ('password',) + sensitive_fields = ("password",) def check_password(self, password): """ @@ -68,7 +78,7 @@ class User(db.Model): :return: """ if self.password: - self.password = bcrypt.generate_password_hash(self.password).decode('utf-8') + self.password = 
bcrypt.generate_password_hash(self.password).decode("utf-8") @property def is_admin(self): @@ -79,11 +89,11 @@ class User(db.Model): :return: """ for role in self.roles: - if role.name == 'admin': + if role.name == "admin": return True def __repr__(self): return "User(username={username})".format(username=self.username) -listen(User, 'before_insert', hash_password) +listen(User, "before_insert", hash_password) diff --git a/lemur/users/schemas.py b/lemur/users/schemas.py index b5a21127..74bd93e9 100644 --- a/lemur/users/schemas.py +++ b/lemur/users/schemas.py @@ -8,7 +8,11 @@ from marshmallow import fields from lemur.common.schema import LemurInputSchema, LemurOutputSchema -from lemur.schemas import AssociatedRoleSchema, AssociatedCertificateSchema, AssociatedAuthoritySchema +from lemur.schemas import ( + AssociatedRoleSchema, + AssociatedCertificateSchema, + AssociatedAuthoritySchema, +) class UserInputSchema(LemurInputSchema): diff --git a/lemur/users/service.py b/lemur/users/service.py index c6557cb9..8fb91aa3 100644 --- a/lemur/users/service.py +++ b/lemur/users/service.py @@ -96,7 +96,7 @@ def get_by_email(email): :param email: :return: """ - return database.get(User, email, field='email') + return database.get(User, email, field="email") def get_by_username(username): @@ -106,7 +106,7 @@ def get_by_username(username): :param username: :return: """ - return database.get(User, username, field='username') + return database.get(User, username, field="username") def get_all(): @@ -129,10 +129,10 @@ def render(args): """ query = database.session_query(User) - filt = args.pop('filter') + filt = args.pop("filter") if filt: - terms = filt.split(';') + terms = filt.split(";") query = database.filter(query, User, terms) return database.sort_and_page(query, User, args) diff --git a/lemur/users/views.py b/lemur/users/views.py index eb67f014..06729177 100644 --- a/lemur/users/views.py +++ b/lemur/users/views.py @@ -18,15 +18,20 @@ from lemur.users import service from lemur.certificates import service as certificate_service from lemur.roles import service as role_service -from lemur.users.schemas import user_input_schema, user_output_schema, users_output_schema +from lemur.users.schemas import ( + user_input_schema, + user_output_schema, + users_output_schema, +) -mod = Blueprint('users', __name__) +mod = Blueprint("users", __name__) api = Api(mod) class UsersList(AuthenticatedResource): """ Defines the 'users' endpoint """ + def __init__(self): self.reqparse = reqparse.RequestParser() super(UsersList, self).__init__() @@ -83,8 +88,8 @@ class UsersList(AuthenticatedResource): :statuscode 200: no error """ parser = paginated_parser.copy() - parser.add_argument('owner', type=str, location='args') - parser.add_argument('id', type=str, location='args') + parser.add_argument("owner", type=str, location="args") + parser.add_argument("id", type=str, location="args") args = parser.parse_args() return service.render(args) @@ -137,7 +142,14 @@ class UsersList(AuthenticatedResource): :reqheader Authorization: OAuth token to authenticate :statuscode 200: no error """ - return service.create(data['username'], data['password'], data['email'], data['active'], None, data['roles']) + return service.create( + data["username"], + data["password"], + data["email"], + data["active"], + None, + data["roles"], + ) class Users(AuthenticatedResource): @@ -225,7 +237,14 @@ class Users(AuthenticatedResource): :reqheader Authorization: OAuth token to authenticate :statuscode 200: no error """ - return service.update(user_id, 
data['username'], data['email'], data['active'], None, data['roles']) + return service.update( + user_id, + data["username"], + data["email"], + data["active"], + None, + data["roles"], + ) class CertificateUsers(AuthenticatedResource): @@ -365,8 +384,12 @@ class Me(AuthenticatedResource): return g.current_user -api.add_resource(Me, '/auth/me', endpoint='me') -api.add_resource(UsersList, '/users', endpoint='users') -api.add_resource(Users, '/users/', endpoint='user') -api.add_resource(CertificateUsers, '/certificates//creator', endpoint='certificateCreator') -api.add_resource(RoleUsers, '/roles//users', endpoint='roleUsers') +api.add_resource(Me, "/auth/me", endpoint="me") +api.add_resource(UsersList, "/users", endpoint="users") +api.add_resource(Users, "/users/", endpoint="user") +api.add_resource( + CertificateUsers, + "/certificates//creator", + endpoint="certificateCreator", +) +api.add_resource(RoleUsers, "/roles//users", endpoint="roleUsers") diff --git a/lemur/utils.py b/lemur/utils.py index 1661e3f7..909d959a 100644 --- a/lemur/utils.py +++ b/lemur/utils.py @@ -31,7 +31,9 @@ def mktempfile(): @contextmanager def mktemppath(): try: - path = os.path.join(tempfile._get_default_tempdir(), next(tempfile._get_candidate_names())) + path = os.path.join( + tempfile._get_default_tempdir(), next(tempfile._get_candidate_names()) + ) yield path finally: try: @@ -53,7 +55,7 @@ def get_keys(): # when running lemur create_config, this code needs to work despite # the fact that there is not a current_app with a config at that point - keys = current_app.config.get('LEMUR_ENCRYPTION_KEYS', []) + keys = current_app.config.get("LEMUR_ENCRYPTION_KEYS", []) # this function is expected to return a list of keys, but we want # to let people just specify a single key @@ -97,7 +99,7 @@ class Vault(types.TypeDecorator): # ensure bytes for fernet if isinstance(value, str): - value = value.encode('utf-8') + value = value.encode("utf-8") return MultiFernet(self.keys).encrypt(value) @@ -117,4 +119,4 @@ class Vault(types.TypeDecorator): if not value: return - return MultiFernet(self.keys).decrypt(value).decode('utf8') + return MultiFernet(self.keys).decrypt(value).decode("utf8") diff --git a/requirements-dev.txt b/requirements-dev.txt index 1a5b5f9d..bfbadc8a 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -25,12 +25,12 @@ pygments==2.4.0 # via readme-renderer pyyaml==5.1 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine -requests==2.21.0 # via requests-toolbelt, twine +requests==2.22.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit tqdm==4.32.1 # via twine twine==1.13.0 -urllib3==1.24.3 # via requests -virtualenv==16.5.0 # via pre-commit +urllib3==1.25.2 # via requests +virtualenv==16.6.0 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.5.0 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index f23de8f4..bf60d82f 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.6.0.0 blinker==1.4 -boto3==1.9.147 -botocore==1.12.147 +boto3==1.9.149 +botocore==1.12.149 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -102,5 +102,5 @@ tabulate==0.8.3 twofish==0.3.0 urllib3==1.24.3 vine==1.3.0 -werkzeug==0.15.2 +werkzeug==0.15.4 xmltodict==0.12.0 diff --git a/requirements-tests.in b/requirements-tests.in index dcd3d0c7..d624d4f7 100644 --- a/requirements-tests.in +++ b/requirements-tests.in 
@@ -1,5 +1,6 @@ # Run `make up-reqs` to update pinned dependencies in requirement text files +black coverage factory-boy Faker diff --git a/requirements-tests.txt b/requirements-tests.txt index 27837359..95ceb652 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -4,19 +4,21 @@ # # pip-compile --no-index --output-file=requirements-tests.txt requirements-tests.in # +appdirs==1.4.3 # via black asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest -attrs==19.1.0 # via pytest +attrs==19.1.0 # via black, pytest aws-sam-translator==1.11.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto -boto3==1.9.147 # via aws-sam-translator, moto +black==19.3b0 +boto3==1.9.149 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.147 # via aws-xray-sdk, boto3, moto, s3transfer +botocore==1.12.149 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.3 # via cryptography -cfn-lint==0.20.1 # via moto +cfn-lint==0.20.2 # via moto chardet==3.0.4 # via requests -click==7.0 # via flask +click==7.0 # via black, flask coverage==4.5.3 cryptography==2.6.1 # via moto docker-pycreds==0.4.0 # via docker @@ -55,15 +57,16 @@ python-jose==3.0.1 # via moto pytz==2019.1 # via moto pyyaml==5.1 requests-mock==1.6.0 -requests==2.21.0 # via cfn-lint, docker, moto, requests-mock, responses +requests==2.22.0 # via cfn-lint, docker, moto, requests-mock, responses responses==0.10.6 # via moto rsa==4.0 # via python-jose s3transfer==0.2.0 # via boto3 six==1.12.0 # via aws-sam-translator, cfn-lint, cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client text-unidecode==1.2 # via faker +toml==0.10.0 # via black urllib3==1.24.3 # via botocore, requests wcwidth==0.1.7 # via pytest websocket-client==0.56.0 # via docker -werkzeug==0.15.2 # via flask, moto, pytest-flask +werkzeug==0.15.4 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk xmltodict==0.12.0 # via moto diff --git a/requirements.txt b/requirements.txt index 935e85ca..66f4fd40 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.147 -botocore==1.12.147 +boto3==1.9.149 +botocore==1.12.149 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -87,5 +87,5 @@ tabulate==0.8.3 twofish==0.3.0 # via pyjks urllib3==1.24.3 # via botocore, requests vine==1.3.0 # via amqp, celery -werkzeug==0.15.2 # via flask +werkzeug==0.15.4 # via flask xmltodict==0.12.0 From c5ec5fa41f2a3ce74b3877a5ea61119c716e9049 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 16 May 2019 08:13:42 -0700 Subject: [PATCH 220/357] Add bandit to test and pre-commit --- .pre-commit-config.yaml | 11 ++++++++++- requirements-docs.txt | 2 +- requirements-tests.in | 1 + requirements-tests.txt | 8 +++++++- requirements.txt | 2 +- 5 files changed, 20 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 995a8508..be4fee92 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,4 +12,13 @@ rev: stable hooks: - id: black - language_version: python3.7 \ No newline at end of file + language_version: python3.7 + +- repo: local + hooks: + - id: python-bandit-vulnerability-check + name: bandit + entry: bandit + args: ['--ini', 'tox.ini', '-r', 'consoleme'] + language: system + pass_filenames: false \ No 
newline at end of file diff --git a/requirements-docs.txt b/requirements-docs.txt index bf60d82f..71da2a48 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -82,7 +82,7 @@ pyyaml==5.1 raven[flask]==6.10.0 redis==3.2.1 requests-toolbelt==0.9.1 -requests[security]==2.21.0 +requests[security]==2.22.0 retrying==1.3.3 s3transfer==0.2.0 six==1.12.0 diff --git a/requirements-tests.in b/requirements-tests.in index d624d4f7..d315cf7a 100644 --- a/requirements-tests.in +++ b/requirements-tests.in @@ -1,5 +1,6 @@ # Run `make up-reqs` to update pinned dependencies in requirement text files +bandit black coverage factory-boy diff --git a/requirements-tests.txt b/requirements-tests.txt index 95ceb652..c9850c3c 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -10,6 +10,7 @@ atomicwrites==1.3.0 # via pytest attrs==19.1.0 # via black, pytest aws-sam-translator==1.11.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto +bandit==1.6.0 black==19.3b0 boto3==1.9.149 # via aws-sam-translator, moto boto==2.49.0 # via moto @@ -30,6 +31,8 @@ faker==1.0.7 flask==1.0.2 # via pytest-flask freezegun==0.3.11 future==0.17.1 # via aws-xray-sdk, python-jose +gitdb2==2.0.5 # via gitpython +gitpython==2.1.11 # via bandit idna==2.8 # via moto, requests itsdangerous==1.1.0 # via flask jinja2==2.10.1 # via flask, moto @@ -44,6 +47,7 @@ mock==3.0.5 # via moto more-itertools==7.0.0 # via pytest moto==1.3.8 nose==1.3.7 +pbr==5.2.0 # via stevedore pluggy==0.11.0 # via pytest py==1.8.0 # via pytest pyasn1==0.4.5 # via rsa @@ -61,7 +65,9 @@ requests==2.22.0 # via cfn-lint, docker, moto, requests-mock, responses responses==0.10.6 # via moto rsa==4.0 # via python-jose s3transfer==0.2.0 # via boto3 -six==1.12.0 # via aws-sam-translator, cfn-lint, cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client +six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client +smmap2==2.0.5 # via gitdb2 +stevedore==1.30.1 # via bandit text-unidecode==1.2 # via faker toml==0.10.0 # via black urllib3==1.24.3 # via botocore, requests diff --git a/requirements.txt b/requirements.txt index 66f4fd40..77a0e6f9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -77,7 +77,7 @@ pyyaml==5.1 raven[flask]==6.10.0 redis==3.2.1 requests-toolbelt==0.9.1 # via acme -requests[security]==2.21.0 +requests[security]==2.22.0 retrying==1.3.3 s3transfer==0.2.0 # via boto3 six==1.12.0 From 0320c04be284d6516771ad73faba78011d2e4d5f Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 16 May 2019 08:14:46 -0700 Subject: [PATCH 221/357] nosec comment --- lemur/factory.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/lemur/factory.py b/lemur/factory.py index b4066e78..499986ec 100644 --- a/lemur/factory.py +++ b/lemur/factory.py @@ -73,11 +73,8 @@ def from_file(file_path, silent=False): d.__file__ = file_path try: with open(file_path) as config_file: - exec( - compile( - config_file.read(), file_path, "exec" # nosec: config file safe - ), - d.__dict__, + exec( # nosec: config file safe + compile(config_file.read(), file_path, "exec"), d.__dict__ ) except IOError as e: if silent and e.errno in (errno.ENOENT, errno.EISDIR): From 4fac726cf414b239cd156529377181a26fa43e04 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Fri, 17 May 2019 08:48:26 -0700 Subject: [PATCH 
222/357] Add support for JSON logging --- lemur/factory.py | 7 +++++++ requirements-dev.txt | 2 +- requirements-docs.txt | 6 ++++-- requirements-tests.txt | 8 ++++---- requirements.in | 1 + requirements.txt | 6 ++++-- 6 files changed, 21 insertions(+), 9 deletions(-) diff --git a/lemur/factory.py b/lemur/factory.py index 499986ec..e0cf5505 100644 --- a/lemur/factory.py +++ b/lemur/factory.py @@ -13,11 +13,13 @@ import os import imp import errno import pkg_resources +import socket from logging import Formatter, StreamHandler from logging.handlers import RotatingFileHandler from flask import Flask +import logmatic from lemur.certificates.hooks import activate_debug_dump from lemur.common.health import mod as health @@ -172,6 +174,11 @@ def configure_logging(app): ) ) + if app.config.get("LOG_JSON", False): + handler.setFormatter( + logmatic.JsonFormatter(extra={"hostname": socket.gethostname()}) + ) + handler.setLevel(app.config.get("LOG_LEVEL", "DEBUG")) app.logger.setLevel(app.config.get("LOG_LEVEL", "DEBUG")) app.logger.addHandler(handler) diff --git a/requirements-dev.txt b/requirements-dev.txt index bfbadc8a..7e9677e3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -33,4 +33,4 @@ twine==1.13.0 urllib3==1.25.2 # via requests virtualenv==16.6.0 # via pre-commit webencodings==0.5.1 # via bleach -zipp==0.5.0 # via importlib-metadata +zipp==0.5.1 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index 71da2a48..780cc41e 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -17,8 +17,8 @@ babel==2.6.0 # via sphinx bcrypt==3.1.6 billiard==3.6.0.0 blinker==1.4 -boto3==1.9.149 -botocore==1.12.149 +boto3==1.9.150 +botocore==1.12.150 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -54,6 +54,7 @@ josepy==1.1.0 jsonlines==1.2.0 kombu==4.5.0 lockfile==0.12.2 +logmatic-python==0.1.7 mako==1.0.10 markupsafe==1.1.1 marshmallow-sqlalchemy==0.16.3 @@ -77,6 +78,7 @@ pyparsing==2.4.0 # via packaging pyrfc3339==1.1 python-dateutil==2.8.0 python-editor==1.0.4 +python-json-logger==0.1.11 pytz==2019.1 pyyaml==5.1 raven[flask]==6.10.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index c9850c3c..e1c30e33 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -12,12 +12,12 @@ aws-sam-translator==1.11.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto bandit==1.6.0 black==19.3b0 -boto3==1.9.149 # via aws-sam-translator, moto +boto3==1.9.150 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.149 # via aws-xray-sdk, boto3, moto, s3transfer +botocore==1.12.150 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.3 # via cryptography -cfn-lint==0.20.2 # via moto +cfn-lint==0.20.3 # via moto chardet==3.0.4 # via requests click==7.0 # via black, flask coverage==4.5.3 @@ -61,7 +61,7 @@ python-jose==3.0.1 # via moto pytz==2019.1 # via moto pyyaml==5.1 requests-mock==1.6.0 -requests==2.22.0 # via cfn-lint, docker, moto, requests-mock, responses +requests==2.21.0 # via cfn-lint, docker, moto, requests-mock, responses responses==0.10.6 # via moto rsa==4.0 # via python-jose s3transfer==0.2.0 # via boto3 diff --git a/requirements.in b/requirements.in index e69c61d2..81c797f8 100644 --- a/requirements.in +++ b/requirements.in @@ -29,6 +29,7 @@ inflection jinja2 kombu lockfile +logmatic-python marshmallow-sqlalchemy marshmallow ndg-httpsclient diff --git a/requirements.txt b/requirements.txt index 77a0e6f9..9ada7df0 100644 --- a/requirements.txt +++ b/requirements.txt @@ -15,8 
+15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.149 -botocore==1.12.149 +boto3==1.9.150 +botocore==1.12.150 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -51,6 +51,7 @@ josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare kombu==4.5.0 lockfile==0.12.2 +logmatic-python==0.1.7 mako==1.0.10 # via alembic markupsafe==1.1.1 # via jinja2, mako marshmallow-sqlalchemy==0.16.3 @@ -71,6 +72,7 @@ pyopenssl==19.0.0 pyrfc3339==1.1 # via acme python-dateutil==2.8.0 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic +python-json-logger==0.1.11 # via logmatic-python python-ldap==3.2.0 pytz==2019.1 # via acme, celery, flask-restful, pyrfc3339 pyyaml==5.1 From 34c7e5230bfe32a84a5292433647f0a6d1a3d861 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 21 May 2019 12:52:41 -0700 Subject: [PATCH 223/357] Set a limit on number of retries --- lemur/plugins/lemur_aws/iam.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/lemur/plugins/lemur_aws/iam.py b/lemur/plugins/lemur_aws/iam.py index 5a6b753d..39d1c714 100644 --- a/lemur/plugins/lemur_aws/iam.py +++ b/lemur/plugins/lemur_aws/iam.py @@ -24,7 +24,7 @@ def retry_throttled(exception): if exception.response["Error"]["Code"] == "NoSuchEntity": return False - metrics.send("iam_retry", "counter", 1) + metrics.send("iam_retry", "counter", 1, metric_tags={"exception": str(exception)}) return True @@ -52,7 +52,7 @@ def create_arn_from_cert(account_number, region, certificate_name): @sts_client("iam") -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=25) def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs): """ Upload a certificate to AWS @@ -94,7 +94,7 @@ def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs): @sts_client("iam") -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=25) def delete_cert(cert_name, **kwargs): """ Delete a certificate from AWS @@ -111,7 +111,7 @@ def delete_cert(cert_name, **kwargs): @sts_client("iam") -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=25) def get_certificate(name, **kwargs): """ Retrieves an SSL certificate. @@ -125,7 +125,7 @@ def get_certificate(name, **kwargs): @sts_client("iam") -@retry(retry_on_exception=retry_throttled, wait_fixed=2000) +@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=25) def get_certificates(**kwargs): """ Fetches one page of certificate objects for a given account. 
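A note on the retry changes in the iam.py patch above: the @retry decorator comes from the third-party "retrying" package, and the new stop_max_attempt_number argument is what turns the previously unbounded retry loop into a bounded one. The following minimal sketch shows the same pattern in isolation; flaky_list_certificates and the client argument are illustrative placeholders, not Lemur code.

    import botocore.exceptions
    from retrying import retry

    def retry_throttled(exception):
        # Retry only failures that can plausibly succeed later (e.g. throttling).
        # A missing entity will never appear by retrying, so give up immediately.
        if isinstance(exception, botocore.exceptions.ClientError):
            if exception.response["Error"]["Code"] == "NoSuchEntity":
                return False
        return True

    @retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=25)
    def flaky_list_certificates(client):
        # Retried at most 25 times, two seconds apart, after which the call
        # finally fails instead of looping forever.
        return client.list_server_certificates()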
From 1423ac0d9803a97f6d1a7386e2a533c7ca631434 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 21 May 2019 12:55:33 -0700 Subject: [PATCH 224/357] More metrics --- lemur/plugins/lemur_aws/iam.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/lemur/plugins/lemur_aws/iam.py b/lemur/plugins/lemur_aws/iam.py index 39d1c714..67c35262 100644 --- a/lemur/plugins/lemur_aws/iam.py +++ b/lemur/plugins/lemur_aws/iam.py @@ -72,6 +72,7 @@ def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs): else: name = name + "-" + path.strip("/") + metrics.send("upload_cert", "counter", 1, metric_tags={"name": name, "path": path}) try: if cert_chain: return client.upload_server_certificate( @@ -103,6 +104,7 @@ def delete_cert(cert_name, **kwargs): :return: """ client = kwargs.pop("client") + metrics.send("delete_cert", "counter", 1, metric_tags={"cert_name": cert_name}) try: client.delete_server_certificate(ServerCertificateName=cert_name) except botocore.exceptions.ClientError as e: @@ -119,6 +121,7 @@ def get_certificate(name, **kwargs): :return: """ client = kwargs.pop("client") + metrics.send("get_certificate", "counter", 1, metric_tags={"name": name}) return client.get_server_certificate(ServerCertificateName=name)[ "ServerCertificate" ] @@ -133,6 +136,7 @@ def get_certificates(**kwargs): :return: """ client = kwargs.pop("client") + metrics.send("get_certificates", "counter", 1) return client.list_server_certificates(**kwargs) @@ -142,6 +146,12 @@ def get_all_certificates(**kwargs): """ certificates = [] account_number = kwargs.get("account_number") + metrics.send( + "get_all_certificates", + "counter", + 1, + metric_tags={"account_number": account_number}, + ) while True: response = get_certificates(**kwargs) From 09c7076e79d61d0ed190b3b4c77bf346f307f227 Mon Sep 17 00:00:00 2001 From: Ryan DeShone Date: Wed, 22 May 2019 17:12:10 -0400 Subject: [PATCH 225/357] Handle double data field in API v2 --- lemur/plugins/lemur_vault_dest/plugin.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index c8843cf5..58a6dc18 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -311,6 +311,7 @@ def get_secret(client, mount, path): result = client.secrets.kv.v2.read_secret_version( path=path, mount_point=mount ) + result = result['data'] except ConnectionError: pass finally: From 5059cb731ab5864358847ebac1907aa239b02f70 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 28 May 2019 12:38:33 -0700 Subject: [PATCH 226/357] Support read replicas in Lemur for improved performance --- requirements-dev.txt | 10 +++++----- requirements-docs.txt | 23 ++++++++++++----------- requirements-tests.txt | 19 ++++++++++--------- requirements.in | 1 + requirements.txt | 19 ++++++++++--------- 5 files changed, 38 insertions(+), 34 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 7e9677e3..cfe01a7b 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,16 +4,16 @@ # # pip-compile --no-index --output-file=requirements-dev.txt requirements-dev.in # -aspy.yaml==1.2.0 # via pre-commit +aspy.yaml==1.3.0 # via pre-commit bleach==3.1.0 # via readme-renderer certifi==2019.3.9 # via requests -cfgv==1.6.0 # via pre-commit +cfgv==2.0.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 identify==1.4.3 # via pre-commit idna==2.8 # via requests -importlib-metadata==0.12 # via pre-commit 
+importlib-metadata==0.15 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 @@ -21,7 +21,7 @@ pkginfo==1.5.0.1 # via twine pre-commit==1.16.1 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 -pygments==2.4.0 # via readme-renderer +pygments==2.4.2 # via readme-renderer pyyaml==5.1 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine @@ -30,7 +30,7 @@ six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit tqdm==4.32.1 # via twine twine==1.13.0 -urllib3==1.25.2 # via requests +urllib3==1.25.3 # via requests virtualenv==16.6.0 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.5.1 # via importlib-metadata diff --git a/requirements-docs.txt b/requirements-docs.txt index 780cc41e..80223954 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -10,22 +10,22 @@ alembic-autogenerate-enums==0.0.2 alembic==1.0.10 amqp==2.4.2 aniso8601==6.0.0 -arrow==0.13.1 +arrow==0.13.2 asn1crypto==0.24.0 asyncpool==1.0 -babel==2.6.0 # via sphinx +babel==2.7.0 # via sphinx bcrypt==3.1.6 billiard==3.6.0.0 blinker==1.4 -boto3==1.9.150 -botocore==1.12.150 +boto3==1.9.157 +botocore==1.12.157 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 cffi==1.12.3 chardet==3.0.4 click==7.0 -cloudflare==2.1.0 +cloudflare==2.3.0 cryptography==2.6.1 dnspython3==1.15.0 dnspython==1.15.0 @@ -34,15 +34,16 @@ dyn==1.8.1 flask-bcrypt==0.7.1 flask-cors==3.0.7 flask-mail==0.9.1 -flask-migrate==2.4.0 +flask-migrate==2.5.2 flask-principal==0.4.0 +flask-replicated==1.2 flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.0 -flask==1.0.2 +flask==1.0.3 future==0.17.1 gunicorn==19.9.0 -hvac==0.8.2 +hvac==0.9.1 idna==2.8 imagesize==1.1.0 # via sphinx inflection==0.3.1 @@ -69,7 +70,7 @@ pyasn1-modules==0.2.5 pyasn1==0.4.5 pycparser==2.19 pycryptodomex==3.8.1 -pygments==2.4.0 # via sphinx +pygments==2.4.2 # via sphinx pyjks==19.0.0 pyjwt==1.7.1 pynacl==1.3.0 @@ -99,10 +100,10 @@ sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.2 # via sphinx sphinxcontrib-serializinghtml==1.1.3 # via sphinx sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.3 +sqlalchemy==1.3.4 tabulate==0.8.3 twofish==0.3.0 -urllib3==1.24.3 +urllib3==1.25.3 vine==1.3.0 werkzeug==0.15.4 xmltodict==0.12.0 diff --git a/requirements-tests.txt b/requirements-tests.txt index e1c30e33..f769d844 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -12,34 +12,34 @@ aws-sam-translator==1.11.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto bandit==1.6.0 black==19.3b0 -boto3==1.9.150 # via aws-sam-translator, moto +boto3==1.9.157 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.150 # via aws-xray-sdk, boto3, moto, s3transfer +botocore==1.12.157 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.3 # via cryptography -cfn-lint==0.20.3 # via moto +cfn-lint==0.21.3 # via moto chardet==3.0.4 # via requests click==7.0 # via black, flask coverage==4.5.3 cryptography==2.6.1 # via moto -docker-pycreds==0.4.0 # via docker -docker==3.7.2 # via moto +docker==4.0.1 # via moto docutils==0.14 # via botocore ecdsa==0.13.2 # via python-jose factory-boy==2.12.0 faker==1.0.7 -flask==1.0.2 # via pytest-flask +flask==1.0.3 # via pytest-flask freezegun==0.3.11 future==0.17.1 # via aws-xray-sdk, python-jose gitdb2==2.0.5 # via gitpython gitpython==2.1.11 # via bandit idna==2.8 # via moto, requests +importlib-metadata==0.15 # via pluggy itsdangerous==1.1.0 # via flask jinja2==2.10.1 # via flask, moto jmespath==0.9.4 
# via boto3, botocore jsondiff==1.1.2 # via moto jsonpatch==1.23 # via cfn-lint -jsonpickle==1.1 # via aws-xray-sdk +jsonpickle==1.2 # via aws-xray-sdk jsonpointer==2.0 # via jsonpatch jsonschema==2.6.0 # via aws-sam-translator, cfn-lint markupsafe==1.1.1 # via jinja2 @@ -48,7 +48,7 @@ more-itertools==7.0.0 # via pytest moto==1.3.8 nose==1.3.7 pbr==5.2.0 # via stevedore -pluggy==0.11.0 # via pytest +pluggy==0.12.0 # via pytest py==1.8.0 # via pytest pyasn1==0.4.5 # via rsa pycparser==2.19 # via cffi @@ -65,7 +65,7 @@ requests==2.21.0 # via cfn-lint, docker, moto, requests-mock, responses responses==0.10.6 # via moto rsa==4.0 # via python-jose s3transfer==0.2.0 # via boto3 -six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client +six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client smmap2==2.0.5 # via gitdb2 stevedore==1.30.1 # via bandit text-unidecode==1.2 # via faker @@ -76,3 +76,4 @@ websocket-client==0.56.0 # via docker werkzeug==0.15.4 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk xmltodict==0.12.0 # via moto +zipp==0.5.1 # via importlib-metadata diff --git a/requirements.in b/requirements.in index 81c797f8..ae64c225 100644 --- a/requirements.in +++ b/requirements.in @@ -22,6 +22,7 @@ Flask-Script Flask-SQLAlchemy Flask Flask-Cors +flask_replicated future gunicorn hvac # required for the vault destination plugin diff --git a/requirements.txt b/requirements.txt index 9ada7df0..2371ffd4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,21 +9,21 @@ alembic-autogenerate-enums==0.0.2 alembic==1.0.10 # via flask-migrate amqp==2.4.2 # via kombu aniso8601==6.0.0 # via flask-restful -arrow==0.13.1 +arrow==0.13.2 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.150 -botocore==1.12.150 +boto3==1.9.157 +botocore==1.12.157 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 cffi==1.12.3 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask -cloudflare==2.1.0 +cloudflare==2.3.0 cryptography==2.6.1 dnspython3==1.15.0 dnspython==1.15.0 # via dnspython3 @@ -32,15 +32,16 @@ dyn==1.8.1 flask-bcrypt==0.7.1 flask-cors==3.0.7 flask-mail==0.9.1 -flask-migrate==2.4.0 +flask-migrate==2.5.2 flask-principal==0.4.0 +flask-replicated==1.2 flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.0 -flask==1.0.2 +flask==1.0.3 future==0.17.1 gunicorn==19.9.0 -hvac==0.8.2 +hvac==0.9.1 idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask @@ -84,10 +85,10 @@ retrying==1.3.3 s3transfer==0.2.0 # via boto3 six==1.12.0 sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.3 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy==1.3.4 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 twofish==0.3.0 # via pyjks -urllib3==1.24.3 # via botocore, requests +urllib3==1.25.3 # via botocore, requests vine==1.3.0 # via amqp, celery werkzeug==0.15.4 # via flask xmltodict==0.12.0 From fd35a269555146d53fa4ee109fe36cb7610d9bff Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 28 May 2019 12:45:39 -0700 Subject: [PATCH 227/357] Support 
read replicas --- lemur/factory.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lemur/factory.py b/lemur/factory.py index e0cf5505..0563d873 100644 --- a/lemur/factory.py +++ b/lemur/factory.py @@ -19,6 +19,7 @@ from logging import Formatter, StreamHandler from logging.handlers import RotatingFileHandler from flask import Flask +from flask_replicated import FlaskReplicated import logmatic from lemur.certificates.hooks import activate_debug_dump @@ -53,6 +54,7 @@ def create_app(app_name=None, blueprints=None, config=None): configure_blueprints(app, blueprints) configure_extensions(app) configure_logging(app) + configure_database(app) install_plugins(app) @app.teardown_appcontext @@ -158,6 +160,11 @@ def configure_blueprints(app, blueprints): app.register_blueprint(blueprint, url_prefix="/api/{0}".format(API_VERSION)) +def configure_database(app): + if app.config.get("SQLALCHEMY_ENABLE_FLASK_REPLICATED"): + FlaskReplicated(app) + + def configure_logging(app): """ Sets up application wide logging. From f81adb137159dd4dff816b76f3c645b424dd1a90 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 29 May 2019 12:20:05 -0700 Subject: [PATCH 228/357] Make get_or_increase_name queries less demanding --- lemur/certificates/models.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index 965f79d1..d2414821 100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -71,21 +71,21 @@ def get_sequence(name): def get_or_increase_name(name, serial): - certificates = Certificate.query.filter( - Certificate.name.ilike("{0}%".format(name)) - ).all() + certificates = Certificate.query.filter(Certificate.name == name).all() if not certificates: return name serial_name = "{0}-{1}".format(name, hex(int(serial))[2:].upper()) - certificates = Certificate.query.filter( - Certificate.name.ilike("{0}%".format(serial_name)) - ).all() + certificates = Certificate.query.filter(Certificate.name == serial_name).all() if not certificates: return serial_name + certificates = Certificate.query.filter( + Certificate.name.ilike("{0}%".format(serial_name)) + ).all() + ends = [0] root, end = get_sequence(serial_name) for cert in certificates: From 5e389f3f48be941a59ba572379dbf9b9b33c1556 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 29 May 2019 12:38:17 -0700 Subject: [PATCH 229/357] Add certificate1 to test DB --- lemur/tests/test_certificates.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index 07b5ee4e..adafa605 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -53,7 +53,8 @@ def test_get_or_increase_name(session, certificate): == "test-cert-11111111-1-" + serial ) - cert2 = CertificateFactory(name="certificate1-" + serial) + CertificateFactory(name="certificate1") + CertificateFactory(name="certificate1-" + serial) session.commit() assert get_or_increase_name( From 13d46ae42e610ce4fb019105b6969e8d5e4da637 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 30 May 2019 08:55:30 -0700 Subject: [PATCH 230/357] indexing the not after field in the cert table --- lemur/certificates/models.py | 2 ++ lemur/migrations/versions/b33c838cb669_.py | 26 ++++++++++++++++++++++ 2 files changed, 28 insertions(+) create mode 100644 lemur/migrations/versions/b33c838cb669_.py diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index bd6e8b5e..65245e27 
100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -106,6 +106,8 @@ class Certificate(db.Model): not_before = Column(ArrowType) not_after = Column(ArrowType) + not_after_ix = Index('ix_certificates_not_after', not_after.desc()) + date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False) signing_algorithm = Column(String(128)) diff --git a/lemur/migrations/versions/b33c838cb669_.py b/lemur/migrations/versions/b33c838cb669_.py new file mode 100644 index 00000000..eb04d4a1 --- /dev/null +++ b/lemur/migrations/versions/b33c838cb669_.py @@ -0,0 +1,26 @@ +"""adding index on the not_after field + +Revision ID: b33c838cb669 +Revises: 318b66568358 +Create Date: 2019-05-30 08:42:05.294109 + +""" + +# revision identifiers, used by Alembic. +revision = 'b33c838cb669' +down_revision = '318b66568358' + +from alembic import op +import sqlalchemy as sa + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_index('ix_certificates_not_after', 'certificates', [sa.text('not_after DESC')], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index('ix_certificates_not_after', table_name='certificates') + # ### end Alembic commands ### From 071c083eae586d4df92f85a3c1a9c2e1c90f1030 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 30 May 2019 10:21:03 -0700 Subject: [PATCH 231/357] hiding expired certs after 6 months from the main page --- lemur/certificates/service.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 51fede4f..544c03d8 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -330,6 +330,12 @@ def render(args): query = database.session_query(Certificate) time_range = args.pop("time_range") + if not time_range: + six_month_old = arrow.now()\ + .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -6))\ + .format("YYYY-MM-DD") + query = query.filter(Certificate.not_after > six_month_old) + destination_id = args.pop("destination_id") notification_id = args.pop("notification_id", None) show = args.pop("show") From 8b821d002363a8ecd212bcc940e91797e5c12b3f Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 30 May 2019 10:21:44 -0700 Subject: [PATCH 232/357] Enhance domains query and sensitive domain checking code; Allow creation of opt-out roles via config --- lemur/auth/permissions.py | 10 +++++++++- lemur/common/celery.py | 2 +- lemur/common/validators.py | 2 +- lemur/domains/service.py | 19 +++++++++++++++++-- 4 files changed, 28 insertions(+), 5 deletions(-) diff --git a/lemur/auth/permissions.py b/lemur/auth/permissions.py index c3c57356..a5964880 100644 --- a/lemur/auth/permissions.py +++ b/lemur/auth/permissions.py @@ -9,6 +9,7 @@ from functools import partial from collections import namedtuple +from flask import current_app from flask_principal import Permission, RoleNeed # Permissions @@ -21,7 +22,14 @@ CertificateOwnerNeed = partial(CertificateOwner, "role") class SensitiveDomainPermission(Permission): def __init__(self): - super(SensitiveDomainPermission, self).__init__(RoleNeed("admin")) + needs = [RoleNeed("admin")] + sensitive_domain_roles = current_app.config.get("SENSITIVE_DOMAIN_ROLES", []) + + if sensitive_domain_roles: + for role in sensitive_domain_roles: + needs.append(RoleNeed(role)) + + super(SensitiveDomainPermission, self).__init__(*needs) class 
CertificatePermission(Permission): diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 7eb1bb0d..2e87dbc3 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -16,13 +16,13 @@ from celery.exceptions import SoftTimeLimitExceeded from flask import current_app from lemur.authorities.service import get as get_authority +from lemur.destinations import service as destinations_service from lemur.extensions import metrics, sentry from lemur.factory import create_app from lemur.notifications.messaging import send_pending_failure_notification from lemur.pending_certificates import service as pending_certificate_service from lemur.plugins.base import plugins from lemur.sources.cli import clean, sync, validate_sources -from lemur.destinations import service as destinations_service from lemur.sources.service import add_aws_destination_to_sources if current_app: diff --git a/lemur/common/validators.py b/lemur/common/validators.py index 3e6ebcf9..2412e2d3 100644 --- a/lemur/common/validators.py +++ b/lemur/common/validators.py @@ -40,7 +40,7 @@ def sensitive_domain(domain): # Avoid circular import. from lemur.domains import service as domain_service - if any(d.sensitive for d in domain_service.get_by_name(domain)): + if domain_service.is_domain_sensitive(domain): raise ValidationError( "Domain {0} has been marked as sensitive. " "Contact an administrator to issue the certificate.".format(domain) diff --git a/lemur/domains/service.py b/lemur/domains/service.py index 8a581bfd..1944d9db 100644 --- a/lemur/domains/service.py +++ b/lemur/domains/service.py @@ -6,10 +6,11 @@ .. moduleauthor:: Kevin Glisson """ -from lemur.domains.models import Domain -from lemur.certificates.models import Certificate +from sqlalchemy import and_ from lemur import database +from lemur.certificates.models import Certificate +from lemur.domains.models import Domain def get(domain_id): @@ -42,6 +43,20 @@ def get_by_name(name): return database.get_all(Domain, name, field="name").all() +def is_domain_sensitive(name): + """ + Return True if domain is marked sensitive + + :param name: + :return: + """ + query = database.session_query(Domain) + + query = query.filter(and_(Domain.sensitive, Domain.name == name)) + + return database.find_all(query, Domain, {}).all() + + def create(name, sensitive): """ Create a new domain From b89dd36771b88a42434cf52e53cc05e1aaa74aa6 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 30 May 2019 10:21:53 -0700 Subject: [PATCH 233/357] updating requirements --- requirements-dev.txt | 2 +- requirements-docs.txt | 10 +++++----- requirements-tests.txt | 10 +++++----- requirements.txt | 10 +++++----- 4 files changed, 16 insertions(+), 16 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index cfe01a7b..030c3f93 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -13,7 +13,7 @@ docutils==0.14 # via readme-renderer flake8==3.5.0 identify==1.4.3 # via pre-commit idna==2.8 # via requests -importlib-metadata==0.15 # via pre-commit +importlib-metadata==0.17 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 diff --git a/requirements-docs.txt b/requirements-docs.txt index 80223954..fbd70c49 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -8,7 +8,7 @@ acme==0.34.2 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 alembic==1.0.10 -amqp==2.4.2 +amqp==2.5.0 aniso8601==6.0.0 arrow==0.13.2 asn1crypto==0.24.0 @@ -17,8 +17,8 @@ babel==2.7.0 # via sphinx bcrypt==3.1.6 billiard==3.6.0.0 blinker==1.4 
-boto3==1.9.157 -botocore==1.12.157 +boto3==1.9.158 +botocore==1.12.158 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -53,7 +53,7 @@ jinja2==2.10.1 jmespath==0.9.4 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.5.0 +kombu==4.6.0 lockfile==0.12.2 logmatic-python==0.1.7 mako==1.0.10 @@ -69,7 +69,7 @@ psycopg2==2.8.2 pyasn1-modules==0.2.5 pyasn1==0.4.5 pycparser==2.19 -pycryptodomex==3.8.1 +pycryptodomex==3.8.2 pygments==2.4.2 # via sphinx pyjks==19.0.0 pyjwt==1.7.1 diff --git a/requirements-tests.txt b/requirements-tests.txt index f769d844..cf53fa17 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -12,9 +12,9 @@ aws-sam-translator==1.11.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto bandit==1.6.0 black==19.3b0 -boto3==1.9.157 # via aws-sam-translator, moto +boto3==1.9.158 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.157 # via aws-xray-sdk, boto3, moto, s3transfer +botocore==1.12.158 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.3 # via cryptography cfn-lint==0.21.3 # via moto @@ -28,12 +28,12 @@ ecdsa==0.13.2 # via python-jose factory-boy==2.12.0 faker==1.0.7 flask==1.0.3 # via pytest-flask -freezegun==0.3.11 +freezegun==0.3.12 future==0.17.1 # via aws-xray-sdk, python-jose gitdb2==2.0.5 # via gitpython gitpython==2.1.11 # via bandit idna==2.8 # via moto, requests -importlib-metadata==0.15 # via pluggy +importlib-metadata==0.17 # via pluggy itsdangerous==1.1.0 # via flask jinja2==2.10.1 # via flask, moto jmespath==0.9.4 # via boto3, botocore @@ -47,7 +47,7 @@ mock==3.0.5 # via moto more-itertools==7.0.0 # via pytest moto==1.3.8 nose==1.3.7 -pbr==5.2.0 # via stevedore +pbr==5.2.1 # via stevedore pluggy==0.12.0 # via pytest py==1.8.0 # via pytest pyasn1==0.4.5 # via rsa diff --git a/requirements.txt b/requirements.txt index 2371ffd4..7dde8a3d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,7 +7,7 @@ acme==0.34.2 alembic-autogenerate-enums==0.0.2 alembic==1.0.10 # via flask-migrate -amqp==2.4.2 # via kombu +amqp==2.5.0 # via kombu aniso8601==6.0.0 # via flask-restful arrow==0.13.2 asn1crypto==0.24.0 # via cryptography @@ -15,8 +15,8 @@ asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.157 -botocore==1.12.157 +boto3==1.9.158 +botocore==1.12.158 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -50,7 +50,7 @@ jinja2==2.10.1 jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.5.0 +kombu==4.6.0 lockfile==0.12.2 logmatic-python==0.1.7 mako==1.0.10 # via alembic @@ -65,7 +65,7 @@ psycopg2==2.8.2 pyasn1-modules==0.2.5 # via pyjks, python-ldap pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, pyjks, python-ldap pycparser==2.19 # via cffi -pycryptodomex==3.8.1 # via pyjks +pycryptodomex==3.8.2 # via pyjks pyjks==19.0.0 pyjwt==1.7.1 pynacl==1.3.0 # via paramiko From e300cf6e1b5dc1d8bb4d36c69881b74824db0e92 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 30 May 2019 13:34:44 -0700 Subject: [PATCH 234/357] Downgrade Kombu --- requirements-docs.txt | 2 +- requirements.in | 2 +- requirements.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements-docs.txt b/requirements-docs.txt index fbd70c49..afd01a77 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -53,7 +53,7 @@ jinja2==2.10.1 jmespath==0.9.4 josepy==1.1.0 jsonlines==1.2.0 -kombu==4.6.0 +kombu==4.5.0 lockfile==0.12.2 
logmatic-python==0.1.7 mako==1.0.10 diff --git a/requirements.in b/requirements.in index ae64c225..d766b7a9 100644 --- a/requirements.in +++ b/requirements.in @@ -28,7 +28,7 @@ gunicorn hvac # required for the vault destination plugin inflection jinja2 -kombu +kombu<4.6.0 # Bug with inspecting active tasks: https://github.com/celery/kombu/issues/1051 lockfile logmatic-python marshmallow-sqlalchemy diff --git a/requirements.txt b/requirements.txt index 7dde8a3d..4542d440 100644 --- a/requirements.txt +++ b/requirements.txt @@ -50,7 +50,7 @@ jinja2==2.10.1 jmespath==0.9.4 # via boto3, botocore josepy==1.1.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.6.0 +kombu==4.5.0 lockfile==0.12.2 logmatic-python==0.1.7 mako==1.0.10 # via alembic From 28b216273d6cb9e9f009816e6c9fc96362c39ab3 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Fri, 31 May 2019 14:07:26 -0700 Subject: [PATCH 235/357] Upgrading Gulp. If this is not necessary, we can remove it later. --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index f47978db..fe1267a6 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,6 @@ "bower": "^1.8.2", "browser-sync": "^2.3.1", "del": "^2.2.2", - "gulp": "^3.8.11", "gulp-autoprefixer": "^3.1.1", "gulp-cache": "^0.4.5", "gulp-concat": "^2.4.1", @@ -60,6 +59,7 @@ "test": "gulp test" }, "devDependencies": { + "gulp": "^3.9.1", "jshint": "^2.8.0", "karma-chrome-launcher": "^2.0.0" } From 45231c2423a95fddbc9804fab99db61a817d43f7 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Fri, 31 May 2019 14:08:28 -0700 Subject: [PATCH 236/357] Added code to automatically add the common name as a DNS name while creating a certificate. --- .../certificate/tracking.tpl.html | 2 ++ .../app/angular/certificates/services.js | 20 +++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/lemur/static/app/angular/certificates/certificate/tracking.tpl.html b/lemur/static/app/angular/certificates/certificate/tracking.tpl.html index b64f6e3d..573510cd 100644 --- a/lemur/static/app/angular/certificates/certificate/tracking.tpl.html +++ b/lemur/static/app/angular/certificates/certificate/tracking.tpl.html @@ -33,6 +33,8 @@ uib-tooltip="If you need a certificate with multiple domains enter your primary domain here and the rest under 'Subject Alternate Names' by clicking 'More Options'" ng-model="certificate.commonName" placeholder="Common Name" class="form-control" ng-maxlength="64" + ng-blur="certificate.attachCommonName()" + ng-focus="certificate.removeCommonName()" required/>

diff --git a/lemur/static/app/angular/certificates/services.js b/lemur/static/app/angular/certificates/services.js index ecd7870f..3a23076d 100644 --- a/lemur/static/app/angular/certificates/services.js +++ b/lemur/static/app/angular/certificates/services.js @@ -18,6 +18,26 @@ angular.module('lemur') this.authority = authority; this.authority.maxDate = moment(this.authority.notAfter).subtract(1, 'days').format('YYYY/MM/DD'); }, + attachCommonName: function () { + if (this.extensions === undefined) { + this.extensions = {}; + } + + if (this.extensions.subAltNames === undefined) { + this.extensions.subAltNames = {'names': []}; + } + + if (angular.isString(this.commonName)) { + this.extensions.subAltNames.names.unshift({'nameType': 'DNSName', 'value': this.commonName}); + } + }, + removeCommonName: function () { + if (angular.isDefined(this.extensions) && angular.isDefined(this.extensions.subAltNames)) { + if (angular.equals(this.extensions.subAltNames.names[0].value, this.commonName)) { + this.extensions.subAltNames.names.shift(); + } + } + }, attachSubAltName: function () { if (this.extensions === undefined) { this.extensions = {}; From aeb32f4853a4ac0cdeeba85ba8ec561aebc97a18 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 4 Jun 2019 08:21:52 -0700 Subject: [PATCH 237/357] Upgrade docker to 3.7 --- Dockerfile | 2 +- requirements-docs.txt | 14 +++++++------- requirements-tests.txt | 20 +++++++++++--------- requirements.txt | 12 ++++++------ 4 files changed, 25 insertions(+), 23 deletions(-) diff --git a/Dockerfile b/Dockerfile index b9d7335e..b9439be7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.5 +FROM python:3.7 RUN apt-get update RUN apt-get install -y make software-properties-common curl RUN curl -sL https://deb.nodesource.com/setup_7.x | bash - diff --git a/requirements-docs.txt b/requirements-docs.txt index afd01a77..c0fe427e 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -10,15 +10,15 @@ alembic-autogenerate-enums==0.0.2 alembic==1.0.10 amqp==2.5.0 aniso8601==6.0.0 -arrow==0.13.2 +arrow==0.14.2 asn1crypto==0.24.0 asyncpool==1.0 babel==2.7.0 # via sphinx bcrypt==3.1.6 billiard==3.6.0.0 blinker==1.4 -boto3==1.9.158 -botocore==1.12.158 +boto3==1.9.160 +botocore==1.12.160 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -26,7 +26,7 @@ cffi==1.12.3 chardet==3.0.4 click==7.0 cloudflare==2.3.0 -cryptography==2.6.1 +cryptography==2.7 dnspython3==1.15.0 dnspython==1.15.0 docutils==0.14 @@ -36,7 +36,7 @@ flask-cors==3.0.7 flask-mail==0.9.1 flask-migrate==2.5.2 flask-principal==0.4.0 -flask-replicated==1.2 +flask-replicated==1.3 flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.0 @@ -56,7 +56,7 @@ jsonlines==1.2.0 kombu==4.5.0 lockfile==0.12.2 logmatic-python==0.1.7 -mako==1.0.10 +mako==1.0.11 markupsafe==1.1.1 marshmallow-sqlalchemy==0.16.3 marshmallow==2.19.2 @@ -91,7 +91,7 @@ s3transfer==0.2.0 six==1.12.0 snowballstemmer==1.2.1 # via sphinx sphinx-rtd-theme==0.4.3 -sphinx==2.0.1 +sphinx==2.1.0 sphinxcontrib-applehelp==1.0.1 # via sphinx sphinxcontrib-devhelp==1.0.1 # via sphinx sphinxcontrib-htmlhelp==1.0.2 # via sphinx diff --git a/requirements-tests.txt b/requirements-tests.txt index cf53fa17..77bc92af 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -12,16 +12,16 @@ aws-sam-translator==1.11.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto bandit==1.6.0 black==19.3b0 -boto3==1.9.158 # via aws-sam-translator, moto +boto3==1.9.160 # via aws-sam-translator, moto boto==2.49.0 # via moto 
-botocore==1.12.158 # via aws-xray-sdk, boto3, moto, s3transfer +botocore==1.12.160 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.3.9 # via requests cffi==1.12.3 # via cryptography -cfn-lint==0.21.3 # via moto +cfn-lint==0.21.4 # via moto chardet==3.0.4 # via requests click==7.0 # via black, flask coverage==4.5.3 -cryptography==2.6.1 # via moto +cryptography==2.7 # via moto docker==4.0.1 # via moto docutils==0.14 # via botocore ecdsa==0.13.2 # via python-jose @@ -33,7 +33,7 @@ future==0.17.1 # via aws-xray-sdk, python-jose gitdb2==2.0.5 # via gitpython gitpython==2.1.11 # via bandit idna==2.8 # via moto, requests -importlib-metadata==0.17 # via pluggy +importlib-metadata==0.17 # via pluggy, pytest itsdangerous==1.1.0 # via flask jinja2==2.10.1 # via flask, moto jmespath==0.9.4 # via boto3, botocore @@ -47,30 +47,32 @@ mock==3.0.5 # via moto more-itertools==7.0.0 # via pytest moto==1.3.8 nose==1.3.7 +packaging==19.0 # via pytest pbr==5.2.1 # via stevedore pluggy==0.12.0 # via pytest py==1.8.0 # via pytest pyasn1==0.4.5 # via rsa pycparser==2.19 # via cffi pyflakes==2.1.1 +pyparsing==2.4.0 # via packaging pytest-flask==0.15.0 pytest-mock==1.10.4 -pytest==4.5.0 +pytest==4.6.2 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==3.0.1 # via moto pytz==2019.1 # via moto pyyaml==5.1 requests-mock==1.6.0 -requests==2.21.0 # via cfn-lint, docker, moto, requests-mock, responses +requests==2.22.0 # via cfn-lint, docker, moto, requests-mock, responses responses==0.10.6 # via moto rsa==4.0 # via python-jose s3transfer==0.2.0 # via boto3 -six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client +six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, freezegun, mock, moto, packaging, pytest, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client smmap2==2.0.5 # via gitdb2 stevedore==1.30.1 # via bandit text-unidecode==1.2 # via faker toml==0.10.0 # via black -urllib3==1.24.3 # via botocore, requests +urllib3==1.25.3 # via botocore, requests wcwidth==0.1.7 # via pytest websocket-client==0.56.0 # via docker werkzeug==0.15.4 # via flask, moto, pytest-flask diff --git a/requirements.txt b/requirements.txt index 4542d440..c19c7b6e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,14 +9,14 @@ alembic-autogenerate-enums==0.0.2 alembic==1.0.10 # via flask-migrate amqp==2.5.0 # via kombu aniso8601==6.0.0 # via flask-restful -arrow==0.13.2 +arrow==0.14.2 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 bcrypt==3.1.6 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.158 -botocore==1.12.158 +boto3==1.9.160 +botocore==1.12.160 celery[redis]==4.3.0 certifi==2019.3.9 certsrv==2.1.1 @@ -24,7 +24,7 @@ cffi==1.12.3 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask cloudflare==2.3.0 -cryptography==2.6.1 +cryptography==2.7 dnspython3==1.15.0 dnspython==1.15.0 # via dnspython3 docutils==0.14 # via botocore @@ -34,7 +34,7 @@ flask-cors==3.0.7 flask-mail==0.9.1 flask-migrate==2.5.2 flask-principal==0.4.0 -flask-replicated==1.2 +flask-replicated==1.3 flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.0 @@ -53,7 +53,7 @@ jsonlines==1.2.0 # via cloudflare kombu==4.5.0 lockfile==0.12.2 logmatic-python==0.1.7 -mako==1.0.10 # via alembic +mako==1.0.11 # via 
alembic markupsafe==1.1.1 # via jinja2, mako marshmallow-sqlalchemy==0.16.3 marshmallow==2.19.2 From 28e26a1bafe2636e52e2a1acbbe4de293b4e73d5 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 5 Jun 2019 17:57:11 -0700 Subject: [PATCH 238/357] to prevent duplicate emails, we might better remove owner and security email address from the notification recipient --- lemur/notifications/messaging.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/lemur/notifications/messaging.py b/lemur/notifications/messaging.py index 919b73db..928febd6 100644 --- a/lemur/notifications/messaging.py +++ b/lemur/notifications/messaging.py @@ -140,12 +140,6 @@ def send_expiration_notifications(exclude): notification_data.append(cert_data) security_data.append(cert_data) - notification_recipient = get_plugin_option( - "recipients", notification.options - ) - if notification_recipient: - notification_recipient = notification_recipient.split(",") - if send_notification( "expiration", notification_data, [owner], notification ): @@ -153,10 +147,16 @@ def send_expiration_notifications(exclude): else: failure += 1 + notification_recipient = get_plugin_option( + "recipients", notification.options + ) + if notification_recipient: + notification_recipient = notification_recipient.split(",") + # removing owner and security_email from notification_recipient + notification_recipient = [i for i in notification_recipient if i not in security_email and i != owner] + if ( notification_recipient - and owner != notification_recipient - and security_email != notification_recipient ): if send_notification( "expiration", From 0446aea20e67452b7ecc3afaf408ef70c00f46e6 Mon Sep 17 00:00:00 2001 From: Curtis Date: Thu, 6 Jun 2019 13:35:45 -0700 Subject: [PATCH 239/357] Update messaging.py --- lemur/notifications/messaging.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/notifications/messaging.py b/lemur/notifications/messaging.py index 928febd6..82db7b6e 100644 --- a/lemur/notifications/messaging.py +++ b/lemur/notifications/messaging.py @@ -52,7 +52,7 @@ def get_certificates(exclude=None): certs = [] - for c in windowed_query(q, Certificate.id, 100): + for c in windowed_query(q, Certificate.id, 10000): if needs_notification(c): certs.append(c) From 491d048948be684e28904e4ccb23bd7e347d82c6 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Mon, 10 Jun 2019 09:47:29 -0700 Subject: [PATCH 240/357] Modified the behavior of Permalink to access a newer, faster API --- .../app/angular/certificates/view/view.js | 25 ++++++++++++++----- 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/lemur/static/app/angular/certificates/view/view.js b/lemur/static/app/angular/certificates/view/view.js index 0008dd64..619afffb 100644 --- a/lemur/static/app/angular/certificates/view/view.js +++ b/lemur/static/app/angular/certificates/view/view.js @@ -17,7 +17,7 @@ angular.module('lemur') }); }) - .controller('CertificatesViewController', function ($q, $scope, $uibModal, $stateParams, CertificateApi, CertificateService, MomentService, ngTableParams, toaster) { + .controller('CertificatesViewController', function ($q, $scope, $uibModal, $stateParams, $location, CertificateApi, CertificateService, MomentService, ngTableParams, toaster) { $scope.filter = $stateParams; $scope.certificateTable = new ngTableParams({ page: 1, // show first page @@ -29,11 +29,24 @@ angular.module('lemur') }, { total: 0, // length of data getData: function ($defer, params) { - 
CertificateApi.getList(params.url()) - .then(function (data) { - params.total(data.total); - $defer.resolve(data); - }); + $scope.path = $location.path(); + // Handle Permalink clicks through a separate API + // Clicking on Permalink adds the certificate name to the URL after "certificates/", which is used to identify the click + if ($scope.path.indexOf("certificates/") > -1 && $scope.path.split("/")[2].length > 0) { + $scope.certificateName = $scope.path.split("/")[2]; + CertificateApi.one('name').one($scope.certificateName).getList() + .then(function (data) { + params.total(data.total); + $defer.resolve(data); + }); + } + else { + CertificateApi.getList(params.url()) + .then(function (data) { + params.total(data.total); + $defer.resolve(data); + }); + } } }); From c0f8fbb24fe3426901665dd8eb5db793dde2031e Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Tue, 11 Jun 2019 14:51:24 -0700 Subject: [PATCH 241/357] Modified Permalink behavior to access a newer, faster API --- lemur/static/app/angular/certificates/view/view.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/static/app/angular/certificates/view/view.js b/lemur/static/app/angular/certificates/view/view.js index 619afffb..6712e62a 100644 --- a/lemur/static/app/angular/certificates/view/view.js +++ b/lemur/static/app/angular/certificates/view/view.js @@ -32,8 +32,8 @@ angular.module('lemur') $scope.path = $location.path(); // Handle Permalink clicks through a separate API // Clicking on Permalink adds the certificate name to the URL after "certificates/", which is used to identify the click - if ($scope.path.indexOf("certificates/") > -1 && $scope.path.split("/")[2].length > 0) { - $scope.certificateName = $scope.path.split("/")[2]; + if ($scope.path.indexOf('certificates/') > -1 && $scope.path.split('/')[2].length > 0) { + $scope.certificateName = $scope.path.split('/')[2]; CertificateApi.one('name').one($scope.certificateName).getList() .then(function (data) { params.total(data.total); From cdb83c48c5504130a6370703845b812af7bf6c51 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Mon, 17 Jun 2019 10:41:11 -0700 Subject: [PATCH 242/357] API additions for viewing expired certs as well. Default behavior modified to show only valid certs and those which have expired less than 1 month ago. 
--- lemur/certificates/service.py | 12 +++++++----- lemur/certificates/views.py | 1 + 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 544c03d8..d9370232 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -329,12 +329,14 @@ def render(args): """ query = database.session_query(Certificate) - time_range = args.pop("time_range") - if not time_range: - six_month_old = arrow.now()\ - .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -6))\ + show_expired = args.pop("showExpired") + if show_expired != 1: + one_month_old = arrow.now()\ + .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1))\ .format("YYYY-MM-DD") - query = query.filter(Certificate.not_after > six_month_old) + query = query.filter(Certificate.not_after > one_month_old) + + time_range = args.pop("time_range") destination_id = args.pop("destination_id") notification_id = args.pop("notification_id", None) diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index 61a74a59..1a003e78 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -347,6 +347,7 @@ class CertificatesList(AuthenticatedResource): ) parser.add_argument("creator", type=str, location="args") parser.add_argument("show", type=str, location="args") + parser.add_argument("showExpired", type=int, location="args") args = parser.parse_args() args["user"] = g.user From f836c6fff6cdea1911b9d9771aae38ecade96870 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Mon, 17 Jun 2019 10:41:11 -0700 Subject: [PATCH 243/357] API additions for viewing expired certs as well. Default behavior modified to show only valid certs and those which have expired less than 1 month ago. 
--- lemur/certificates/service.py | 12 +++++++----- lemur/certificates/views.py | 1 + 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 544c03d8..d9370232 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -329,12 +329,14 @@ def render(args): """ query = database.session_query(Certificate) - time_range = args.pop("time_range") - if not time_range: - six_month_old = arrow.now()\ - .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -6))\ + show_expired = args.pop("showExpired") + if show_expired != 1: + one_month_old = arrow.now()\ + .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1))\ .format("YYYY-MM-DD") - query = query.filter(Certificate.not_after > six_month_old) + query = query.filter(Certificate.not_after > one_month_old) + + time_range = args.pop("time_range") destination_id = args.pop("destination_id") notification_id = args.pop("notification_id", None) diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index 61a74a59..1a003e78 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -347,6 +347,7 @@ class CertificatesList(AuthenticatedResource): ) parser.add_argument("creator", type=str, location="args") parser.add_argument("show", type=str, location="args") + parser.add_argument("showExpired", type=int, location="args") args = parser.parse_args() args["user"] = g.user From 8a08edb0f3db2113936c355053bd87e597af8c6c Mon Sep 17 00:00:00 2001 From: Marti Raudsepp Date: Tue, 18 Jun 2019 09:47:34 +0300 Subject: [PATCH 244/357] manage.py: Restore shebang line This is an executable file but cannot be executed without the interpreter. The shebang line was lost in commit 8cbc6b8325c08bb3a72932c7e67c6476f7d29edb --- lemur/manage.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lemur/manage.py b/lemur/manage.py index e6e85a9d..7dd3b3b4 100755 --- a/lemur/manage.py +++ b/lemur/manage.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python from __future__ import unicode_literals # at top of module import os From 56917614a20a0a295d88c8a3fee03566fe9188c7 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Wed, 19 Jun 2019 09:46:44 -0400 Subject: [PATCH 245/357] fixing regex to be more flexable --- lemur/plugins/lemur_vault_dest/plugin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 803b0a0c..21c6784e 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -69,14 +69,14 @@ class VaultSourcePlugin(SourcePlugin): 'name': 'vaultPath', 'type': 'str', 'required': True, - 'validation': '^([a-zA-Z0-9_-]+/?)+$', + 'validation': '^([a-zA-Z0-9._-]+/?)+$', 'helpMessage': 'Must be a valid Vault secrets path' }, { 'name': 'objectName', 'type': 'str', 'required': True, - 'validation': '[0-9a-zA-Z:_-]+', + 'validation': '[0-9a-zA-Z.:_-]+', 'helpMessage': 'Object Name to search' }, ] From bbf50cf0b05033f2e72c17413d6c7635697f5c73 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Thu, 20 Jun 2019 08:26:32 -0400 Subject: [PATCH 246/357] updated dest as well as src --- lemur/plugins/lemur_vault_dest/plugin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index c7db9b58..b6d1ed75 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ 
b/lemur/plugins/lemur_vault_dest/plugin.py @@ -177,14 +177,14 @@ class VaultDestinationPlugin(DestinationPlugin): "name": "vaultPath", "type": "str", "required": True, - "validation": "^([a-zA-Z0-9_-]+/?)+$", + "validation": "^([a-zA-Z0-9._-]+/?)+$", "helpMessage": "Must be a valid Vault secrets path", }, { "name": "objectName", "type": "str", "required": False, - "validation": "[0-9a-zA-Z:_-]+", + "validation": "[0-9a-zA-Z.:_-]+", "helpMessage": "Name to bundle certs under, if blank use cn", }, { From 68815b8f44a6d787efee2f9e31e849c88edbe65c Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Thu, 20 Jun 2019 15:04:40 -0700 Subject: [PATCH 247/357] UI changes - Button to show / hide expired certs. --- .../app/angular/certificates/view/view.js | 31 +++++++++++++++++++ .../angular/certificates/view/view.tpl.html | 5 +++ 2 files changed, 36 insertions(+) diff --git a/lemur/static/app/angular/certificates/view/view.js b/lemur/static/app/angular/certificates/view/view.js index 6712e62a..1d68d9f2 100644 --- a/lemur/static/app/angular/certificates/view/view.js +++ b/lemur/static/app/angular/certificates/view/view.js @@ -19,6 +19,9 @@ angular.module('lemur') .controller('CertificatesViewController', function ($q, $scope, $uibModal, $stateParams, $location, CertificateApi, CertificateService, MomentService, ngTableParams, toaster) { $scope.filter = $stateParams; + $scope.expiredText = ["Show Expired", "Hide Expired"]; + $scope.expiredValue = 0; + $scope.expiredButton = $scope.expiredText[$scope.expiredValue]; $scope.certificateTable = new ngTableParams({ page: 1, // show first page count: 10, // count per page @@ -50,6 +53,34 @@ angular.module('lemur') } }); + $scope.showExpired = function () { + if ($scope.expiredValue === 0) { + $scope.expiredValue = 1; + } + else { + $scope.expiredValue = 0; + } + $scope.expiredButton = $scope.expiredText[$scope.expiredValue]; + $scope.certificateTable = new ngTableParams({ + page: 1, // show first page + count: 10, // count per page + sorting: { + id: 'desc' // initial sorting + }, + filter: $scope.filter + }, { + getData: function ($defer, params) { + $scope.temp = angular.copy(params.url()); + $scope.temp.showExpired = $scope.expiredValue; + CertificateApi.getList($scope.temp) + .then(function (data) { + params.total(data.total); + $defer.resolve(data); + }); + } + }) + }; + $scope.momentService = MomentService; $scope.remove = function (certificate) { diff --git a/lemur/static/app/angular/certificates/view/view.tpl.html b/lemur/static/app/angular/certificates/view/view.tpl.html index 28b4e08e..ff086a88 100644 --- a/lemur/static/app/angular/certificates/view/view.tpl.html +++ b/lemur/static/app/angular/certificates/view/view.tpl.html @@ -17,6 +17,11 @@ btn-checkbox-true="1" btn-checkbox-false="0">Filter

[view.tpl.html markup lost in extraction: per the commit above, the five added lines render a Show Expired / Hide Expired toggle bound to showExpired() and {{expiredButton}}.]
From de0462e54fbf3d492387b05c01977b26aebef975 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Thu, 20 Jun 2019 15:41:32 -0700 Subject: [PATCH 248/357] Added missing semi-colon and changed double quotes to single quotes --- lemur/static/app/angular/certificates/view/view.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/static/app/angular/certificates/view/view.js b/lemur/static/app/angular/certificates/view/view.js index 1d68d9f2..065c778a 100644 --- a/lemur/static/app/angular/certificates/view/view.js +++ b/lemur/static/app/angular/certificates/view/view.js @@ -19,7 +19,7 @@ angular.module('lemur') .controller('CertificatesViewController', function ($q, $scope, $uibModal, $stateParams, $location, CertificateApi, CertificateService, MomentService, ngTableParams, toaster) { $scope.filter = $stateParams; - $scope.expiredText = ["Show Expired", "Hide Expired"]; + $scope.expiredText = ['Show Expired', 'Hide Expired']; $scope.expiredValue = 0; $scope.expiredButton = $scope.expiredText[$scope.expiredValue]; $scope.certificateTable = new ngTableParams({ @@ -78,7 +78,7 @@ angular.module('lemur') $defer.resolve(data); }); } - }) + }); }; $scope.momentService = MomentService; From 34cdd29a5011a84f432fd090670d1c017f536b0d Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 20 Jun 2019 16:06:26 -0700 Subject: [PATCH 249/357] removing the rotation enabled requirement, to keep the endpoint generic --- lemur/certificates/service.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 544c03d8..3bb46cdc 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -445,7 +445,7 @@ def query_name(certificate_name, args): def query_common_name(common_name, args): """ - Helper function that queries for not expired certificates by common name and owner which have auto-rotate enabled + Helper function that queries for not expired certificates by common name (and owner) :param common_name: :param args: @@ -462,7 +462,6 @@ def query_common_name(common_name, args): Certificate.query.filter(Certificate.cn.ilike(common_name)) .filter(Certificate.owner.ilike(owner)) .filter(Certificate.not_after >= current_time.format("YYYY-MM-DD")) - .filter(Certificate.rotation.is_(True)) .all() ) From 960064d5c6034910e9046c52d55a64f1444b3e4a Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Fri, 21 Jun 2019 11:32:16 -0700 Subject: [PATCH 250/357] Color change for Show Expired button --- lemur/static/app/angular/certificates/view/view.tpl.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/static/app/angular/certificates/view/view.tpl.html b/lemur/static/app/angular/certificates/view/view.tpl.html index ff086a88..fbed4ca5 100644 --- a/lemur/static/app/angular/certificates/view/view.tpl.html +++ b/lemur/static/app/angular/certificates/view/view.tpl.html @@ -18,7 +18,7 @@ btn-checkbox-false="0">Filter
[view.tpl.html markup lost in extraction: the single changed line restyles the Show Expired toggle button, per the "Color change for Show Expired button" commit above.]
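For context on the Show/Hide Expired toggle wired up in the preceding UI patches: the button only flips the showExpired query parameter, and the server-side cutoff introduced in PATCH 242/243 reduces to an Arrow date shift. A small standalone sketch of that cutoff (illustrative only, not Lemur code):

    import arrow

    def expiry_cutoff(months_back=-1):
        # One month in the past by default (HIDE_EXPIRED_CERTS_AFTER_MONTHS=-1);
        # certificates whose not_after is older than this are hidden unless the
        # client passes showExpired=1.
        return arrow.now().shift(months=months_back).format("YYYY-MM-DD")

    # Example: run on 2019-06-21 this returns "2019-05-21", so the listing
    # filters on Certificate.not_after > "2019-05-21".
    print(expiry_cutoff())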
From 4565bd7dc699d2b9982febe580e04d88ae18030c Mon Sep 17 00:00:00 2001 From: Danny Thomas Date: Fri, 21 Jun 2019 13:33:55 -0700 Subject: [PATCH 251/357] Update SAN text --- .../app/angular/certificates/certificate/tracking.tpl.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/static/app/angular/certificates/certificate/tracking.tpl.html b/lemur/static/app/angular/certificates/certificate/tracking.tpl.html index 573510cd..19d8f37f 100644 --- a/lemur/static/app/angular/certificates/certificate/tracking.tpl.html +++ b/lemur/static/app/angular/certificates/certificate/tracking.tpl.html @@ -30,7 +30,7 @@
Date: Tue, 18 Jun 2019 09:43:00 +0300 Subject: [PATCH 252/357] Expose new certificate field hasPrivateKey We can also now disable the 'private key' tab when cert doesn't have a private key. --- lemur/certificates/models.py | 10 +++++++++- lemur/certificates/schemas.py | 1 + .../static/app/angular/certificates/view/view.tpl.html | 4 ++-- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index 978acd7a..0a76cd6b 100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -137,7 +137,7 @@ class Certificate(db.Model): not_before = Column(ArrowType) not_after = Column(ArrowType) - not_after_ix = Index('ix_certificates_not_after', not_after.desc()) + not_after_ix = Index("ix_certificates_not_after", not_after.desc()) date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False) @@ -337,6 +337,14 @@ class Certificate(db.Model): def revoked(cls): return case([(cls.status == "revoked", True)], else_=False) + @hybrid_property + def has_private_key(self): + return self.private_key is not None + + @has_private_key.expression + def has_private_key(cls): + return case([(cls.private_key.is_(None), True)], else_=False) + @hybrid_property def in_rotation_window(self): """ diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index bf950e70..7f3c2ac0 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -247,6 +247,7 @@ class CertificateOutputSchema(LemurOutputSchema): # Note aliasing is the first step in deprecating these fields. notify = fields.Boolean() active = fields.Boolean(attribute="notify") + has_private_key = fields.Boolean() cn = fields.String() common_name = fields.String(attribute="cn") diff --git a/lemur/static/app/angular/certificates/view/view.tpl.html b/lemur/static/app/angular/certificates/view/view.tpl.html index fbed4ca5..9d5c7772 100644 --- a/lemur/static/app/angular/certificates/view/view.tpl.html +++ b/lemur/static/app/angular/certificates/view/view.tpl.html @@ -203,10 +203,10 @@
{{ certificate.body }}
- + Private Key - +
{{ certificate.privateKey }}
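For context on the has_private_key field added above: a SQLAlchemy hybrid_property needs its instance-level check and its class-level expression to agree. As written in the diff, the expression returns True when private_key IS NULL, the inverse of the instance-level "self.private_key is not None". The sketch below is a self-contained toy model (not Lemur's Certificate class) that keeps both sides consistent, using the same list-style case() construct from SQLAlchemy 1.x that appears elsewhere in the model.

    from sqlalchemy import Column, Integer, Text, case, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.ext.hybrid import hybrid_property
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()


    class Cert(Base):
        __tablename__ = "certs"
        id = Column(Integer, primary_key=True)
        private_key = Column(Text)

        @hybrid_property
        def has_private_key(self):
            # Instance level: True when a key is loaded on the object.
            return self.private_key is not None

        @has_private_key.expression
        def has_private_key(cls):
            # SQL level: same meaning as above; False when the column is NULL.
            return case([(cls.private_key.is_(None), False)], else_=True)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)
    session = sessionmaker(bind=engine)()
    session.add_all([Cert(private_key="-----BEGIN RSA PRIVATE KEY-----..."), Cert(private_key=None)])
    session.commit()

    # Python-side and SQL-side behaviour now agree.
    assert [c.has_private_key for c in session.query(Cert).order_by(Cert.id)] == [True, False]
    assert session.query(Cert).filter(Cert.has_private_key).count() == 1
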
From 66998332974bf2f5bc7b4fad418363cbda361da2 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Mon, 24 Jun 2019 13:10:08 -0400 Subject: [PATCH 253/357] fixing empty chain --- lemur/plugins/lemur_vault_dest/plugin.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index b6d1ed75..615d09ac 100644 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -259,16 +259,21 @@ class VaultDestinationPlugin(DestinationPlugin): secret = get_secret(client, mount, path) secret["data"][cname] = {} + if cert_chain == 'None': + chain = '' + else: + chain = cert_chain + if bundle == "Nginx": - secret["data"][cname]["crt"] = "{0}\n{1}".format(body, cert_chain) + secret["data"][cname]["crt"] = "{0}\n{1}".format(body, chain) secret["data"][cname]["key"] = private_key elif bundle == "Apache": secret["data"][cname]["crt"] = body - secret["data"][cname]["chain"] = cert_chain + secret["data"][cname]["chain"] = chain secret["data"][cname]["key"] = private_key elif bundle == "PEM": secret["data"][cname]["pem"] = "{0}\n{1}\n{2}".format( - body, cert_chain, private_key + body, chain, private_key ) else: secret["data"][cname]["crt"] = body From 55a96ba7902fd1379b81cc40fc067c93d08cced2 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Mon, 24 Jun 2019 15:10:10 -0400 Subject: [PATCH 254/357] type none --- lemur/plugins/lemur_vault_dest/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) mode change 100644 => 100755 lemur/plugins/lemur_vault_dest/plugin.py diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py old mode 100644 new mode 100755 index 615d09ac..87e1b0f4 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -259,7 +259,7 @@ class VaultDestinationPlugin(DestinationPlugin): secret = get_secret(client, mount, path) secret["data"][cname] = {} - if cert_chain == 'None': + if not cert_chain: chain = '' else: chain = cert_chain From 86a1fb41ac70800f34e1a8fe834c230337ef5401 Mon Sep 17 00:00:00 2001 From: alwaysjolley Date: Tue, 25 Jun 2019 06:56:37 -0400 Subject: [PATCH 255/357] lint fix --- lemur/plugins/lemur_vault_dest/plugin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_vault_dest/plugin.py b/lemur/plugins/lemur_vault_dest/plugin.py index 87e1b0f4..e1715592 100755 --- a/lemur/plugins/lemur_vault_dest/plugin.py +++ b/lemur/plugins/lemur_vault_dest/plugin.py @@ -260,9 +260,9 @@ class VaultDestinationPlugin(DestinationPlugin): secret["data"][cname] = {} if not cert_chain: - chain = '' + chain = '' else: - chain = cert_chain + chain = cert_chain if bundle == "Nginx": secret["data"][cname]["crt"] = "{0}\n{1}".format(body, chain) From 0e037973b27beb8a2be2737986d4d3cdebc8f1aa Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 26 Jun 2019 10:31:58 -0700 Subject: [PATCH 256/357] Revert "Faster permalink" --- .../app/angular/certificates/view/view.js | 25 +++++-------------- 1 file changed, 6 insertions(+), 19 deletions(-) diff --git a/lemur/static/app/angular/certificates/view/view.js b/lemur/static/app/angular/certificates/view/view.js index 065c778a..e4ae0314 100644 --- a/lemur/static/app/angular/certificates/view/view.js +++ b/lemur/static/app/angular/certificates/view/view.js @@ -17,7 +17,7 @@ angular.module('lemur') }); }) - .controller('CertificatesViewController', function ($q, $scope, $uibModal, $stateParams, 
$location, CertificateApi, CertificateService, MomentService, ngTableParams, toaster) { + .controller('CertificatesViewController', function ($q, $scope, $uibModal, $stateParams, CertificateApi, CertificateService, MomentService, ngTableParams, toaster) { $scope.filter = $stateParams; $scope.expiredText = ['Show Expired', 'Hide Expired']; $scope.expiredValue = 0; @@ -32,24 +32,11 @@ angular.module('lemur') }, { total: 0, // length of data getData: function ($defer, params) { - $scope.path = $location.path(); - // Handle Permalink clicks through a separate API - // Clicking on Permalink adds the certificate name to the URL after "certificates/", which is used to identify the click - if ($scope.path.indexOf('certificates/') > -1 && $scope.path.split('/')[2].length > 0) { - $scope.certificateName = $scope.path.split('/')[2]; - CertificateApi.one('name').one($scope.certificateName).getList() - .then(function (data) { - params.total(data.total); - $defer.resolve(data); - }); - } - else { - CertificateApi.getList(params.url()) - .then(function (data) { - params.total(data.total); - $defer.resolve(data); - }); - } + CertificateApi.getList(params.url()) + .then(function (data) { + params.total(data.total); + $defer.resolve(data); + }); } }); From 0c5a8f20394fda07e7e349954ce1607425e6823e Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Mon, 1 Jul 2019 08:35:04 -0700 Subject: [PATCH 257/357] Relax celery time limit for source syncing; Ensure metric tags are string --- lemur/common/celery.py | 2 +- lemur/plugins/lemur_aws/elb.py | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 2e87dbc3..d3cc7621 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -252,7 +252,7 @@ def sync_all_sources(): sync_source.delay(source.label) -@celery.task(soft_time_limit=3600) +@celery.task(soft_time_limit=7200) def sync_source(source): """ This celery task will sync the specified source. 
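To illustrate the celery change above: soft_time_limit makes Celery raise SoftTimeLimitExceeded inside the task when the limit is hit, so a long source sync gets a chance to report a metric before giving up. The snippet below is a minimal sketch, not Lemur's task; the broker URL and send_metric helper are placeholders, and the tag values are stringified for the same reason the elb.py hunks that follow wrap exceptions in str().

    import time

    from celery import Celery
    from celery.exceptions import SoftTimeLimitExceeded

    app = Celery("sync", broker="redis://localhost:6379/0")  # placeholder broker URL


    def send_metric(name, tags):
        # Stand-in for a metrics client; many backends reject non-string tag values.
        print(name, {key: str(value) for key, value in tags.items()})


    @app.task(soft_time_limit=7200)  # two hours, matching the relaxed limit above
    def sync_source_sketch(source_label):
        try:
            time.sleep(1)  # placeholder for the actual sync work
        except SoftTimeLimitExceeded as e:
            send_metric("sync_source_timeout", {"source": source_label, "exception": e})
            raise
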
diff --git a/lemur/plugins/lemur_aws/elb.py b/lemur/plugins/lemur_aws/elb.py index 1ab71b65..595a3826 100644 --- a/lemur/plugins/lemur_aws/elb.py +++ b/lemur/plugins/lemur_aws/elb.py @@ -27,7 +27,7 @@ def retry_throttled(exception): raise exception except Exception as e: current_app.logger.error("ELB retry_throttled triggered", exc_info=True) - metrics.send("elb_retry", "counter", 1, metric_tags={"exception": e}) + metrics.send("elb_retry", "counter", 1, metric_tags={"exception": str(e)}) sentry.captureException() if isinstance(exception, botocore.exceptions.ClientError): @@ -135,7 +135,7 @@ def get_listener_arn_from_endpoint(endpoint_name, endpoint_port, **kwargs): "counter", 1, metric_tags={ - "error": e, + "error": str(e), "endpoint_name": endpoint_name, "endpoint_port": endpoint_port, }, @@ -159,7 +159,7 @@ def get_elbs(**kwargs): client = kwargs.pop("client") return client.describe_load_balancers(**kwargs) except Exception as e: # noqa - metrics.send("get_elbs_error", "counter", 1, metric_tags={"error": e}) + metrics.send("get_elbs_error", "counter", 1, metric_tags={"error": str(e)}) sentry.captureException() raise @@ -177,7 +177,7 @@ def get_elbs_v2(**kwargs): client = kwargs.pop("client") return client.describe_load_balancers(**kwargs) except Exception as e: # noqa - metrics.send("get_elbs_v2_error", "counter", 1, metric_tags={"error": e}) + metrics.send("get_elbs_v2_error", "counter", 1, metric_tags={"error": str(e)}) sentry.captureException() raise @@ -196,7 +196,7 @@ def describe_listeners_v2(**kwargs): return client.describe_listeners(**kwargs) except Exception as e: # noqa metrics.send( - "describe_listeners_v2_error", "counter", 1, metric_tags={"error": e} + "describe_listeners_v2_error", "counter", 1, metric_tags={"error": str(e)} ) sentry.captureException() raise @@ -224,7 +224,7 @@ def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs): metric_tags={ "load_balancer_name": load_balancer_name, "policy_names": policy_names, - "error": e, + "error": str(e), }, ) sentry.captureException( @@ -252,7 +252,7 @@ def describe_ssl_policies_v2(policy_names, **kwargs): "describe_ssl_policies_v2_error", "counter", 1, - metric_tags={"policy_names": policy_names, "error": e}, + metric_tags={"policy_names": policy_names, "error": str(e)}, ) sentry.captureException(extra={"policy_names": str(policy_names)}) raise From 0b2a5e8646a24a4bc03dbde034e18bc944dea415 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 9 Jul 2019 09:51:51 -0700 Subject: [PATCH 258/357] updating the python version --- docs/quickstart/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/quickstart/index.rst b/docs/quickstart/index.rst index adeadd7c..280bb612 100644 --- a/docs/quickstart/index.rst +++ b/docs/quickstart/index.rst @@ -12,7 +12,7 @@ Dependencies Some basic prerequisites which you'll need in order to run Lemur: * A UNIX-based operating system (we test on Ubuntu, develop on OS X) -* Python 3.5 or greater +* Python 3.7 or greater * PostgreSQL 9.4 or greater * Nginx From 8eb639e366f7579c6fc8e8e643f63a7af8ef7b19 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Tue, 9 Jul 2019 11:13:11 -0700 Subject: [PATCH 259/357] Initial LetsEncrypt / Celery docs --- Makefile | 2 +- docs/administration.rst | 26 +++-- docs/conf.py | 152 ++++++++++++------------- docs/developer/index.rst | 16 ++- docs/guide/create_dns_provider.png | Bin 0 -> 88589 bytes docs/guide/letsencrypt_authority_1.png | Bin 0 -> 135302 bytes docs/guide/letsencrypt_authority_2.png | Bin 0 -> 
223344 bytes docs/guide/letsencrypt_flow.png | Bin 0 -> 90792 bytes docs/production/index.rst | 129 +++++++++++++++++++-- lemur/__init__.py | 3 +- 10 files changed, 229 insertions(+), 99 deletions(-) create mode 100644 docs/guide/create_dns_provider.png create mode 100644 docs/guide/letsencrypt_authority_1.png create mode 100644 docs/guide/letsencrypt_authority_2.png create mode 100644 docs/guide/letsencrypt_flow.png diff --git a/Makefile b/Makefile index 5af8c758..1ca94e42 100644 --- a/Makefile +++ b/Makefile @@ -36,7 +36,7 @@ endif @echo "" dev-docs: - pip install -r docs/requirements.txt + pip install -r requirements-docs.txt reset-db: @echo "--> Dropping existing 'lemur' database" diff --git a/docs/administration.rst b/docs/administration.rst index e0dd090e..491edcf1 100644 --- a/docs/administration.rst +++ b/docs/administration.rst @@ -320,7 +320,7 @@ LDAP support requires the pyldap python library, which also depends on the follo To configure the use of an LDAP server, a number of settings need to be configured in `lemur.conf.py`. Here is an example LDAP configuration stanza you can add to your config. Adjust to suit your environment of course. - + .. code-block:: python LDAP_AUTH = True @@ -718,7 +718,7 @@ The following configuration properties are required to use the CFSSL issuer plug Hashicorp Vault Source/Destination Plugin ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Lemur can import and export certificate data to and from a Hashicorp Vault secrets store. Lemur can connect to a different Vault service per source/destination. +Lemur can import and export certificate data to and from a Hashicorp Vault secrets store. Lemur can connect to a different Vault service per source/destination. .. note:: This plugin does not supersede or overlap the 3rd party Vault Issuer plugin. @@ -1090,7 +1090,9 @@ Verisign/Symantec ----------------- :Authors: - Kevin Glisson + Kevin Glisson , + Curtis Castrapel , + Hossein Shafagh :Type: Issuer :Description: @@ -1116,6 +1118,8 @@ Acme :Authors: Kevin Glisson , + Curtis Castrapel , + Hossein Shafagh , Mikhail Khodorovskiy :Type: Issuer @@ -1127,7 +1131,9 @@ Atlas ----- :Authors: - Kevin Glisson + Kevin Glisson , + Curtis Castrapel , + Hossein Shafagh :Type: Metric :Description: @@ -1138,7 +1144,9 @@ Email ----- :Authors: - Kevin Glisson + Kevin Glisson , + Curtis Castrapel , + Hossein Shafagh :Type: Notification :Description: @@ -1160,7 +1168,9 @@ AWS ---- :Authors: - Kevin Glisson + Kevin Glisson , + Curtis Castrapel , + Hossein Shafagh :Type: Source :Description: @@ -1171,7 +1181,9 @@ AWS ---- :Authors: - Kevin Glisson + Kevin Glisson , + Curtis Castrapel , + Hossein Shafagh :Type: Destination :Description: diff --git a/docs/conf.py b/docs/conf.py index dfa96543..55bd20d2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -18,48 +18,45 @@ from unittest.mock import MagicMock # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath("..")) # Mock packages that cannot be installed on rtd -on_rtd = os.environ.get('READTHEDOCS') == 'True' +on_rtd = os.environ.get("READTHEDOCS") == "True" if on_rtd: + class Mock(MagicMock): @classmethod def __getattr__(cls, name): return MagicMock() - MOCK_MODULES = ['ldap'] + MOCK_MODULES = ["ldap"] sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinxcontrib.autohttp.flask', - 'sphinx.ext.todo', -] +extensions = ["sphinx.ext.autodoc", "sphinxcontrib.autohttp.flask", "sphinx.ext.todo"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'lemur' -copyright = u'2018, Netflix Inc.' +project = u"lemur" +copyright = u"2018, Netflix Inc." # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the @@ -74,185 +71,180 @@ version = release = about["__version__"] # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. 
-#keep_warnings = False +# keep_warnings = False # -- Options for HTML output ---------------------------------------------- # on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org -on_rtd = os.environ.get('READTHEDOCS', None) == 'True' +on_rtd = os.environ.get("READTHEDOCS", None) == "True" if not on_rtd: # only import and set the theme if we're building docs locally import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' + + html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. 
The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'lemurdoc' +htmlhelp_basename = "lemurdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). -#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', + # The paper size ('letterpaper' or 'a4paper'). + #'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + #'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'lemur.tex', u'Lemur Documentation', - u'Kevin Glisson', 'manual'), + ("index", "lemur.tex", u"Lemur Documentation", u"Netflix Security", "manual") ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'Lemur', u'Lemur Documentation', - [u'Kevin Glisson'], 1) -] +man_pages = [("index", "Lemur", u"Lemur Documentation", [u"Netflix Security"], 1)] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -261,19 +253,25 @@ man_pages = [ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'Lemur', u'Lemur Documentation', - u'Kevin Glisson', 'Lemur', 'SSL Certificate Management', - 'Miscellaneous'), + ( + "index", + "Lemur", + u"Lemur Documentation", + u"Netflix Security", + "Lemur", + "SSL Certificate Management", + "Miscellaneous", + ) ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
-#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False diff --git a/docs/developer/index.rst b/docs/developer/index.rst index 4c46566a..0033c3f4 100644 --- a/docs/developer/index.rst +++ b/docs/developer/index.rst @@ -22,12 +22,18 @@ Once you've got all that, the rest is simple: # If you have a fork, you'll want to clone it instead git clone git://github.com/netflix/lemur.git - # Create a python virtualenv - mkvirtualenv lemur + # Create and activate python virtualenv from within the lemur repo + python3 -m venv env + . env/bin/activate + + # Install doc requirements - # Make the magic happen make dev-docs + # Make the docs + cd docs + make html + Running ``make dev-docs`` will install the basic requirements to get Sphinx running. @@ -58,7 +64,7 @@ Once you've got all that, the rest is simple: git clone git://github.com/lemur/lemur.git # Create a python virtualenv - mkvirtualenv lemur + python3 -m venv env # Make the magic happen make @@ -135,7 +141,7 @@ The test suite consists of multiple parts, testing both the Python and JavaScrip make test -If you only need to run the Python tests, you can do so with ``make test-python``, as well as ``test-js`` for the JavaScript tests. +If you only need to run the Python tests, you can do so with ``make test-python``, as well as ``make test-js`` for the JavaScript tests. You'll notice that the test suite is structured based on where the code lives, and strongly encourages using the mock library to drive more accurate individual tests. diff --git a/docs/guide/create_dns_provider.png b/docs/guide/create_dns_provider.png new file mode 100644 index 0000000000000000000000000000000000000000..71d5a0d36684fd55149dd9bee6c8751cdefbf5ef GIT binary patch literal 88589 zcmeFZXH=7G*EOmjAkr+94bqe-Gs3^UK-jR;<9uScdLWj^p z4&X&9$p5XuWjvB?cMFYoNV<~ zpIy5q_3fLHr6aQutwNhOfP-_`z*Q{5=er^m7geIMSYCZAaB)$>Jw;7o?%|f*$=-0< zz_65P+hv}Pif-@|WCraoaMOEs-ea11g?i!STU>LK?7Rx~r{TI=(Zwi`tSHE-^~aL^ zUN9yLogK8-0zi{&wUoU|iKo3zpI#-%AhPNm75hn^Kkj2xE(2+vi5?5TWvM2Sgbsy& zgE?IJXOQ*m)Z$f6Wz!25I`+40Z|XL`Q+$fOzF%Sl@w*;)9?zzTG-Ew^v(@xO>6dU| z^NKC3$6F!vo0G%3!0N;&5+eiWPpP+Khk8$6P~x7{Ok3w_Iit$9HuB$O_%7!D@I=49 z%5m#*!?cgoU#}q-bi=xz|aJpaNpMZHUo}M69 zuYN43-zH{sX4+5#3MCX!n+va5x)pDYXizdpPO@pxqt3(hbMepXqP^Z%o0696ef_30 zC)k2So$2)sGX&=ZM@W>10XwPc$Vqt99!GWD(t1UF$BtKt6iz?mSQ<`EDneUH%lO4Q z&w=4SqD$^jrXB?C?);f_)!%O&e%3r(&u15<@PWRT=_00re!~8sJi@~Eo?@tgB#|OF zBm3Ri@DVE!wUhX_BG(@Ve38>SNo20QXNNwYQFm3bl9QaiZ2u~%U!0+`C|%!{Yl9c% z)3h*vRR~ZwMsh$N;YchyE`D48u-F!m)N5R2%xX0mKC?bUU5K3UUpN;o1J_Lsn!NhV zhT3X1dv;u3dHWkNxsk5X?~j3cKlid=-X5%*(7Xsu82}-wk1XX(J%p%VRHTxeJD(Xd zYJYoPaTIy8{|B7y=C$4(YZeikF3Ct#z&ld9rz1|2}cXFqy%F=mY+0 z0&J(aRH#4TaHV6V_xdTZqW4)psg3|eGj3>Tfs+QnONk?vd`u;L^_rFF4$(Bf@Fdye?=et?r zUj|bwPhxjYz~>_GLN6z`7k+tHAOznxNXlk`;b@;}1tom>HNZFE_1v6bBVr2*^tEeB z*VLXrHGFq{XOS$4ws*SgKx0^1sK!oL=|e&R`^(?gZhiUi&60Q>aun5bP%r%pyA5q)e*^9IG4xlh#j;OZ3udnZMbr9LeLW|L? 
[base85-encoded GIT binary patch data omitted]
zZ+bUY8~Uw$L9h%G!`RuhNxhy&}`0>sQti*9YPTR<9m!sG(qYy5bH8-S_ zg7Yv=&|#u42Jo85jvOL@%*AZ;({SPGj}uR1WZGmKgXNVv%-GEF-vb04uw=1nrd)|W z_`ApWX2Q#tFWr1q`4Yk!Qse4y2OJ=DoiwAz5KxxNx2vX)%5Ts)&C75Q)u@`)rZsUh z;QzGu-BC?$S>JcCVL`AU2m*R(DgvTXBsBFZN(2-o^cn@}BAt*RdPOfSO7B&AD4_)i zAt(aUr3MHP0ci;#)C2+~-{ahwd1u}WuKDx*)-r!RE9)uy?7h!#*K-cKGR_ZJ)fFc? z=F0vh^-;54@@wi6w|ZMKL(Fj$<@4fl$48_vy7Qop<$BkPJqHG`ejptKtn;5cD*#IRcS?RUR=s)=8gf?SBfjQaM>Kl`+B zM2{%yZ6~tR_8kU5fr^$l&`rA2N=MAf&(uBFC{oTJ*SkC1O6ULM9iFZmLu^$!bJNYI zvu4eWWS}Q2Cd0YV6U{BQ*`|{(wg23`){vt12{v$2%5ppWaFHF)zq@2pnwWctxn zS7-dUR`Z7yB1ZEE$aWc&piyW=pT_VDp<`wNz*$!*5hg7vj`NckdB-$LY_dBXZ8KGc ziAMZ(Vm>4RonFeVZq#q=e7^5G0519)wQhUnSKeb=)E$!Z+0DX{o$^q*l}~%)O6K*? znWbjUU0(WLlpto^l7LMO7dm@vl3eDcU%5raXyFA_;uKC&FgC8g<7$^EacSkH2VxtU2))sFktz>avifU*$l8Lq(D!WykVuObi`_RkUf=#T#Azrmh!0Ci#yI|ZEj+1@kQwc8UT`*BK z0h`^Dl9exl@87?FX*gE@NoIVxbEOXnu-38t(8EBPmZ zT~pYk(WK{t_xpTyvAn8FW#9fjx8KS_boa{((19awZ+^X@@v8XT3#m7CZ^r5EIj4Ue zUpwb~f#cVNRLfgOczAdNZgW~$+%P<9ef{?5y|XJ``SB)ZBUK|}W25h{gA2?FPB(5fcMimyXn$1dhJS|DQcuu)jT^+d=xq&_rpczgc5Rm@Jkr)toBf6|m6^lR_Du{gcieGeuVkw? zEH=T@`(lAeL8RIK^OEO)m52;q{?gWXuxw*pex?*DnJ_(qas94-WmEJGq z*G^Th62_<5DrtUaw-7Uv>X;!}nwZpNg}`u)?|T@WHt=|2Vj?UX0Z~ib>*Mb~pXaD< z!Eb2n0YM-~Wdu)&Fz`=rTrlijn!&FJvy=9|T6b%iT;1n*Oprm!tq~MF%+JS-^8;6; z98Zpe`!ntIjWUwb;ELcIoqmivl_y+Hu0)3HE%blL`T~-XbWY$N6c-+C0~bN~)UmJZv};5|dSe&Z2~4L$g`(XyLk z+~c{~$sxNAE1eBjC~Av68Dq)}j#VyShd*m3R|0<*YrD`Ky=bcHJn9>Ry5p~AWwWGu z!_=*=pMtgRpM_qut+^yBvpy-?^Hr`&Yl*)@*KhMZgimF2L3CnCN(OwR>f8h2poy>) z9!*^(iNM3c0t_~VwKk|L3nb5d$#wA+3qBdV~nc6izIcn8Z zRCIGWrm*J!jR>Pt}{T59P3QlIY%6%cid zXRaWTU_8hiot4g4ZBZ=VdQ;!h5JZ}|<(H#h9CL*Un|+tC&uKAgr7I5R;&A+G|w@5*L zLx&4FHCMe3U5DgLY&ao|RW41)wia+Y_T>bEh)Cu16<}|iXcK+IdT`@DMRig0*o~+8 zQ9qvxXAs^Z^*gX z9<$sD!XGaz=%IfB4e4^^9qhnglsE-*uVzZ^pKiGZt)2MBB&{+1Z_-Ak*-m%DB=ZmZ zKmEWx;g-U_+m^VlC5HUAIb_h;krE#rQMbHMA7)snmb*5MW8dxsw`3^t3t_`Z(&uWa zp5$rW;B%Q#zRj@_)NDSpNW@Tb>~0GZo5kIYu27Ahvsl<#j>#9kYX4iJ*E|E-DvdFo zH>?`}r1$yiYT7wDpQV?%W)uPMk3N50n^I)E)RKi_MEu9Q96g}jMoX;v&7dy2 z%mRx+J9-1;En)?m&XxQN4xIinxTo6P^SRf?mSWUs6r+2%$#}#DQ0P@(0|e$_)mU_< zzNZuE1H*70SonZcR@NDw4WA)7{MH-AlEr&ayK!aXE_eNWr}M3!(zKUeYfV-wJxic9 zq|w%mS7!Q|i;;Kg=gl@g#iqfUnnC8H=XvgiiNljFu^4hTYolFXGS^3Z@4$V)FFu)p zr8v~fdx}b<5R=dm%El*2uj&gzE>(q_Bl#<_9mhOBzkbuMN!F5@4KFAxdx54x3MpZN_XAH6{CE!g~#H(6U z)K5Yt!cOstCInvrqNlAt*MVMwBKt(dnO*{h}(Ca8jp%ZT4Ly`|Go$VBQTr(fz!#RclLS zY%vW7tT^=Q*xE5IhJsummxB-6mgw1x=vMIAJ@Y#>><<_Jjl|0NctOyi)o{1)x2t^W z_$zA6cgU>#lM&TR(JP9G&9$}FD8hTSNoH8BAlZb4C3Mm9YQbP^7}EiUQe{_A=Z^i= zCY=6aoLacj@jjX_w_MEzS$E*Tl?JJU$)QzX^Pn`iLMUP+5cSokowf|~VTmrQT}L5x~GC9AQKr zSFXR2GDL8{t!%Neo2+%F=hm$4ML05Red)dHOEx)x3`ng-E5Lfc>}GOt3Lqws#lt;`htStTPUgV`ka{BA-^ zau3FZr0*MJCVw6G2zBReWxCaP%V+|98$vlzR|DuNvlUZ~-&%GnM`^^!NYQif{byNy zPE;$P2l+o-gMTVU&Bh+o0F2Rz23@GEYicy-2e!yD%)MXO1D(+-C&cdX6p!J?p4~79 z_NnTZ<)){7t~^;nANl7S|Kf)zA6-zvhq<2;{VOj0>z{i(fBRs5P2%IlfAe1c@)}~r zb3db+s;RwWE!)8W&}$4i5Xr7EWtq51yIe0e0Gq*&0snEXP_AAPnnu0SE#jS!zrFn| zIvb|3?$SDmmnS@X^a5v0CMWz7P=4KV!3{UBF*@?dV@-=l2O%7teQ_ zm6f>Zbak-B-$BRAeuevkd8nla_<7xsXmR_Ot)35~j7^WlrA^I@+*~{A$(naNyhz~4 zZB}&RruNl!;B24LCSP3f!a3C~Kz{h>Onep5<8VWfm{yq?43t-tfd^y+$~ww{rqXz5 z5EQsFc!5l*F65n1+<{Ly1sc$v1i9r^No#VO$;7A~A&Zar^lPP({oJc^+=2=2ARThB zG#HRY1?TYQnwRy`L4QM&K!?N~B(`@?c$DuxXGFifV1T7d`F$+U-#8qC>z0dD`5)Na z4Y6~RXsWq+IUSwr>U6T&b$h01aP1*e$Qc%$X>z`G11m|s`4fu?>%=2-lk$=K{{qYZ zgS5vVrh>0EJvQ8)w0|)5+wR@4tA}oGuQh)v!%bcQdp&GIf`5c8IvT|>_4l?H)t@Hn z-b26^+_!D4{2vPcO8if9esX6U2>PizKWEl9Y619sKXKUnDQ@2;J@Qe zs?`|9?HM-cUyp5far}N=5boEGAi13bMNXb4qW1_7hlY9w#kY0k{t?2aT5>?De=1OFnqKBka$j)f ztF$FwcHNSWu~b_FOgX>UGua91MUB$j&lNWaYpXhOU1i{ 
zY<9=AV&&N<=K2x`_G~|!Q*U!{=e&6bMJ7-q%$+Df}VTdDTw5lUxZL@@pbx2G<}^>|f_(|VWJ z+P(5-HoqxGZbsYV_S5a9QU2S{)w93Xtzzz;|D<#%-It>;w&&_GNr;PvFS(Z^e*z;m zKh7XbJjcvJm@TTb6|0IWm7tJQ5;_fHdh06_lpr^zpVgz5!A4!Hb8zxwv_ z|GUiyIM2yR&wHM8aC-^=NoM&8p1D3_!yQEOlR+Fm8T5Zcg8~lk-dliicBl0hygYH( z0}{YASIyY?^1-2xDmg*zE^RWFpDMR2=5pw<42{}h=F|x84pa8Vtj%dMLOeF;LDkh$ zpm_Y#J1VT<5T%J4i>B7g(`B#(F0NHqwg;P+%Z%KkAfmK5JUPUs=DB=Q#=)|hWU=ca zuA`mKr~wk4=qwP!B1o1%VTjGd0YqEl;)v|2&KvFzj%g8s`6U0yLG-5G9$ScPQv;!P)G&tx+x zx^kx}_QCczVOY;$rgd&qn=k2zh3ahG45>)G7aZ7!F<=ZJ^1EDHJK2{TLkHiFyc2e< z^ZyO$Y?hIh24=3$-QSQQc#Yetwd59eJg${Xb$(!FSnN2; z0^@&Zz^;iNp9PnohIdwrC@c9Qg>k0DYQ49n&JPHsglA2f@Xg=(S+9DGU-{n2_?9Sa z;>c^VHAUvHv~@dcpmwz?2Gxlvw6MdcS&&vD1J&TA^i}vqBz88E1ex<_vGeNL-b@us z?5_15|A3xlN+ThOi!{Rc{JxYhu0XC3bV8`suJXdnx_N_X#H9-0EE~h>GS@0A;f=J# z0y3TW_^#jNMnT?qdcF&Cq=Iyz;8dk^3cpH@zrawrGp`&18~w{-7D(6;rW9RMbwxt; zl2aA)Swmvga?4E;?OtkOty)FRT6hj+u%`Qx(PG6{i->f)v5zyjt;2SlF{bInH}BA0 zG5F)1eiTrv85&F|>~fp)EFkaGjpcEhU$@P#4gzH^Jgui86tjeV9yQcQohJ}&X@*5* zdPOz3#qopaC*xI*JnL^MhDBd&3|FR8%kf2x=)Iwn_lTKs#TkHW4+&K zHQPXdHHxRRcQS~Oh`KYdkoH1%;2cX3b4BDauwRG5r*DFIq1DXOV(W8Q3T%QKLy8g7 zJ#a@u(RbiE!~=)*3SOiUmFH_|V<=X@>y)rp!J}`?RTD@fUTtTvjSK_}JLj!8;MvO6 z6XieIl49-#tBQqpCaS9IDCIA!(J#I<j$}V$oVil;e>RMCNho(Y*V2!npMqHwag>Nq?Aa2gfSkkSS+O6(BPf;Bexn$Mp;W_WB zg>G)y5ChtVfe(Q};Q$$jypJNKjd8k!7KT${R84^`%8oS2GowPnx*$OLpR`vAmtX$$GwZX_u zs&Gb1gkc5?FejD+tBTjtQ_2pkhUa%yvKpV9$Jxq`Xnk;D3wb?}nPy#G_)=ej%ap|p zn97t3dHx25_opU({CZ6NB-a+R+^sCrH;Ibry#A^2RS^bw2Sgi83NetuT>+&r=o2L+ zAM*}!(jg-qaof)l4a>~k!icF%)Ycu%Ie{B{8J+ZqkG6OpS4W?^le5!pZiP=gE(~6M zLU1anp1?1aKDnz`*R9~(+SQez$(S?LTbp%pZl}L4#OR=~Iux%6DjE)c>fQH=*ZU!~ zKjFIcZh^gr9p@{qWM6jKgg1a|o;4JWufo`Ii1_H*t~_l<+RF?|v6Y=!$+!#ChtwX% zt+et5=m9(9vtnB!POBz<1Ci{Dj36m{VJ_5mcB@4R)6c5I31d59wpO=m{YFdv*{}B# z^Es!lY!8a3i%U50HLOB!QorE)Y;ok5nu&1Prbx-`rcA|AAvR322`M`4GaNmbAs^wj zjJpE$R8i!-F!jqXyl!`XyY_HlW*NN&$>_&%$Ag+47A9)Y zLKWH|czJ1=xLV$U3hHF0RMB~D$m|0vLB#N)5f$iq462HdXewz4P6s2CyDmO?x*{O| z>j@oy2VRT*lFWVac>XiM?TLWGtLPG7%^{w>s>bkFr@TYoS@0UYhRn zURt?a>KS!_B19jX!PJMf3i)nzX9uP0qMq$J2xm=f$$kBdyudi=*Lz=T;KsDQ!~n%~nd-cK3}?x8aFf*!o7g3Z zP;|yJ7U%BAH^23b9L0OJ1%R4H3=r}P#(427cfvdXhol^G%>@`o_s8blC9d-h=!H1D**mfpU8ferL2xgkbOB{Xq%+A2K^MkX zC#yp1Ws^n`%hg^AQ*;o5^0{QHsjb!ttJ7bXp#&QI9c#Az3@}{gXog-=UkD3>&o^S;;EH)O!ErBj=Jh$m9LAcSzuaAo6jD)x58ISncB4CVWdjG%Ma-!$ z&4JO>EYS#Szxf&X_{uPLeX8ApM^~He<#@>+x3V_2K|Ss@!g#Vm8M!=SVSHeZ3$qn& zSJkaU&8%^BdDRZS3ZMcG^jpgX} z1VJD>=bzLTn3#KS5 z6|fifRUo@W<8Rcy9~a?PJ*FK>EebJ)7#})Y2cETOB7L}3VplClrB^Sso`3f08_r8r zaYU>jZi9fGSj|rULuRB}vr`UQ0%>lkDHt+JQe4Z)03D)->r4$Vck*x6;nSx*Y1$j7 z)>n&c;L`^Wiui~zxkTPUR{8r*U6@8nPBQhLtIWNr?;&R{Pc?;5T%Hic0C>I6tf?}^ z1+{PpHy91xl-?l%&B?19oxp@PNhHT0=4&?RCYi}sCUd>la`{S`BP7CRW!t;@{@}5f z{q1DXd9nPhaLP9fh3&0N|J$)X9)CxX;pnY5kzF6=;`WrFrVa;Iob(+wzC>0mtPljt;rGroQC4qNtA*nX@05 z=J4|h@m@5X5u9Y^)!DNC(HBOI3iVn*H%)P$D@uRP3;Pk;U*hm$Esxzc2OG3F{c7Za z>C2)tiR!A6@`A7J+9)5=^;C1~RWlk1@yDMauzc2=P9Q9&3#@~=;z_Mu-A7=b3cN6= zUtr7anTua1dM0ots1$S9NNqdP*Yuw6ZxgtPl+R3vVJSy&viAk%TrpJn&5`Ux0XMd* z%LN$pjzW&u9HkpG$ylU;=5>%<-%VM2dbbvK&jaBWdh09JYmpOn8V1ZoF7{_lC{tEe zMkH@7N#&zkBhvqPy_&y$!^EA*e1E)T3uQ&1-$ZsV6{Fk6oRBUU;#SKNZvzV-v4>3q zIR`K29AG2cZPOqN6&!c!=--wvLrc5bY3f+kzLel5@PxPoHI;VR^%x^UYz5v2~zkwCTdL2itf5@IRA1lo_Eu7C{(@bY=jZ$md?hIS8 zYlE>Qb3QlrW>bz7ZXcQ?gZ-_(c+AN5pu|W_AOd$Q^iAOK1~r*UCO~IvrWluOLcy=9 zyFv85(Hyu(CXCcwB!vO+PkQ(XXMQog6^_jwHteYuJ-lXSlqh~P$`DW) z=-9XBE{C~+nR2SWh^Z@-|n1@FL^fBr=DhfUMq8y zHln7WG*Jfem`w6vjPkk7JFR+%VO{j^n9$Wl`edFivN-nz z4B0L0KOnrAVWHg?Kg?pBL%%|s>;KCPmy$VD0jp0Z`Qb{#IC_SP&d%_AR)WquN zz})6*yvd&;tb4#RVby_wIW$7iag-3+RJU0al%kTcE)-lJ22umD8iQpY0$s6tWi3_{ 
zE(|yT@hQ`>}BSp@iNUF_5;uUi%NrbDx8?gLo0f#on3Okj?K&y_(hw|PJK+%ShguotJbyA{jcZag;)g9 z9&5Xa5=Xp~m9^cZg%4Wib62>3Yc8!l>c)QhfUZ*;-T2OY zxj8~N=p=#kw#eCI1hOxhXvdP4V9TIBSEN%R?~Bqaa;3HoR$B|)#O<2+b%&@+>U41f z%NC`P-$#63SSzDIw0vZzZ+=5Xjyh>r_H>afREPa)kJMJ735_)D!p_sfI*R(5&3x%^ z)C8UQJyuuIU*l|{vy_AO9=Um|M8JH`T=g@&5GOBrLOaD=juSQ1aA2s*(`&@2=Uq9f zs$ll9yUq@A4QClIU|WN}$;B&hIPHm+Ha@j1j3{&RNMpp*s9X+57I=QQA0QLeAEs2} zft0F!r1DO~{SH?5KtM{B>bSa7fb|{EQtbhF%ezm0vx6di&zEXYfkIh%Y2W1?4BGd6 zX+R!O1k1aXE4l+r{#`2NK!K~T1Jb!YlKv^UJfQlSFOV6wE!h4;w!J{xguzb+?VL{k z$)KMav|V-l-su3Qum4fmFYXT?p9N}H)l6ny$W6)JkS3RA|Ax(u?I&I81I|Myd+`s3 zZEdTYfhrR6%uD5Caj=54I`Wz{P%2XjsFwK0%>2)f28?rt2$s6K?uCl!AIWJ44gVas%8mD*@G*czD2{-SOL$xx?9FWd!&IPCzHMaQo0Q zRfz+rk)Ay~1baPx==1H-cN&*sniFe#3V=!|^b2dFw#PY<)VwF0bZ^NdZxsSGb=bp? z>{Pi5NasAYnwir0{+urFW2Hv)0gj1^L9s%W>G%3drCSe@RVO23H|oC81(mL+3I%vz z1O)|wl^Y<R%l2K5lzWvaxu3b@_AL-+(fl57*-#nS(vwJ9FKw5B;z+*Bt1~ zCEVLyzy2ih-aWv~{Um1xPXFZ2Pu{iaph%IT1qu{eJV0^x;#S<<-K`{0D3sz*+}*86a012M-Q6L$CBU0! z-nrlV%$+;WeExu&IX?v^IoW5Qz4l&f?GyS{Ng4}-6ywE<7g(|~UsPYbKz(@e0tEvN z74Z(;WnssQ7sw+XJh-48E3N)g~lv1zVewKxS{W=4h#qGZ~a9AaN^WdV%%UBszVK7G_qSvML&+ z$O%8VP@2~67GwU3etA};G35F3+e0j+$e}L9jp=>^n`k%dx5f>#xh^+hzepR)ZNTQ# zFBAX17_jhtl(>zwPtDT`HPgxFst%ENex{Ah9vV?=6 zCi*ZYE}Fd!m2Uac-&s;{v`sq)Q2qP$rNp$@w<4%9%d4qXg>Ru>^vjoCmqN1gE5Kn? zetSXOmw-*?Ut>IbgKB^0%Sz*V(*JsPH*o1*TnHd{gf{D?p7*)J9Mm!|_I3`rQiptf zzQ7+^ePnJxu33C12f+QyphV`^@PefHAB5dMe);D}iQVjNK2yjVXsO$t41SDZbIhMp^dmVxo{Bln>Lgae6cNa0cCL{k>$Wx@Jp_#%D)9wZT3k`H(bKgJB%Wp83#*k4ln zgb6C%#J#V@Nre|{WdfT0+) z!F!FW^$!3%D)pNr8O>Uw_eS?eaP}J}xNZKr|F+h7gKMAea7&Fs*Rj2}o0kTu1L_vh z*+fcDXb<_N%lWnzj+9^GK+0a#m4Tp+6AQLfc@oNl?Y$<6QskHNs~=Y(b$~WH5dC9s z+X!e7Gy)n0{WU;}6&T9cf^4ujQ@R=T-q>D}>aO_``MhheI!LrDm!tYOTxC8WvEm3ul*O7UM2>AnxmyI&hp=eA424xAT`4|kXs91J$1{*?Iw|r( z#PB0NNtj2(bF1GrYF*|~Cn)cz+Tf}QsNt7XPmAG%)E0VXJ0QW}CXa{a?lJe)cOwS*wLCI_Q$2Kv%D#uhQ6CaPak5q|;$< zIv^tF$1s-7sJa)4@4pB1eRO;`Hu!oKh4(UOv1aKnd8`Z&bA9^u_@@1V-P`YR`e3=+ zt$dI9JD5ixqkRqPJ}WHxT=v2$(*I?47Et4rJ|^_V3(*&{U&PeCUmjuP0VpMnxlNqKl}IZqM~nQD89Cd zS&6wj>9FOcyB0uW)xgDtobx$X|j@;EMk4FYLeuv?9mqrFkfmoZ5=Y2ZYA zfd9n93it;g{)rIjGzc`>J0K2m-5FFFjAzIoJlEq#2Kh;Z{uKS-^47R7$6w)n(UJSk zphmG|ZDKR$m;ae#|05LZHZ;yVgM>Y=vsTWn2IFNXw!?9#C~p}~B||->9rKn)(s&Lg z3&P_$tXSl-gxexX=@GfJ9wDkdZG-P>f)%bS728l=weJ z=fih2^O01IiBSRP_3ulD7Q@MGhQYw)obQBJ)khtQY21_Wd$)t>12uOR`1_S6kaTde zkTf=o=Dwdm6}>+WAknT)9K0ROSM5(NYtFVw0HsL~wl$L6ub#<9omEM;O4GG4j=?XdpO znfl8@Yhy2csQgpzuU}{w=;*N%-V3zRNqO^v{a`ySoBkLv&Wkcu1C0Cf)D~x|uvPC% zu5W((A22X6YZ(JMd5|5~Ad2=B8c!~Q46h+<#KT!uq^4iKd^(vYWD@1JKs83fEZpXc z(^HZ-i|Po*j4h2(Ug~_nI;vBddYbZlPJZ#><$t=azdyW8q&Z$|&vIOD-0*YIrnGF1<$y;AHUh)7kyu&30l@H7{ z7&#S*LnaI4)RHO<4&s!B3uOJ33MWM1GF9zQs9ZLa)2_$%lb3PNK=YK4FEHfO)fTIE zXKDbSbn=Ni5fQ8Imj=fb*F^HK2e;4_p6k8US8k~#xOf2ceC4c6)GNyDp)yr4hDOC< z$~sTUnh3w>um9BSi{gFkNW8>;&nudVcK<0*tw>45yU|xItEQE8>HGLT>>;fRz*RO` zC@=p}Ix6?)HCalZc2=>GQLd1$=hU7NP%0-UrUM#9rsn0f0WFCx9r$nC$LKM|FS(h3`=SWC}9?w)D3Z}x3p(NH{g-R)o z%-d7(mFgwzis@&>E=YgW$%`}@{r?3*7x053s)X6FCtq>QOM#34Uk<6xkG%L)aRWa4NeskhBcj8O&UZFi`%+(Lp6AKae@I&mwU;* z)~zHs?|Eeqm4`Dy0_i^`+rKWY=;WufyK}QDMOSb6eEERzMvLxXG>L;(790iyir61628YQw=l+XB?ofum>?DibR4 zRA#7yDMGIa8B`-`EJsLLL7yqi*C1_$N0lmhy`6hz_z!nxZBBhnZhPZ)Rc$u*nCl``}kbA5~OTIt|bH!gwjg7=9OIr}Mdetz2#@ z`SUqtKuP|Q*r#{|bde461odCKSnk(DmD0CMI|y3H!^J0o{4B3 zG506aCGU8F%&Z2@GHA9HGhz1|SEEZ%)sa+i2G?GZu&CI?r14Xy` zu0pnZr&Qcw;N|r#0Uq9FdGXq$&Wh*xBu(>;cH?rR3BII6g+|$Io&AC;S5{xU&6LP= zwwM{$$O^Wd9%<9Lhc(X^^!DGq4?Iq*>ZKyMNWv`+e0E(1BK3|-3edLa%%!b#L7(40 zvB|_&nmzJ(*1Y@b>lrc2pP{wSc%Nw z$bJ^1RPNPNY;JH^GQ1htau`kLE72TDb!XX3uRI0Y&0^zw1b^9%sVXCDoGyAK 
zj_DdBz@FVuA-^-;F7(I`Di_2!!S`YUp)BPJP!LMEJ; z97{+%uN@Y8Ecg{X)7Io)cm3FARB!LYh?|}@nt?UP)n>ZGt``VFl%s>g0hz5{#cjtx z2fF8LG6SN$6p5)qhZ2Q_F!b4^ZohT2oRIEI>rD0th0gTr?=L^Xg+CFIK6CHd6|9&K z#HskZL@1DOz#e(6$K8R^+hpI_%zAaW$mnd!ucykql;hTA zC?dFec;BJOvuA-9{75eG+b(B)*$3(jYRq%wsVL=xYApxE_gK@t813QvTR1ke!#?p$ zS}b7D0uR^U-6#&Q`s}vv>3x9v{y0?;^Eq2awXp*&x8&XJm5$%-K;g-FtB+fqibcCY z3%0Fp1u2+8K5>}efr)!-Fz^5|C6w*p+0qq!raEvic0l&}yKwK~aoRW{LZ_5(Dy^V2f;~0*FE&6wAwPb@ z&tuZ6#O=JtS+R{Q$HPR4$nmM&rN9(zii{-Fnz-*rGjpTp;lQAZPmgKxW(#v%Ybuy- zC%|XQKP#>-DnGj+PqV%(^oVCOqqSYjDLSQScDW>7Y4kCma)pha@KRmwpG#c71AZ4U z{fmkWy7?LsoRPgVw95o7Z@x}dO<^ZI)~M4tG`&J;_7Koa6QE{)A1Iu*3|Fx~Z7p%% zKJ!yk!=VuVT!5mAbh;N$Cs zN-2#$0nud3__*$IU8i$xOZiu{ z0pZcA_D4BN|2xW}5ct6!B_J3GSoYn^xGnhxwk?hM@pxO!uu>(JQuWN7#xo`ITz;)P z^!xFt`YV9mDW;KDZ5&L-Q>^bZrfGh+^<=YFpCLQmcYBB+g^G2-he~`GLUpU1A=YW! zC+V{;2`*>jhUfb1BGCpjY`X{1y}nM63sqGs1^8&5-Yw}UIs6l zUP>5Wm-c)LUSLWbyKTv4HAa1i_t?HV*rKH}y4W6I05!jfXDYhaJ7MVhu@U&caU94W z0uZ;jq+}Y`1I9LM%=$0w{f?(ieZ@LAy1kJ!JDc2#A2=eprv|?Jy?;wHu#R|g`D%A3 z!dbC6&}DluUJgi8f=bJ}VkN_g_c@2m1FFUCs>M-i`2@KQ9(IV0(6QRfIUk`mHH|eZ z^(!21sm+6q)b^08qAY**gv0UbkD$A!zV5B002G@7xrE$Md|J#bo2-fSg<6Zmn#cP+ z$`YF`OGmC$_XK;X-O(w^@G>2A=|%8~G2YckmI&(yWDs}Ap6!P`Z8A@bzxrZ;b z`)3_Ab%@`8yFNTx6#I1kc6~2Tq$@TLW^>-DBNunTVV+MML{cb`v{IL~SVXYiOghT3t?8(RB2tAt8jL^k&D zXk8=wJKecOG`~c({!|jl!zLX4>S|K70gvjX0Cf5bFTkBI6MdbJbgaXL_rDacfG^S? z{vw0yF(%#x2I6@Za4IJ%XR!a6*U4@9d>Xx`AldAGYKnJZgK066njn0?-9FvsOYFct zlF1=$y^qC4e!Cyr)$ID7L~x%^o+_(;eTcPPPCkiw7X6j>S3Kt7?@jC##SUF@kLdnN zp9ygo2y(5x3KVQWp)L=Io>n=H+wsE<4gD*agMO>~TMz3w7211?A>FT-#G^dmnbl2R zz%TQ-Vmjqa^`Xlr2*`J(MvLjwCOHuVbbc?B5u+^dK6Bg;R^e4FpQN;CPR?rZ^B0`Y zCYLP~k<*HCPwBZ+a$H2FMxIkeBe}B2aNFrQmqO;!qx9FY^_}+R zD&u>jz`aGVWo-s?YQx!Lv{cBA(0<{#Z5_tZY!&WUBc8_1(spvXUD)Y#6(Vtax~BGU)=^lc+dv+*g*4}+uK@#IFYfRPr(W)! zT;+KjvDDelh&TH`3v=1$`%CdTk?^@ltjB&^afaCo&+Cb05pcQ*EVykG=sttJd0qrQ z_r81aqk1AZpKG=IPO#qP_g7?O`?r5CmB2H&7(z41XYWC!$|9&Z|5dAve|$INYmV%E z0=Pn-4Ak73Tz9z(daOFnh^3N)95nkxfOjzi$W(Snh`LIq+2di;6HZ7s>(24>WT}B9 zu2?;#W;)-+Y{h7*a71Az0^NSKx~|yn%`yJ?S^(&h*$6T`eOx4-eW1`!hgn;=7LMpU z^A0@`L(3tg-Lp}#LNs=XS*g?a-&rY6rVMnH!r2JU$$a)jvv4R~fOc2LT1M6GuiY zZ42&|roLx=DmN^f>4A9 zBRJ`BYJUu<^S1yXdl!aL=M>|1zw4SQ(~6W8?rX4LD8D@F!>Wj${zXw7*aK8Ydi{poV&(0k+}>E9ZCVOD7CIK`VoL+o%%G@L0l(N`8pI^J^eN=GOSY7jR840L* zmM6|Tm>o%bI{hYCf2%E|IcvW#4H6KzJzY^5g?35f91}DEF4zBpnsEsk-~zB`->xKB zN>(Hg#|MUxF2e1$bYEB#UveEX85#L#X9DR$T|jFWCJA0y-rOjiEn}zm8%O7}%xjrk zK%qq+X%~`;`l>j!9nSmHlcnl0)bofd?la#XYn1SG4?^Ih=H`QEa^vBT_RCYuBV_xO z-j(?b@;OR)2Dn1;c86r>p>G(<1fji$d*g5OOk}h)w3S5h|99{E z0tppq){_t`7{YVXGT9(V8O*nYx68pz2L+xiuY-;M)7NVxwHX%VFKo8IToOL&mB+UBZY+~InBRZVC z+C3#E{D&IhtXf1#P`NBTDK$)Q!#Be%<4>vW-(huzSOXy za<~R868+xfuBNifJuK1<~C-5$@T}RaWF@lyAZ4(#BOQ3|d|$Fd5C%6s)u~jHk|2k6pjt zY!k?SMjLY7*suyB*>h38_&q`9K!zwdevljch!c!SS#JOm#8spI0O{$!BE;NAEyQSxtGb zZ=m_t&*kc{9vi{>a-ABLTA~wu+uDmy7e6D*tmmSb^DVl@UQrnR_{r=()~ekOeBvuw zR(9ZZHYRqM|D&4>ZZ8+cxzoXoXH9PQJpH(;#=!tMZd5IBo3qM@8p5Z52u5!`Koacl zR=mzS99P`fE7w2(_xpoA)=YP*8^6^?6T*{)7K1sQdX`ubBQ%fWX*D26p~j#S?oJd1 zpJ1P8VbDkpIWFbnyGe6;oQ`tP*=v zmjqny3_iD?qv5hUn6a@0K2*P14BgQN8!<2oHa~ummQKUIgtn(yZ`0quT8(#*`&`*; zfu9EdxSVmItR|s^mk7|YA2d;mI_8+rFn$@KIo@Znhu~Ml*6^(d0H-EQnG-51h1)SP z;)R@E=u}ixh15lzN@@tsQ}G~_AxG@T&+aEN@NGHgnofnT$I92)a)Z2ZxBN)V0C0)3 zAYD4H$fggwD2DI$hK}f8q&wL-Km5*E!|q+C3M6H?!+hgnJk3!p&fmN~Y?Z3a9}EtQ z^Rl99CgDM3a|<~7$N2^k-kHh|m&E69gzXpy#8W^p&&h+SKAY}eUW)hWf4rdBRNpi6 zl-}E6o;T`nSS{a8OxMZUm8#SRaO*9!`&|B9Ogfq~rK4617L;iOB|Tp71z9YXvKu&p zeO&!3a3eB!eH5f-n4^P99D9O%QMiAIRDy3BuI_Xnn)1lEZ z^vo?GCGrd`RL;sy+fD#&pzqReyo#%eMhw>~2$ISovEpR9WGf#1t4^agn@OP7$L-6+ 
zANFITH(|KrO6h0ej(D20Td=*{@kX6S>qPd6l_oo<4C92M6(Ze>-zE{k2UZJ2%6EgV z?2%QnW~nUD(2mno+~dYe<>5}_vijs4g=mO11XX9gMN9BwIa2hq^}ZOtC;apE(QP23 zV2p*6q1Q4@I_kLSQ5?6YFD~E~M4rcb%Yk|Dl=<{JID?O&CEwA<9$o2J%gX!iPQd0i`^hX4{aH~*T%O6)W$Bh5m6<)ZdGRE3CN zh+u73kOiBKEh#8lnYr-TyQ?d0&jq;LAeh}3P027eAalOVjk=4Q1cikB^u3Kf7omsB znTa@J2CSdsOIyS5D1$?fv}}4KLN{-t@4syy(W=6u9(IycIGxAkJJ}^;Dj80EJxg$x z^=x*}L8H*d_NRx)pKp9L&>Fp3;7`enL=>Eaejawy#Z>}{5$S@2avWB0n}??jAjRB< z1nl6dWNW(U7I#V<=j775KzqM?#{k8sJs~QXBA3stDEVO!O7HhG`g!YhwdfGMWkn;4 zcWlb5w7!jO(|2PpOG+1_=RMI;V{yqK2nT@i{h}%YS>Y9nZi%}?9J5PKv$M&(zfPtoBg!X zqBOe)<3+GX=S%BarTQhP)iG&5c*ax~XzTyGH#)0(EAv^*s5iorBZ+ua8dcE$>80b< zb#I1Yt97h0NW^UxE}^%yPLp%aZ87*Ml^;~9^dXx$4i~1PLaY#iB|t6(-;@7EfHaIy zdyP`R9*w^3AO*~$!1m6nh zj2%3dV1g#yalY(-64_o?`0%Z+F=$?ERTxSWsnw@RQtHeXV;xKu772>o?>B!=7{=dV zFV|^_z`zu7T(x!_;6AqJuo}oM|Fv6V4>_212+`p%WEi1p%00+GPJOZo2HH&Jl1g+L0rlRB-nUS; z(pxXqogaF8e-JFR7bkmI&1h*p`hgNHCT=#IGOibpeb1w(5N|`*in)!GH z60KICP*IokG{Ci8dX5MVVWc2SUYnE75ujrH`%a9IuaX7Rvmz1F%US)Ip8%a2Rc;j) z-Mp2d+c%iqhQI@Z>hA-Ki%7~bq_6CF?)asgpjM}t${3gV6>or!IudcOIDYSWCMACu z%@hie-SXU|6gdb&1Y(SKus|+W?yd9CL+iZh>T*d~{hz9>1kjA*y@4sgMfDAl5wTXV z6!EPDh8y@g0=9Z~w(IkL?zZARu~E{Zm%BIw<}LQ-HAJeB5~x%MHHI{_-|ymIr*8TF z;F6gclFnsEx`DJzQ=QKuqhZ z1hBYTi+J#1VXzcjc-z;ItP$%;cpZiHqHP$7hzJ0A>ZM!EZ3Zz>8PyPA=cHAw$+iBn zoIP%L?=x2vK4r!HeJ3;kKp4=9nat=mhXfM%2C@YOx@-4tGW7U!_MRSFij`J+2ODw# zLwf!fbMtc7PN`MEbd%YSf6^D6C2*QH>t0Y@6-W0hYvg|k zX5;&?nPh6JICR&-(VDtQ83-94|42YeYI|tt9AM1vLOuH>jzKN%YqPoyKXCgGt)%t! zNSY#Rl($aO&tGmxiV_t!15>yJ=G-9f-3T+h$*CKp65vw(iDIB|-u>wH(Oh!# za*YL(nr=4DALBf`2CEX|ztnPb+DxXI+I$q>05_1$ftR!JDO>9EFUzPVj7q4PU%WMw zhvN#rH%9W+(uF2GRo`+~g|*XbzT6|QWN*VNPnazBbCdgstoPm*-2uMk1Pn z1h!xb_0$S^)JUSo{=6DFW|~gw=W}U~&FlDehd1(xymh2Lc=aKlqcydYnd1XS;+}+W zd_4lKFLymkv#WfgpK9KLo*$n}@O?DOW^z%nbc=mFJ?D8A-9(2F>VJ;{8DMo54mu3H;Y5q!_M7GI2N;gJ^+_Y) zpSHP1o@OEUyJ}$5hSNtyT_M}T9;J-M$Y0OA5%2EV#g&`bVh*tl_P++-fNLT(pVYvzm0?h5m`j;<3L`^<#b4 zH*}6WA(d1K(s)TOQdU?)vQ6jEZ+7n6=9RTaFCDdP%b|Pnb=G^S#VhR(U_x|cuesJz zNp%bG0m?FLZ@U(vS6^NPHa*9AO;l-5c%BL-E%k^HurICA%=oD;mE1O($`$fpc;44A ziNJdIkS(jPwU#H-oU{Be`PQOsBoeN!-l!_5I>czH{#4FlP(!r}llyjf5y)XLxKt-GzA0qq4R(CqL`7xCY zf`c(Fi`Sg>qZC=^W|GJD_MX-@)3j`*SPj^d9;_jX+~G(1erlIZCyb|mH+$sELd-Yl zFd+qe22bQ2fx^mJdA2|1U;aNnEuaW7TgVy<;t1RA4i|o`DNKjESPZ_2L4JA{HP)dE zJPiW+Tx`=4$;^}*q#e$b6~T4xmBYjR)>`~|SW*l6{A@i=m-!NzuTQ7s(J1*_9yu`X z8>@pc_n6|O6R|!{5WQ3$1`MJ?!!>$(J9hPdix=xRF`k3Uu_l2<3VL8m@UljO!(yG* z?JJiM6ye7Q95sZwQ95(~gdX1JocU%#nRd=R9fUgI^)ktf^4p@UfMcF-bim-Dw~kf8 zS;LkCy+%#C={F_Bmy%~TmZ%maE^Z@~=V{RDM(7tllfN!E27RO4vJLOkcC@nOGXH?9uMT<4thGPS2zT#L0DLY4aaUYnjM5g8A4#OYa$H5;~e z#0|rOMVLvnf7o|)ygmF>tPpL7Z4U`oq!{d&YnqUz64}e5ydXU{Z+e5!h}EDDDT(hI z7B9QU=x3d0ghjj+?86ekJ;9I*KKfU0T= zq1VOY!b*FdVEo4NjcT*K_L?x$Q*9~j@rnHE)g%Dye%cVGujwl^&Gon>0n8>1E>pYxF3Wgh75H_Jba#k;ORhqK#o&HlcF<1S@i zBWd(o^hM{pap4S|E~U=)>~oN|Jlb^_A#;xAQk_jkv(xg%p8WGmR(j00+Wmb2u-?CO zPBb?|MTGlFtXevB+goOTG-qCH?5V>>dJoRuF4mpM?J#N17!W5g&Ee^8ML{jL8h*9k zw~!SzjA{G(b2K$8)phDaJb$*G6`qns6KnrLS3kKeby;quk5*I}`YLv}GJ|o?n*q|~ zb~^nmHmdo$j4zudnIV@Q=sJ%tDl=>lFn|*b8$Rvhbyz&zKdHKmTPP%L-jYS6^U(wKK(t zXiK#%5&`!0K*IEx7D)hu=ESnYRe|+j+FW_g z9)|9hAV2otq1}EWfrE`wL(~2(%GHxaPh7vRy$8)L2$Mt)kPf!HT;o&AQY8~hy(#VE zVxXHXX^RmM?eaSU*1vniZaJK9>!y!4_k;Vri079`&GN5`$xd~Cep(f};oOd4Gw!X{ zCQ{lC-96aKQSPUceKlWlVsh)uq>s13oJKTs^WL2&JxQy1ed4@b0Vq0}R9T<-hAc7p z{wOzy;$#TivfZm}3BP_#JnND1n6T5ofol)p-4tr}712D)G*sx-4GuKLlOdaLPn)^U zL8QCiEL|arMH_-p)Jc_hSj!PGXnyCrLP)eI_k=CL_0*xAR-@F`n;3%dLd=#gOhXgn zIw>`roqt*A+;ru0iS=7HpkjTKr28IAxRv^&m38|(wZYl?dq3yu#p7hTk3cGqQ|j;G zxM1S$?nWNnl|@hdZ&_U4+Pr@er#>n^n%<-_BEqe50N5T(NMghYh-?4BbIYNeeu-T6 
zW>Wcg#1ON{D<3grI}tZDVoTvNt?);d!BQWdTF=*M+gvO`>pf)UI}ht(CVsPP z8{6l3wsG(_-Aa8zuF4gYih7N*q~cWiMM3|w@%lYmkxHoEN%gCP6$IR>Im}HznvyzD zDgo$_RW8WOiSPtcf&|c>Dr0o5?D-qi2QC(=5B&-TxjX_tro;+^P$j?9jo8*?tF&~; zBqAI?`kN3x>bgdPy!I+D^hgepns8Y5mrz&s(j{)&X!FFVx{Uxt*dudllo4mXP0m+D|$R3dI$}sV{YFSYTjj{$9mQNb3ejzDP`RzKbWUXw`eRZwYH!ZdM*0_xO8&&JDx!7?S zVYqrl>#wugQp#;W=W^wAY8&;iyRO0IP7hBz!6`yZ@4YZg=L*_Oci5Cq zzt@67YmC&24nVZd{C1CUHb2#lyI(_zTxG|V%_oyh0$h&EUn1$IM|DjOW5Bta)Wd{+ z&mZ<{rR$w#s5YYQS}L6z&t!EV<=3AgDzjrKcV+e6q{Kkvyas$_`P!VdP2c2LwIth=s za{IxPrahwxn*!^j9E!9GdBFZ6W%8~wIQ2M1=92Gw0I`A$b3fvU;xXMYZ-)bN5(tp?IH2(JuglU?`R zAm#)1U=r>ngqCMf3J3xK-p7mC4QdwNyOj|l5wIE#mK8q!?K%xWpw!IGN48d&D(15f zuu2x|xz*>=E+l(NIgUT_;pFUW17wi7BC~jPp#JQ`=gR*KF&_~pp&;R$Vzmx30d#oX zJpR_-vWi}6riYMgH(I1iwv6oDkvg_mph+rta1`5l1;G{lgt^x!w9ImvHuPDNr` z24~mc9+7%qS`n$v@vl_(%s6Y2O`Y7{(k#*c!9x+E*4a?=ru8dy(4y_T#_l<{S#QMQ z?PHHT@ic&2anv#uQTzTC? zD1noz6@C$Pfv}BFH!j~_&sD$txV5Y;2V4QkT}^IQg1vA3ZpurP9CMgs)x~qfB?<6bb=|5bfqSW3WeQ#P#Osy< zisbbv&I^C|4UfWmH1Gjmj9*e!9o`!K;)>oW6(Uk~T&@fAJe|)~V&`XKrDMw2p$one zYH489O;jDOe^W0 z3Ed~+Iu3Sp;%WtOWkf70^Pp72LnU$!6dmZJGA z5K}x?|FrWn$&Cc>%VB1j0gZY!#%)!|z&pnjQ447&ze#B=0h4ouQA(4 zJ?uRsdo8nOm#&YSVYa+c$B7!$l1urjno&3ZqOAs+^?S?qoO_x`t zR%w#`#r`+r{C5W=bjE$V>HHGeabi+Zx$+sUJcK#^M-0mIN}cR!>5{ER*#YP0={J;~ zgGp7QWIT@3;6EBy3t52>>+>ID@3hmZWHxepw~-^0nDwHx_Y1HPzWO{W3Bz&P9+)sJ z<1!pNXLOm8cy#s+$DTq`91jCiLDyKKAOl@K(iaxBBeszooym5lR-5s6l zG`B}?0%0eRLYRLI0fR2({8nOb?DQ*iYU5`s4QPi0HFCHeT6$!8plfc{@#aLrqi}LW zab_T5((s zT)j9Yv7erInEq3c+tlQ}0WhWXW2cAEk7a1m`VzCGi#Y@tXFfXjNXF>qFMH~mq)w!* zvr>3QlPBF)tOz$Ug?MFgQTg?GGo8(`Wd3n9$U2O6D$wiHs(fQJ#SdH;wxB>HqaCfIn*sRxs>>nKEJ~%kzR@v{9=| zuyho~*Qa@aPu1!VAw1xanK9;nOGe590mz6AF{wd>Nt`_e3OC#Ijz$5^RT|_4hZCoa zTAeIwvx`KNChMq%h(l;u84IiWqE3Kv)*eW zEu08=jw1|n1*ll0OTwsjpD#)spWd!VfA)V8RO0}=XE@4fl39>}d6EaMNj#R`@O1)< z60%tvzb~{Kn+J78q}fj`+$?JN7DCczaO$q8C5Jn05RVxWt0LQaR|(ATOWj@jSBgL;ULE`u)d}ML!w>*Gle{gst^m zWU^TDOuvOkpUbqiub?phQ165)nc^H9{T_)O?9jEf2-c~&5ceKV#71=8xxlgEI~Wds z4x10{(CdrDM`|013F;f*-`&ADWj)0OD-j#G*Ta>|Ibb>Wjw?=hyzvF*CJ}RrWbEFU zN_Zl<;x$Oo_&Icv;OZkQGt*sM$jW*549M>G?sRZt#Y(aNE^04W8ZKl^w_V-nuRn9y z*MOHM8y{{`Cr+%@hqc{$w+&?qxqGl2%x7~O1VQMA2kl16m&ikATRBCvDu`Tk!5Wq6 zK5wP}Wu{ii({x|&aOD5vpY&PYPYHnP0*Ko5>JAd|%f}}FRswOZSd=3D_p+5D8i?^Q z85-JNnb9&4UlQjbv{d86F_L=C<8d^rmUPN^vG)8VT|Vulz&=>xaWtLK;+(Z1?tk2%?{w6Bxj>6M)>2@0qjL`wQCNG^UH24aX=1MHhISM>MG zY^3#qxi`WzhW@olOyR+yb75Y1aeVagD^he5Vlk`^c<*YE)ceH!GnLbpk#^odPw0f;h z;`J?JuZ3xU7hmyb@&2Xz*}7q=c6DlzVhbS4;I1MraEwXois{fm)BX3r(_7dLxs=sd zIsx?7LXIKxU79dR5bNFNPFZH@ns&ZFA2wALj(vNr7sp-H`&JzyxxZ`XeMrG=r0wmP zi+Lk>z-QiHCf<=GK}&PU3{zfi;qgQRbR18(#f)t+s29IGrwmEsslrt2OGC(mScl@F z87=i+A5lj_QE;B#*qeww1h(a}sqGK2Z3c#{Y{6g&e^G(q%`0{CQj**o8G7r zgUfop(*uRo?eV-KUIXYRS39m%#aFl1cC0F@9?tp%dBZK-p5jVM^uZ{i=>s5;;Gz-6 zSF@){EPwt!xC)A5I6t&zaIwTRi#KM=543Kd>EB=2EE;*~iKU*OQ zh(-><1_jjYeO8VF-n5TdKSbC#>RBoNLF^`Rr9^LsIRrcZd^Efl*dEgQuB&G~{pW+( zcK=u?*R_7|4pR~Wr*Kp`R+ZLfDjdMR|P@|n_fwoo1 z$Hm%p7DCEmIQ~EOzB{U^ZF~2KAWa1eAR8i zFga6I=tw~wU06|;`S$f;Wy5&W&vq?(30k8#3?f``c$B@U9T_g;3W1g_EIKrK`ia&sBn*3TYqz1g-dch|pSpr{+@HCpHnk;wq2Ul6VmlQKpqLLVd|mK@4YZ zQsye}&nW-)b!K1MPsEkB@a5`AE={gnog9f!_Z3FgXRqS!fV}qtHSTL8D;|d#T$B-i zZXy_*S7g@dnQ*5RyN8LOmeBLJ9$tMW%}NZ@&10qES>7uQ43XqGoQQ+Cw~Bl)sKVw> z{oJfNSSP=-+W{be3J~7c3)~9IhX|yFh~%NO=NeQVG~X+u1IUW1cjcv-G@eD67L(Xa zK9Td4FlGSdY4NIIniLxi3PoRjk9cwkdYvmeRo&3_-Kq`^bdnNx;mfH>HPqiJv zWetlxiEQsYcvoVf4I=z$ zXMBz?)M0n@t33f;WdMr){>ojm&?kTy;<_&Ul^Hz_*C)u)+LSx{y`Fpy4!=7bNj_R= z5M2{9K_u^MbzaCqg=5F-_S+rF@1;9v5Jf@x<-57| zGFQPN6RxWSTPkFk&4(~HFf&RlKf|ckS!iG3XLYZP*t;M)xP^)m6L(?7^t4PX6i5Md 
zZH?dTEVGVw?sn}EG#|8VwWXD(HX^L?6j^lvP%!b%Ft{@s-WT7Vhi~Mk*#(~~7=IW~ z>oZX~m{fiIOy=HRLpyO_iP<@2RGYcoOWttOT}BMVt~!rEz`Ihb#$abwhZT8(vmU3Y z#f`gKYJ*)fR_2vuf@t}2m=YnqYg2H;P#ZUZ_|8dNk)J-#As;Pc0ZmY|6i)TREyFBB zzq^Xs8z|4tdcLqRcooq$Zx?uZVTcoaj2dmRrq2ybNFL6k%@d@Ziya#3jt7M&w4ooP zxkWbHFD628wUwFc|IckG$LY8rAJSo>X#PyRbQK{YpIDbx&E2 zkeHR)ZBi;buVVa2?|fywLpNbV^7dvOUXnS3Uu27vPAv}}F})p8?1ze( zbM;Ve8s1fR)lIA$7GYC1rM?R@3cW-rmN8Tb15MYeqnU)H_b1R%#RditA^LE(c|C5q z8aIb4y%KGB#h6)ip(`t`uPRW z3;d4qRaa!OjtcKB*N&LNFQIB7ij*QFz0VEL2K<9{ck*w8x&n&_O9((p_dFqs>bd&+ zKZMmCQE28F#Uo4YJ|Aips>$W=S@S6wLFMWAbdEQSuS6he@G+tIh-)`)C>`T5bTPa| z5t-m)i)d*XtP<6A(Y1T417&Pw_JeQOkM8wrWCJNzTt_D?e&E9eM*FAp?+k0)27Ubq z@80Xu%u?mn>d+g;N*`>Ttzc*N_}HY$X)v^TiN6zI*F~=m7392vX=YaosERx`&(RmH z+ncJ8(V^fdG^)!Y6SW!5kz)i^WV~qlKrJ*YsvFVyM#|23768ZXbfsFO+99;m1v*vo z3d43abHOnuAF%Fnz^E!b0t*K-{Bk z-28}_Iqhw`lr}`9dK)`Of?;n{^_3aKMA-8jMRgN~-j1xXsY-|9O-lvnUqYdqtHwJ% z03_QxLB>a~)}$TQ|I|dTXLUH(C5oe#!eg=$=2wvGNYivymj`!v&B8zw%}!hz+u5J(<^$ZGzTGUeL1>6XmVaWV4L6+x(KLB@n}_ z?;4+J4?&Zv1{J5u)9*=0v31#7pM?daN-Vf(HZ&ahKaI`#%4<^9ThE-jLP%1%CImsc zf7V=bPdx~JYexFuW}?v0f&DzWh6(syHL1)EApnjtvf_L)}d|4k4g10=HWovUT*@ zxZ3>! z2Zz$$6Nu`{U8#V*1C=tbuX{foDpXrvA03+CEw`g_=x;237VQtPB7I1YSj9JGZ%%+8XDLb4-3p!%cqXM;`mkt3sqxbcJc` zQCniM?Tj?U3aNo=zu10ME+@b-@jX>WCX`Vq%;++dRCmve2FQ6I8~8`EJ=*`0Q|8@r zS8?`56nhT|N8v$6w<(yP$5*$u`Ezo^zLS5JQRiYo=|dW@_t^ugqN{7K<;;JEL>00! zTpH0>8_T)UohsI=q30C>a=VcbpY>)lB0|(o?>c^hSsv~Xp?;>W2A?VKzl9^X$y9Bl z=%|_m85?8qZDTq>gswD7ADaV4Fr73#(2;C!!7lN0mO?vnqwx6)*a?T3XVNd!!n*NA z{NY%-o6c8yt%R>L)r2Gn!jN`Q6Acy_i6ud^2(J11zz*@a2|$HNE8~| z93-lgApBllp5ls4smsbi{*1Rl3ArFs#Zaq>-(1DaF|*m@)JTAd{yEDA--sV|h%4&? zXrqM(elO1S!w_NVlXFS2oJKJn+WFSDjaOk{M3qi-y=I;0SSlm_n1cMy(~5TvMVyZ4 z@6e78msR1In$?j}=&SpTAl%g}))~>F9O?*tPLNXH>cALm9jZkWqjaKG|uH(Izg*yWJvPwD9WBzOED>6nXiT2=Ihv$ z57Ck@DgnT|nI6!c-njkjk_|Ra2k67jc6UOyQe@D_>{n!RYdf7l>XKLAMU8mdTd7`~ zjz8fc!;$WtX?2g4xlGEriVm8ZDM#HNW{gHGsryIz)I&6`EjL645uc9~%i45lBr>d8 zW?vk5_~ON~&nK~P>iR%`#>mLo7z~doP_M&Xnt2K49wlEY7V)d|s!%`jCW%JvPF4v_ z?b6glsPZnligQx3TZ^*LvOE27_=!j2V4rh1D=L+H!8IiV zxiv=m3xpxmcL~8Y7x6508sg7le4}_=*+ITHrOtAHK+{AmJ6>$KE_V><=5k=yrK6~P z+wHoc%N>$*+DRy(bc>asL(=jM5uC#1f^v0&Kew&Y*wQT zd-nOJa;j*Q&b4_fPObcF-68(ywBoHtsE!;S$gW4q#8bu&g}Pfp6`VUZl4di_6Kg56v! 
zaJ`#mKh-*vTV~{&rWKOGxFa;2rAcRql6=w{WunKL`uw+*(3pgE7N!R}ds*AxnikLL z;sZ#oQYFVR;LFxAfrEvLh#-U75 z@CF<@M(F@FWx41_pBe=Kueq`WL*?reL{?GW)9tQ+Kdoqjt^w@q!uiIm$sOh~lgDrnj=1IW0O0VnFyo&_|&Q2{Y^c`x2(q#_ijcR1qb@d93 z#LVx)YS;ICNwz+a<D!vBFw+A@J^q6Q6u$W4CegzH- zUnb*}V@~zGE@C~f7LUEETm|g*tE4z=DV|-5>o$Chsl)s21(ao@Kk8Z1(Z8)|Oal-P zgX#XA%LQUHH)(aMC?{u9p@$DG`JpH+&<74?BaeL-JprG{^zQk5yOEbKjex>J=28j+ z;D7;u*LC_8gT6MJY43SE&)vFRUKH}ld5Vrl0Be*>$sK{d7XGv2z~J-dL`$kpP``9%{EZdssC)hO&kx&Rce7)6EN= zb0hrRs(QVUE+~(8!coakI3YD1`oz8TkOStD0mY!p?#ALSKYMc#5N?7qCi-ayM zqX=yH25xX?jH8lL2d*^~Cr=&Eq&=Z=XZsDk57P&2!_Ekb@>a|#n~?o5kbeEBy)up(qym%b3}RWT<2ULR|4q0Pa<#L9xfaV zFoHfjmXvaw`&yV^t}Kl~rkTB5Iv5;@#!vanh!ylHW_iJtW4bomjce&V?piGOiCryV zG@Y1q6UFXraWgMZdsy@&-KEuhXYax*yrx8Ac&kWjlCo(gUF~+~^Xk$SUd0jxs?-{G zC*~g%vT-v@2U5?XnI~8e2r|29U2OC7-k}l|eqNTVIjP zX-9sqC-w=A#HTT4Z$cLj^Bc8SH)b)Ik0xYM| z=(`F9=XE0>%NOVmBxrcGp0cl$A6?@I^G{Xps;^t*K{uK=f5`Ep)>Yf0dioi`9gYx>(jzxZ7?P$ig{#@Z-Zj@!>v~qN&wT6Yuo{d4d8mxSW|R(@ydHYA zGPE@~*01Mk78F>%@NzO3C-w2>)`UHe_1wN@yt4JiUnW^X>OKv(xbUia*6UjQOPNVK zbglYS87sDIw-{dEOy@Jskynsskq@l={4ks7R=s!kKh)>&qzO$^c}vlr=i`}PU|!B; zSG6}XR#%eD17DgcJ5;u~eYh}V+!7RXnX+)Yr1V+CjHXnxo8pe`J?CBn594+HVXiXE z=3obu_oYtnX{1M(b*CxwpKWDQ*ooSko15lknFL@LDKA{ew0u#&qUJ0tb#;oDbFBNy?xu-vrp<AG&HRs9-2V%HH5;#jz}*cL>`|H3M3&^{G2S#=-i_C@QH z{VpeztRqScMosqVN>m1k3-=>*`BMJxeGGpEK^frYLWofV6U?qHHG2yoSOKC#=8B-EgE0Yzw%N1bb|NFVuN7EH^#!p(fB-P$5P4_6*} zSH;rhVnk!UIVPhOmQ*!|-LMaD>1_}A4sTxEfAcnYnOM)ByY0T8<>zij4^{(7AE9UV zFLwG110Oq7w~k^?0)?%GYcK4}eACAltw3vZoQJm8rmki&J5j2~R&D*<)|>a!&P%=L z6?8&OOan5Px6n(J@^68=CKeL#lQ8ZTM8Ir1f-_Xj0u4Waf<>1Jc2N}$^X{?+gI z@4XY47H>!A7SuE*(b=$1(=JIp-V2{QNI7?8f1b|n)%RpjswLYxLI|*jx4i58)wx&? zFVM=HsM5vWdM~&If$$v)OVu9xZY@x;kF0IonyiN?#+ggWg)>QO6^5>jJ5jLaNcn}! z^xmm(QHXQt=IP2W3BOBweFJhdRdJks5qRZGydsccB9k^crWAlMIA1>;);UO!_1X21 zNU7xVd14t^!fyK>7a#4QubL8^f@?p)`fYn|4GAtl7(;y9Bgk2W{7c6;P_rE4&U{ufN(pK<7Kw~ft`*=1{j z>`!{HH4Mrbcl>-(?N{P43NU;BZSe7dDxfYzbbzja(NNN~4y0Wa)aO7>J%Vt@>EH3&avR_8?rxg#g!}# z<_o}PJL^-W=QafahmCDaHFo6UIV8=xDt)e(BFd5U;yQZ)-P?eLIS;%o+Lyp8&@v}goJ#NhNOOuQ;c=aNWEtovw_i7i+%skU-b30A=0lnx^FWg zF1%w9U8qxhz07KGSo6})Q(Do<%3k&jdMukl>r6o``@@Rm)y|r3wbSm!hmRiim)q$B zg_afPr5(npJ_Zr%^Q@_1WH9-=zH0ZvLxfEK%Zb!Uw>40%>l5q)$FI zWIPI=3|j-*vAl`s<-L9NpL~p6Vx72Hz$Y&BLaaXh^TS`-9QZZgXYrU@;qs(7DdwwE z`LW*r@O-yWDkbHN>6cSp(mzE8zGAxKtB~6E{n_u0N4+^BfHruY3b~_SRUB$}M5DPh zJMJQ@;Vq}cvV;9oE8 z`{FALjuO83Uqq+=YIMf$?}7f-(ckmve=huA*3sWf#!wWIBrSGZ0A zR-yYKQlE$Gi#g#PQy36zX>z`rLGTBBo?nMGrmS2(HH1nCZ)hd#gw%~lWb8{mYl$Mm zY5H#5NnJWk-cCj1*+&0JTDX!7rdCFh={m$pDOHFy0o5h#SHG}rUPX0= zDAm)ilw-yrgT{lyotZ1j_;`SO$CX?b+0 zu}i(-6-%{DZU|Rv*KbYl4@<_;i42fg4h^^AHbV>***9=xek=i2r%#4p#wbi zSAPm6I#>yp-9BwT%ii~!3#rH513FYLPrXJzo@viAlN@v_&2yRlI{QS#fUQrv>1{TO zm~1Gf0hj{ti&RD)6Fd#0<#rRc3gBGfL|A#Nn!{dnV(l>eu!?DuAGc{S5m+EP- zJw`j1ox_anr)ijss(ZnJ%!<4l>q&v*_ogPOI*1@v<2`f7mN5q=DyIj`ehMT(Cu13_ z_whqG64f?ANCnm0{*D+Cq{9@utgmc0Igd^$3(9F?1kE$3>y(k*bed9!(q)l%@BA@< zo_AQ*0Z-rPrw~tI_VBRz&82v^_-K}d*w2@ob~E*!2IUuw@u2E~N_kb*BK~E!OS2^1 z86pw>zRBY@HSF1+XMTNs|J1pqC{arwUH76N7t@0@7}0~s|FP@%pLfXel9Azt8#!0l zPMd3g2<1ls#PJl=4^N-+|Il9<3rn9iWyY8AQy=6H9di*AA8^TDJ?$0#^DPX$0RjTv zkdM;93OsGy|1RcNYU<^@-=5$4&sVTKqa-4NlnFdNc>EtnVUbOf2XFu6IjGm4V+Sj#9%w|VGY3DUq+8U4XzPhdl%rB& z&B(lyi00eEgP%mPo5kJ`fAz|FeR6-k_K@Rs0SP5-Gva7ljYm)8*sHYiq$9%Za~H3b zR+@*2u?lQ;UlKGuz_Eux-CY1vZrXG~>|jG7?6j4#@fn!@%pLPAe&Or_W;gI8z9crHy+8?-JBX!?Utxoyl&G< z&XJ@s#W+$UetgK>x<`H`{b6uGs-dIVEIcA-^QTn)=9wYLUo4WJfovLob_8qD8p+X7 zt4MJ_msd+rfsIdmvl(pRV)t`n;OLUgG1G9fa8aj4ug5hMvv>5p;6dKY^ zEDlcp0FLkpcQl>pxJFAGLuWsntEuw3b!8^lNjX5|G3Q^aZ^h6cR7r|~A 
zNq+Jvw>R`ju6=#aEN0ZJGKs0p?thxulNBc20=$ycxo^A6onTXE13s#8MIOT2cVMvG% zj=b(`vlM9%u}ynOC@A~zX<FV3d%cBeM}B{?aewXdgcV^KJ|NLwZB{d2zSaHMxGSz-(4;va z?w*(Vc|Kz3IjS2s?2w-jii6pz;ljbzTR$tX9H~H$I3-RV@_03%IjCt1jdoW!jnK;8 za=%0CyTB_M7O<3JJp@1+%FUyt^2tvH4hIUO7g~jCSG!?Sq?&+3Bj>kAj~SaD)n1bF z!iC7a9SLBPI%xogUF;5i(PR_~5c%2~A5xnB@RbvM^`pUZqTKGSlOkrKK))itd#u>h zWLfck#(g}EX`-Dp9i|qm;%ZbwgH#xet`VtJ$bH=T9AFLBj2EX{tdyxEfP+1g%Q_E4 zXA5*pHxh~!UXTxKfD@kfChmT#J?#7P#EQIEtG^zCUmnO*G&*)P8vxz49pe^UFL3NH zu-VnIDJWwLZJ8UGta6&~7JEcH9o--Ac08;Ve1-(u;kKGl{~@>rF@K5+mA>P*<**~2 zV^qoSryAdY0ZWUiAP;;fRqLF+45eg_b~pP_+O4FNi^9}46S3Z;J9K&*A}HJ z`P@~7D<5&>>DbYp-C`k)?SH#)flu#iz^AgNK_Y9}W=6O)dzj?h zvgD77Qp}6$_YeLWs-z{5=xI4E;X-AdXyUeyO7{?AFx|N}ML0k~T^VvW9?J|pw?E*o zwt?w|88T5_K+N5T;k)9zu3y;7j7nkjasak46OJfswkX+F<~*i1kp=ggE3E8M(|7ZUl5z7e#A%^=Ank) zQmym)0blnJfCF9WnXQm-uoClUplB_jhocif6JVtPWw~Qg~1LNzpNLx601hRB#mDi z)UC&b3%}jy_mNJ}Mweg95V%lFuILC>M(y*WA3R<>}-$saLvSBbh~4PC7=rgI!AhEm)IDonn;A-T|F zm!FQ4vR$h@EiFFG4#c%~Z0Rzo*ZtZQrf*=N0v>@WyP1KzZ46PV2I=LhVfEat2yJO6 z>p+!5)eJGpGgVah5)fKw@B8<>4n7YI_fm7|xI^@?h}Pq8-(_kK-nL%M$z$~^&7XIi z5f=qcWBj=7Icn$@I$@6xIy%fov;wCp=;*wMt82+PF9$nf>JXh`HWl!B#V*SnK`Qhv|el8yakr^>`o2s3%nz`o@cvgJ> ziDKN7-k6m6ybqAa8{huf3xJu)ihZ(b=}u&-m|;}2n#2EdAyDXeiiK>v+AMLb2xH%$GsU$zw}BTuSxkDd?pt{DOUtu z@;WSl_f-4)&Xqe3w70bZHZ!KIu3X^kdUotF&x8-Gl5{4ByYLy=>WHEB58pYYQ#iTqRg|vZ1bw?;gs{-g=tMf59sV@Fo_fN6T9YElf>+S0d+b7GPtaq@j zUacSBifhTf4ni-;v!MHR>=q7mMOsWVQyG!2q0`(2k=k}+7Nd>Q9w>_iJJK6h0P6v_ zebP&g5tYh-nbz1lb}foM2&Q!bkGR>6iZFSjE%wXQ%?Z+=7APtyaoBFit9Ng>>%K`x zzM$?sf*|(ou{R3*T}EUvtQN41-tasizN`MxIYOWc4rUrJw!PEmqkNbr3%-=sDm@sI zOL}xDJB?DO@EX?4Dvd?DPS~aLUWqqv4ZSYKHFC}1_(-dqpSQxEFng>IocU^Z!{@MK zKJ%FPEWn7>_Vm=1?oPrNmAbxgHCFelk5ZA&lq=#jwCR-JD7?1JAY`v(OXKpb7GQlX z39SzBR1L6zi>dT&i_OXr#(JK@=x?#+3MamxrD@zEfkcP?ry}V`aKf2*qWqQx(BQTj zkp6o9?Es@IO(;g$*DUN0C{es~F|4vD?=yjHso)uD&qAz@>r1;9+UUEur}K(?WeEGK z9^025-H0UJ-|CQGtTYYpB!c9uuVklmoc)4TG~bOb=U;T2GOM_D)XJb+tUA+bAx=AS za7y0bSsoGdQK)r+bTstJS2e%n(LL8MEZRs^Vca@oyH*^UR}<8tAcLs<_Rxdy<{{Rk zW37J^`SXJ*a+18*c=nCuRIZ_JnnO=*ya&ERX=z6a%L0}t*9u275Fd=JJeTgYlC4k% zWCOBl-TWYHB-TUP3|Y29%}XiQTwyWXz4z3m4g|Y&=c}%gx_LOSIbFsT;HbZ4Lqpi7=LVKP+x8WAuJy?E(+A$PUJJ06dmxsj+Kk)*S=lqgKRnEx{Wp4J8b}NEIDuMraTw@|L2mE>aX|UaNq2_*mMk8^$ zWH;eQon-3WI{_ZuQ6FN5HEFYYQ=aK&8n6*Xag1WP0LrdD@Ar(r*t${AfS{vkP+iU1t=Et= zpvNU}m=Hsab^iI?b~&2 z$2$+SbMTd8Oj}2Kd8S`RDVxITcEj*Tdu?mAz^fJM@r-5ph@J-5nnv3sXO z(F@Ou9Wt4{JY#ayNEK6QS3Fzmzb!-*>>_P*-}nG$r8w2UJIzgXj(?qq!J*V~5;`$e z+;kBXBciNmZUo&oE5`DHTK4GEDn;whhPS0CTQtJc4!HtYiU{ z>Lt;6jE+Ps#7^f>lMy(r{ApG#=u{F%UcFy@RJHgqNeI#!a@#7VA|8}|gb6EMTMqYe zhrU2G!f%qe2H)YOOMK(g#Q!r{oE}WaitCmTidLD*67UVVKEl zEt$Z>l~0e4MMz(pA*@@C7pOJZGzZM?r`UGR9VvQt4{I6nI!M$Pam4G=>xF~>qm|6 zyymmR(fVY1(7QrZ49<^R9$jA6zO3_9?o)pT$~;QMwHUy@ifIu(yNFG?L)NM%!p3`> zsAM!o${z;P7NhuTT7b|hc~oY5NZsQostbpikwFj$RFkPo+(KV=rZ*pIq7h$crgp%j2 zJjFWAAfN|$oZUuzj8!tI0y^G9CB4VuqZ%f;RdfywbcA?5HnAB}d$h@M6u)G4nhE9r zCd#64LP};9G2o|Z@D8tbOz@MCmV@8qi#G{Pe1WkXQrC_X_GAsPLoY6vA7=iT*?OuE zgFYzbJk(A&a%jHbhzjOt8Ju4ac%*Q|W$(SdTzNdKSOPV|cU{^%dZU}!y=iK8u^GRtl-EZxbiSa(e&G!12c<>@&fRPOV`+YxwWzp18)5kA>JzY z3A>6<$@(u>Cv%(x8SEys+PeCc#m0FKU`D$&2djCo*0Vm=*Vept#x3UBkBQl3YFcO` z-?%d0M%Rr>Ur?EV)-am*(`Z>f1M4{sYigSdO+7`aHLPXJ)bPNNZV19n5)DQ53DIr* zZr!$4lOVTUP3eB!ZlO<1Da^>zflJ*JQNLYaYI=NjCs1s9&&{>cZlb)uK!115(BsIT z?&i2w)C!YL&X4Y;g}_kF!?`2z)GdW!sMGwii7yT>6(Y86Xb zrWH>WP0Q_u>us59SDe_J>-dkq7BA{fO?qtO?L@|xO(pv5T81e)BcN49=T{s$3&ppR zEp8GtvUD-3!P#xoOcVJ~jRn>P)yF?(5d_}p6!Fdbm}$5hX3PRs{oBkPj%3>7-Q7nw zyEzr*V)$=Vf^Vqn@CkNXCyG1Dm#0oMuh)syOf$MoGU3GIJw=ph*t*I;8jkG z=3a2a*DI=XaD7|)DNpBcmk~V<(HZaHLwCIdY}#7{HE69;-B$V~_8Q3ER`RH~Z4O37 
z5W%2CC!l=2vE~4a?5Tm<)NrO|=pu-2CjhkJ%P;C_YsIDh8}3t-O5hb0NmdZLOGq_b z+X#B^XvwFXkN5|&B}!F>X4C+p32pDi9Q#WmO5=GeCb=Zs)<5mj^-h`YN(GOz%d~~3 zQa`$}9WA~=N!lV|17jG>QVrI(-&d~14Is9h!lbusfRw@t5L|`utbK?ZQ01!^xHxv) zARkczv3-86;#jmmPiG5f1C|l3xVQ52$~+}_Pu_O7iZR^P+rmJ@Xip(*mT;g>a|NnV|NFF`CA~n1Xj$nioe@eVcvBq9y@N; zzj|&U-vjI@Je+Z8b=Hl2m)`D}QiGze&=K{8w;04Xs`;JIF8Gg{Og8zFC_+x8`8F>M zvQ^H!w;iXYV}cug5-vQFTvFY;Bl9c*rt~w4|FDj_R8fXGW<=L3S+Z}m%4un@{b6jg zTfLVxt?kn@UziU!CeLMO8xNiA@2IwD7}j*iswaJBAiA4CYK>vV;`r>v0645juu41mIX1#-`ZRK!IB*<>S0-=d-6+>v! z`SKPV7ws06CQF_YMOf1oIa`Q?ht&_~%XV50r(t;YgpW`o^+l>POXnDY?t}+6=hw^4 zZdA8t_J5x0xPaVVsWo4#o2ED04mNon5`Sc%6&pjws~o-1>N#9(wtv}|m;1n|xCHFF zPZ&LZena#pU4IUw_ne|fIT!A;%?Ie_exq6@=z?@{DQGJ-LN-rp0n;*9-Jfo@|2oV{ zbQD;E24 zqfz^*pyB(;Sz)g9EMG=wLfl z@!2o(&$gO4Ff9vgIsPotS#A15o;-pRjcV|4Av8?Y_QI!F}bdf9U@ zy$koXPI#`HaDdaj&4*YE636v5J<>ltBv3A??WiXW|pOO6P7la*i0#7}nMjkezu zw-!_tF~vs*sFpwuZxGadJv_s?c0efAV{v?wfJtUV71r3V+|iYmT6+G7iLqkZ`;oI@ z)htjH+b#M=qInAJ^sBrdZ7@X+1wK=em`s>ez-IcByVv4IqqJT@Iyief<1XiDvsGuE zXUE!eL9_MB<#Kt%a{1h9C*f$=y@~nQ4wbK;ebjruztFJeoY!Yf)702f)hm>WJ}ODV zf(JV*3{svroi(c!A1R=A$G=9u>*~BRaPMHpP$aQ?bSzLkTP0M`wCw@XiMI^@)oVh; z6>X*~20$IINtbNYRDXR)yQlxrc=F@UQdXjUK&p|W$H-xIkHDR4Ic@ll=%sIJtCF7yEwQ+7QD#f#JqkE$|6tPUAuJwOSZu+g#J)SvJcYK6* zY?nbDZ65BrdgmX2>aI%J)?Kv|xh87+*2TJvU_Jszxo%j~9CWGwfhs-xNrG#JvREXG)(QSo@k z%Apnq7rxUSWMgYSIbdM=1erw-1>W@ZM`2!@D54yj!YWtYsW}QjWWII1uu~$|y<5bq zqj&JP*4I=1Q6_0nBcF7ickv&OxKiRIjhH#=wl-f@le1L%t8w*}jq6{!{f{VElW)T( z{HU&oW3Fx!$gDPp&|}Q&l)d^GccQl-6nTe&&VLOUeStEsu522{^qkAR-z$$Bj+C{+ zxEA-lSwytqPA#(`bu!3}FHilifi{cmkHOn4eY-2y9aI%hBopE61MKEwp*=M{w<(u- zhBheo`}54Gli0OX;tqVJx1fEj$DJO8QQwn1za;C*okl{oI z{kC=f!v!aDcIyLJ507wI_J2VmTV??z+HPlKDcb+|TfTp*=>KgM`Egyg98T99N%R+l z1$kz2pE_kMr-@oHFN0_LMYDkuL~K+tR&>dzzgqjLpuH>ziIQFqGNyI@j z?X-vtJC$DczR@IwK{z$bvtc4ajeXKKrtFSSxIZPGmJ8VB^!eN^#{!{CtdCO80RU`T zmh3*j#E$aAT_YybKs3Cs+x;%F-xXJ5+UV6}(x3itc*aWVlw>A=P79?t0gky9uYkrs z1Y-E z2HfB)Amn0<-qrN@tWUxf&$lH3!O9wV@2iH0WlP3xJ7SMWTMz17gNztQL$r928ZLYbz{2i6`qT=H*z3AERQ=3CsY=FSRwuJ*aR^)%}g zS;{HPg!i1{TmaQX(Vol>XX_czf!mb$L=@~E->9xNPoH)!>nY*-za&XhAl4gsEaUP! 
zIP%nq0Z8q48L(xS&+GmtJ9aW!*-fC{$jz3Sdg{FV!xuaStp8Jim#1tZ|7oMihJpYi zT@dmKdJ10TUmk_?67@3ow}?~Gnt$l{8TSc}?ylOwlYc$yU;jvZ0N7UI`@cJ*|M2j# z1t&nmj>jvfr0o7>Z2wl!-wOJVi{t;*8DwerG4^DI`7Lk?n70OR4m6XR;D$bL0$^kS zH1%e|-L2y0X(mk;d*xSCB)V4jD1XEwV{td)w(5|{bJpljW+w)~_#WBhE{4)%xe_1S zlsx(76x#>@!j1ew@o0Ll0v8212@v6x-$#7|9GaG&{`-_`4k{jesOc<-H1k<4pV`p(#fybfdaOcAB1Y;`Z|E_^_4Ao2(a`&1^>Z2E(xBd}%XxF( z`bCI{6H;Xz*}<_pdCe*W)xdT1xko@Bl#*0jDX%_KqHI63n&Y$wH})XsF{l(6Fo{`N zO5Xj@c0YqZ6v%l!a7rHM=&Pum=GkSH{h`huvGWt(4WH?{QD3xkyB|(S=&yD`Spr}^ z+8>H%WCBlB1F>|qEX78q@eFXPiOJF7{^m~7==}PqN;jYTSOi$KMhdUzB z$ZCZ*{qjBZ3gK=a$}*G_MaNYWT4LXLn`3!Hv&v63&z79elKNw%fwsYPb^3*jfb%U4 zu{a2GE$d+rk<58-1H|m^8_s}NLSDWmtSo(BWlzRpw52%61R(QV&_@%?Pek;_OBC=06TmHB@A;7g)4qh* zPgEzHN!Zxqoa0VWs-ND~$NKDl44@y}+pm#Ia**)xIp8e)%x*oKjDqdKp2qas8JzAM zQmy>}2@amjw2O`od8g)Ag~cu1lucVCNx&cLSVMg!Px(n8mRCp<-%K9|*cxyD$=3L* zQ~KkRuOufWj%&9erv^%Y4F8N3@VaNx|0`CU_GHWT;uG$_!s-11uS*he^OraG@5=Yn zI{~lzKOcDXA1xgq6b;~YX;^~({pMfCwUz^%F0H~7=KmH3XaHXKA9R+#6?Ce;|5nid zb_SIea&lj0*#J}2O4+!F#0v^%bzNt~E+UwxBnoMzKo`U0xE-EXz79U#T2Se%GwOL+mZ%{&jn3Z${i8XeCd6e2Y)esVts1nXD|{oc6-Ba^>Y;uJhUr96s0$hwM)CAMBv@Nu-8JPJ6Y|=o7s4j#vZM zhleOU!&n1@(g%`-`~D-Uz<#=g_BC~&!6 zswsEcGV@yiQa=^7U#7ChDI_IU-?yCGf65FU@M@ZWZxI$|T2Wb5s+ZF6nkEiteJ{B) zW;!lq(c|qXA?(x0XmG9FdHA}-{hjk;BgTPMjk!!L=6x)<6rY6ZJ^ z+0mgVoP)%&r5PNQpe6a;AfZRU*X+9Sj2c=nz%6dk#7TL%dZYB)TCx1K5mW9!#UpIK zezW_UxbZXHN3P>mqqTQu;YGs5uu*oI8wjtJq>}RRT1RE3+53THq)hXAO{6#!9}|on)NcZS|Ol)Jt5hrImoW2RiSP-U`X3)pAvwcv9hbGC_W#_ z9EUGG$W_b%`p6Xn#oH0auX|3vV&a)<70xq>iz7>|)cbQc=KwVN{O}9dOHyrpmogp5 z_L&nXxdMY~stDLFMusR79jpml|yJq9DCV@1S%DMMSqs6)B+?Y0`Tq2}Npv&^rX_kU)e$ z0tq4CazF1mXP>>@-~RcI@s8&YBUzcOwPw59ysrCh-1`7vvIQGF*0lg|El}4Y;TeH6 z4(v{+aIIN%DMh*?oRu|%=3Hp?8P!ODA3^a}Yfws>ypKV(E%ZfmeDN;Tv3QYbA_XrO zUElmz{<7tZ(^+k^pPd9UUPX=ra2uZ>-4A;b6ZaZQW{S(TSw?b5xyxE+>_^x&t$P?f z9}uy$#t*0}v5#VC2j?Zmn>2d$l}LC_#rnKj3lp;4O6m0)$ap|M*S8>@rlABD5RaGJ zpASh8wpQ`F3r%NVG4?%b#P;9424`F3YP!NI?eRN#An+Yirn;=+`oTKEa_Q>@A?Kbd z{eb4z`i_0oNrz;mRemHYdML2xuhE0&>GB7){-fg{*;_!o2f$Msjllug2BAI(r5)cY ze^fMz&h&8}0)^@bxqc_TYo5dn!f|)D3p*LO>8RN1wxcYIU8a|vOTjB&6Al7rAxPeu z&5K;F?q-3QzI^1j%}1ID1pk@f+DflO#lyY5^!9;mhNZp2lBPiO`0wYoA>)CjUZo=$ z;Lg#-Pe^07)G2(|G^0r6iuy+s++QO+NvcXeE+`l5EH*zd5%|LQ_3uOtZK@PnD2pO9L zcKTjrPxn_r>|vl8u&Zvs!f8|_N96s23gwt(fb!7}0Q1=!w_xsS{3m}4^j$_Sh7Thh zs>pp36sZ#ER+1CV3;PfY$~n3H0R{400AcngdVE1MsDL6XkcM&{g>5ggo1SNJJxsLF zIr1gbkGBKZyX3f_gD|spb}R|Ck4vY-vJ2WoN^K$me^&`2b&`=J6(A zIrD5aEtcrJ_f;eMmEVCWFEvwwj(iQs0E~nczJE~4T$Nv>G!9$)f?Mz=%xL@G{dqFi zFSAIBe$y6_^Ang9h=D-H%b}Bp6Z;ZN86=`M)=g1+%{#AkSoKGR(^|TVLOSxS>{2nGr;hbG!I}NnN_iCeMt6nv&w`-ASk%s zZfWKu+69@)o(!k><5u=ZW8TSIcZYvlIA}*+e#r+g0Ek0OVeMXWAm88~rh*tQ%}`5N zphs5lhmmO(m!_MMQpCbZr?R0r%`_RwbD#ypD_kGk`TQ}JO|5xs1wtUg>~JA;Xt?6A z^&K6q;RJ(J#)D6StZE3GG>@H++i29xHClGJry)1Y9AdU!ZF4d*zL9`ywpUcWXnC{$ zJ*vNZ{$A>eDZa!aA8WqJNC0S>yzmF}jMWtl=%O3Ugm z-z0PxL6gV8N_^=5z4^pXiblu!!BEB5C^FA;a91X+1i%XM0_+)9MRs>9 z^OcQkOqHm++qelDVixeDg_c(k632dI6j0_4G1-`wn5ui4I2SXF+}FqgW~_gI`_dWg zfW$2*ZJd4U+ie4O7P^eQ8l((a4bzC4+umQK6v>FicVd-7k_-MFa{Tv)uPT5QQgHoh zY5{d{cV&<5r);^>xoQ`@k|h~CXD)xUP6u{*wyJ)yGB>0lE}bwIuD7GwB%J@9aVW>r zl1^k#yU8yaWP8*Rit;NfC>o@Vzi^w3N5*>+ubR3(FAn+@c3r1S!LNwA&o=6J8e@fX`?6fZ9bTGt-~EA{hV1 z+#c0b#bCOD0`Mf+WmT2|FAjGQf`~V2_V@{85FXKb7D}qHowgcQ4~2BSG2p(Tos{OY zu+4WZb^}1H&r~pZ<~yIc1*9@3RLVb3;>Ry(132_hc%&?ThM(;aosLsw-G3KSkrBll15cVrh59pvh9*m zb#vB|GA@9#r{t);uR8E>BAU|A=b>_?Q<{?2RWMcui=Q`R>3^w)1v9T&OxbttB$l=lhmfj*@cV3m`>qB$ z!cFt7VHE@!Xdx~8ZcdZEAA>1)v$`oIURQPnt+;Z0UKET4fM6eK*$*Afz1B)C0P<|t zRPL(uVlVbvkwN8-JFf_0!#jINuIH~#_bB&3gH&UV-pK;97ab`LiMMiOO?=-4<^YDg 
zxU+6JHG30e97#mFr8w?Q2&6G-gV1ZKiIzC=QndooMaj~;R;FMQwtTt3tiRw6T%ut2 zY0<2EV}+Z&d3#N&Md6WsVzao=ZZXMn2)t}I7nQJMRWz3)@N^&fOya5y1I)5w-jf?8U+5XNLFdMSN&ChGJH8zcOwN= z`I7_!v+->Y5{4MNJ*)9HRmm*VkE5PXz)Q9Q3yX%hYNj@V&Qz8uECn~OrUi+Q-NhV?>rdI|r7kRl zp-()?$D)fX12OgEF1|RAsKU|)0bN7;oT8E-%F#RWAWdhav@;}5yNr9&Rc{6>sB?s6 zr+i{H0&wbwt9+;`R4G;n!Y0Tt~LtgR_220L6X zT^TnleYs^TJ}G(*bjWKkYZ)aQPhCG|+USzs3lLKKY`^C$O9Xkf$N8cNGw!9WZmH>E z&NZ_<(h?D~YeHE`P2SAuLj=pNJC7{T#Wj(ZQ6xXFBLE@7iye{?Uiq|HSxP(7bnt2qB?{bX)(=jnG$shV=zkFTC>+DPJE1R!{b zDZQieqCiC+Y}Q{v6;D&CY<6{4>(+GWPxYorKpzN7mku;OFtL9^8uJv%2=xSriFC1h z*rr$J<$YJ%Hy++_*68Kt8%(^~nWA4YZc^Uzjmg!jbEQ8>(9x(!b6g*a_+(LzsniJ* z-`+#cT}(IG9GED=GA@3&IUYMJB9y~x>2&MGYydeW7dcTS z(Dl@*Dx=I}HNhUq7)CEAVYG>RHRZ+7;ay8HN-2M2az5t)#}yrpdjK-WXCAo+ZWFs| zr36mNk)6^~A&vax!5{!<@zyj5TcIi#Wp;?^k?Df49wOOEm`XF#>yj2eX{Eg>UWNf> zMLtEYC|I=34?xMQ>z>`Yf^N8CqgM=&S=%jc>hId5rAvFe94-rbidcK2oTHlLtkd{U zvX7F z0~U>}AHYU3fO@Fqe-SjZ;!)_nr&b5A>7iY9VfV&5%H_ zWL_Q0d88?|J|fel=adr9OxEab3<4}<8^qhIQIJ4^Fu=z`BYxpy)xKs57?qXt)3c-s zJA*J$2xA~s7ofmJTZ0=GoAyiy7gmxjc=-yu@6v8w)+2KolQS7Qxz8mmrnH4z15I9J z_{ePTjgP3+17iEbIcHq~@rbc^iJtiZ>vO*C^VWtn?j19=xEk)^JcN`GkT#*B;dc*? znX8YD&GDPKwX;?x;9psei)q+DE&bw0JycXE3g*wbd`boUMm>G*ULDlW9B(Y7ehnm$ zoCu-fnRbOgDwm>r3Y5_jyQ6WP9YBd8@a`$Xo+^R<56o#^Xy<#Hr7OMT8e@o`z=*uot%QvyiS45`y?nO$E(2fFZDw`~N@t!dE)e+KiTv&JZ(UlX1 zm*_}?SF{0QeET_R%yT{)l60;t>o5t3%z0LRV2s5KPmBzuvPyY;xO~1)ZJn&T2zi6F z$D7(ufw9oaNl3l`Kg#!To6F|?Fy9D;gbas?KJsXEMxV&cS%s{a@%Cc)a#NFiXotg* zRcc!^^_5wcopBW__6l}F&f-=|MGYGcKVhax&?FlfIZ7+tWTT!WZ}o(a!C~a((#^k~ zyk`)pnJF}7L{jnllX9P<|cUOy~z;=PhlP3HlsBSX*rlg&y^s6W6jEc6(Pb^@PIRmmpnUBzL7l zEr7ekcjpJqUe1*mRAIW9jv=bVlQi_jpZ-*Kv#L-I6kjt9Ge1`A)3h(?HB)CHPD;7| z%3{VJ?Ygkli0uinvZg-@dx(hNyEJ4}^%V&*t*}Pu=CsRzY}CknrGWu@eQ516p!f!3 z#ZC*LsGZ_RV;>HDO=$2Iq|9Pxa#X$S@ED>zidS%)mgq+)N#53zf$xg+g{pztMSb1K zpPTIkTXU&A^T`f{_dhpvH&Ne#I_GamghO z)WTCu+0~Tx90?O8KKpQDifpOe*K$$DByD|_=MHRC0Iu6;gVq-dC2q457X~I=DbPyU z-cXV6L)75Cjl@lLh#V+CHhxr%yN}3|dLiEC`xR1B{3xF~wn>|PA7fhXvD%Z0w%P3q zpoAR8c5`kvdKFV-ku0LozOF01hf3>mp}yA2xvF^$$1DqY=+DQC9z3qhLF8+2_pb4& z*UtH=UMDU;>UraDf5+rZYYvo7JJ28pU`R{AchlMZ?;${kKOXtc0{ob`RS=(uy%rht zK}m!VFA9|?JR)R!V836n1}u7OLj_6 z2Ysw=a30T)(+TI{Yp?(M!_0nT^wm>S%(#6>YejPLPL328^18uTAbAde+gCU+S>}yW z6KojqKaz1R;fIG+AhRhnc2SA9dUJHmdPpvaJzlCRpQ1%7^(64xw-@BAJ z8C0oab4uLnXxeogc@{mA?{29sH`tJ`jqYpk#Kse>my$UaxERe6AW^dTy8ACECwlO8 zqyuE>2efOQfGh5m&5+wyy`zmD%x8w9<=~&en{SU8Kq+y!x&}Fc;kNw+3>a~*t?XJfJ+Y&%T(0o1@IieTaL|AlpPo&MmN)!T zX=1+KM_3&VdPeG)ri3$G?T@IrqH2-bc?6G^5cMGWS4I;6@vLfCulIkjo9-cd2Lpb`jSDV7?>*~K=b47yHl8w zxu>C=Q7W7vV&gQcrO$jb>R-74GEUHr(QfJSPI?1mFKjEP`Z}zgygcNV@-vN!xHB_D zSroGf58P=z4eYzjmBJ*kRop}ypb}qmhyu+dkLTS$m77r%OikGm?uIG6-Izd8LtD~8=I)$)K{I>S)na4r8J89jePBC0|H8m zvVNjR4EK68p8h*n@b3@%Bd7HnaM}_}Z2OCs^-Bqn)(}@xvj3*Re{aeBs_x^_^lD&T-X*R*^ann5GDeB^H}VV6Bn zZbCkr;^AX#e&>63VyRx0@#P?&J=#}`?wyx65VpL!qR)$jf&7ht48m`nPz$}~Jip(V$jM*!mUA`ZE}!ykW_h>IA{lqn5}wMPxV7S+ z_BG*0|EV)~ZEmN&xRrlbGV~;8C+BMKRv;NaBr9ufj&ngCB5V^jHa4~j%=+eWy{bBr zR7UHY4{C;mSo#_SO6S5Zt0((dB|| z3vKud8=e<$OoCi{M!3D|U2U3QPji+Jwyz-EJZKEK)S0p1kG}p3-%h`x=}nSAOz>5` zmO$jRD#yqkhxSC0i8ao+lgA35<1x`|P8}MZrtkW9QhL16Q?sy9fX!;T+Gz7~QUjWy zGU|Z$)E;KCq#rSzsz{WCNzBf47Gcl&3Fy~7_4v&)BO83pF!*t*{hBmFbU-Zj=Idir zr*8cBpSNkpf?)$J?tUpDRICoOyn`3)D?^H=w?H+I=Js=v{hOB94*VxyFdZ2QCRT2{ zqpQesi>+Lkrnr2g6*pe=-qs8p`=-Jf+HXL+*JEmK?Y;!n*-IvGiMY) zY@n+*Me>nhje~&e*&y}LF)@AG)+*rmZz)9bb2$2{fazUL5IHs=dALI>-8b<}x{1Xr zpj_bRq>a;06%{)Z`tJMu_;la)JHPmpBQ@HC0hHI)Z=#HsbBCCHKYb`+KciNlt4fD2 zHm=vo4P0f%7}dQD?oEZ%Fs$bEB~MqLa)mC7CoiTF=HmEmZOyP{DAFm6 
zAFueT3vKcN7?JP8nfEeLSpfW>tJ0>g3ehM<3wC?biQGtz3Op}B@wfE;z54SQ%3U}^^<9UHJBN;#%vgXnz!YHK-0o&i8o2+{U4REqnh&UK9 zZN={(hK?d5KYaLP;hYrO)@hZ z1{W)X-;b%VXlLKSPgAxX#){HyLupJ!kBPpVpIwopAK?^Nt3umnc>Ks{aYK;_iPlk| zqpyec7Vm%qF2gfdLZj;yZeT(PvZ~)rXGzSYDWLSQayct@ zRouYTLFeswEY z3%P&v_WgUr+ta9t;}*rJYDb0ukzv+ahEK{dT*-@tl|+{<@2p+4ci3}b8#$M`rYCYo zR#io2=~wTMSS!%dFR*Lrdo^+Vx9s>`l=#q{U&G<&PX(*djqr>b62$}i&f*Vf)~IHS zUM;5$<@A4&&+DylYj?cXQZ0t}ou$18dnrb`4|{n94n3~QxR+APyCT0r{H<*%rk#LD z<6uGhO3;g%25eWJJm}AV{zIe2aojZ5pHw_~kN)L)ApvxBQxkMhWN?*{k*l=nut4Up z>yb^Frmd?i!GB7!vUL(9IGo}M%HCD? z)3d0t65PocD9`fHgT)SY%#GD1YWGcfpzxpbV_VP7nR->x{h>y0!A9y0VefuyhZ>yq z%Y#M&$VbnEK~{Z=(!!8K9fF|?SDUWUQ=%A=5Q!ok#a0gM=ycM`yqN6nusl!V}{dA*#jG7 znllZ_G>9z5k5{p|=e0krP*&MwQUVr&ZBHe4csrII_!91dFVSF*nyA5#@x*c?MqPG*q6s zNpZJ0WYSGcR_;9wy=S9lz%3*5GPO%d)7K|E57uAfI3~IY77IMQ5AJ@(&n{^Z`~2(t z=`K64n0{UhG6;TeM9!A_fLX%fwprkI#7K$Gv_s@LDS$vPY_k<{*~D~M!k$I+>G)~` zi@&CFqOkQ+TeRg;$?+41XR9mvZ}?MYF9dG+_1ou)AgFT6PHEN;4;kpm7A@^o_UxJz zR|Sm;=k&YlRl|D{Hu3e7jJEYFdzG({vxNPCTotAV7O9eilJ6EdIf!^egHpmvMfN6* zYqZa7J54(Up(8b$*7$stINm7{gUyE=!}tosX4|e09Q)WvnW?d*fBYIiDbvhN8vX4o7Zs zn>D3f{>ZaFR;mBH9Wo6~hRU*nfx?iu|d%b967Ujv_n9Zu((OY|Qr6Qd=69u@IA~xT1`b3AB zJYg+0{6^vkpqO{I*QQh;IBPx47rJ2^RJ2+;CL2N0`@OyIh$T5Y|}=^~%5 z{-9FK6u8vzYEhw*AE>DHS-yo`anBfeM_h#tDR1hPK#9GxJE>?NvT0~5I^I3y?=t1giQneiPzR&U$f{2Jm2uS#tP3UvF-8TbIi z{!*j^3%LH_ZPd4NgWh zUy71-#xH-{zT`H|N?xv&Y}pg^ePfg1mdODm(y0vb|M-#q^v?1dq1wm4xMdg1ajF2{ z#a`{-?U6GH^3@KbON&C55u8uL!e~e|bMCc3rUgw817rXRKUpm6z7k&Vv_i|?#4B66 zb*JImt?mG-hU@4{nld~9t^Kn!wqbUwIn{f{dA0jj<8@Uw^DU3YG7pY$NRb@P^eX6Vlg7)Z8Pqkds5{!Z z8ru$w^Z_oP00r+G_v#nQ?#Aoa$tO?k*4tOv@%}WWxbgI~0GfZVW(V zK_v>mSfYQBb~ns`ZYLjsZwR7}WD)SH5mcoE;_?xe0{^8nxci?}1 zu+jt&%lN}O=D*nOe;7OYW5NDF!g`1xM&|NAw8H?)1ChqA_bW5)56}N32y|oTUF!S) z*)6|Z=09iqlPoZ6lek5PU#`XfyyDIipc}9<+^^T{f7(@W0kii1)Z%;lMgFn=+sVz# zlB}V#dC0$+ZeTa*!@CXPgDa{WZtIyRiEgLkr+E|xi=su zi%?d@C_DFcp#wLge{G~2m#GWlRYV)1epo(0WuAUQMGXJd9tEx~<-b0;$p<0sF+=**C$E=A%t10tJlRMV_HkGJy6Hf2S`Plg8sZ`g)IGj2!?~HA z_z%r)%%8t8u-beh>|g))haLZiE&kf*r~b0ew&VH-*?A7w{O8VA{KHf0fW4n?q!8-$ z53=NcU+&KX`M)FghgJE1*6jbTxj${*|7*MTgU3?$?^}8)vw0;1VdXpYe6q?l|GHG# z?e6?;-+d3ZcYk`-e|$TH$fS=hEixvYbOkKbhuAkSJh7Jtp2cha{@g z{fIkBJo96IK=kDKQ*y^0@0fj@*cpOtyx?ZlR&8x2w_>&F-K6XHVUON;+OP-1nuu20*MU3x9-ocDF1|aV{@!r|%42PbRzyz;kkOAreJwt;VanOeNWmp`pTNQem1Q{=F-R@H?J=XL|=7cT`A+rOO2vUTj5wJi1ee z@Y!&!0#rHCY-!;lRe%7SOj6`As;!F?zyg?@H{_g7Iu2V~vvXHSzbc%JG_h26{PnFf zUYuDw;~-V2i7G~!044+dt^s`TYHz%4u?}EHuXN}DB2hk<`W&mKa49#d(eT;b^@`Ln zik)zPh`=AL?*F#kPu+b%-YS5%I1Fe8#f(~!6&Q{~+pDkfLf zx~|2qd(5s4zWBBA`ZDXaOJer_m_(kIpdm+*VA!tJT}9wwZpzJ5F-W2wXZ`Lqm%r1d zZ>xS9FM?!saeeI{cs_l_@W7j|U!$qtkc0S2*l$SvRkr;u{~vI?P;WYwbnd|8t$aGe zsrP?Y3V+%Fbq@gT%A)X5{GYTdhkOQ-_U)rz_x0anIRm6?LDFYEX8HAe{d=H)0RG16 ziPoP+-QV=%_a9t-8XUhp`x5d0-|W;r;7sCmeIdnPXcJ>-gb9T;U9Y84;U~9 zuV4P1jrxCH`8CT=JI6fMj^iJCAqO}^{_^*K6~*}1n*lG>Ob;}hxu;Tf;U9Xz4wy}H zH?IHFgwX)aT&{6W{ZlWZjs?rHpFj5x6ZSF$XeJSU(fOZxp$M#}|6O)}nAc;+B~CqV z$c+BaymqMvGl}GOC&pTRjsq5+I6+|1O=B+=Tjp`m2|55gavn+t5J6Z3ZH~IBvEYsN7xW(bcRl}H+6uzLta$_xR zZDD^;_m<{kF9^sz%dtq6*!^F-s* zt4<_C!{;z=;k!NLh=if#GH9J z6vBH(#Gd!3HRo&RWn=p!x;`#)^|C~;kw4xRSL>Zqf3&m_Op%ZuD>ZY)LCFYn>z z;8yustHqZ#t^}r&8l7f8i}Olyaz43r>sG}R;a3Y1rsI_TMTVUT4oxK|ry@OH4K^oK z`RvimXfZfgsWYVsJU5H}$dw-N+7S^$Cs-$+BlZ%{CPJ%u(cVSm3{gIm z&KTy%K!9ib!2+Eg0v9+Fu%1_8gGE*ly6z|!O{ktxYzP9?$S5uamt4H7+mvAJrVO0I zNM2?x1o!r(N+1gh8{V!mfcyoaekPkb{lReg!mGtk%x?cuDe(bPJpLqB^3K4G%$VEp zSQnHl>ATL(`e)jurS&YYM1kdctIpe{oOd>3Y(=KeQMjcUD4d=B*YCw27V+zZO7sqV zPH%91F%%8AMwRnxiRQ+}a%;8K!6F~OXWR*Riz?r;ttAhfne?)YxEvwCeI@+0bz;gkbJ8#OtHf*Y|jL^6||!FXg+2dOVOi{i>ks`AZW`zA=En(ulk+ 
zeG51nh&Kde-1FzLdWsC!@86J0b|q|?cpa2mc0M@)N?KT0$bR{~6)(w~wJPB(EEl7! ztW{%dkgt_r@Vq2N8r{z(?WJ~(O~y_R;KL5tumferIDjfY3S%WCqn>zfN6bfEqn=o9 zOEJM&0&sSODA3`x22htP-c<^FNm^M7^dBQ5@r@k^GyD5KmHqMj$=2&TIa;@C%Bt+$ z6ETx9H8QShnReVd!gE+Q!wke1HYtxT8Czc|Z1HAfc(^D!(RsRD-C?|dqdJ1~dG$=%MFDwbBgG+c)7NeHkX7q)%JIieP zk^={9b1CYUOXGX#6XUVGy4osNOKul> zA2~q`62-+_;t>TXYqm6@_>JAYstt1aBG(|KX8ofVJgME45(phH>hbcVQ3~skFn}ar zS@6ghS3MXXZA4-MOqpJ41ggiZx3&rHu~TF(tq2ka8{H|r?$En5v!PS6x&9PuYOlL3 zRqA$bhPTXR*6@HZ5Y)Z!B#bOINEtg4emNh27mo5q1>_YOI%DY9m4e875TKf13*R8I zpG-V5CDk_JiT4tvZ6g;$t1wR(Ma$?a zgv6kw?~2PCg_N7AQfA9~*uqly)c|`x>?~nkkyu?OCNc$LrC&QxM4^X5>kD+oP^Z^YMoV@+4e19gK)o-WPE#DL>W zzmWF9R5#C?-Y|T?@y1l%)>nr&Zf#U^R%B~!oos4-iQ=ydGuyLizLBmwl4y_Zt2a2B z*~=RDq^k+wG!p>xz4)ZbyD^lhTB{L9SeXMtC~1@2ej3=ltrQk2nuFcL#Moh*`VhVued9ff6)lch;dlk4arNT~B^oS)H?8AM9j5eQjxY)$xlFBmxx09*#MtZ!2(}a==lL%SV{s$nf4{YWR-uK@) z+^el>hQ)i=^#omNn8vQL18=kDOIe#lj0l|lq^xmnMx6e()K7RwN}T34S#{!mrgKxdD?9u~x@o2Rs=zZv z@*yb~pm=njU^p}b=3#6!;2g!*5wzI2or%pc?F^PXVS1&X&5j~G)p;&;h^oqOodFcc zzUC$DzM|IH6hJCGwKK_V^{3Da@HO}i|H~t#$1N_PV)tlyXvLApz0Z+F<=`?O(n*78 z(LOm1!K~xKt`z^tQ*x=Ig(>&l^#hH`?C^~X=5wmF4%Dfe60b(xy-Z9B+;>7V6@Ca# z3aAQZRbs|42b|F1z-9CAl9at6@incll8GLP$SB6cbfyo|sDN5_V~*kRoIV8^v)SGL zAh?Z>U*28skFNzpdSIZ;7`n5O*&)ehm5_CB}N95KKmvpL!u ze~$oY!cS~_4Dxl04Q&$RDY&be=MStc6=G@HMa5i3x=zVM`C{$_@@mQ)#Fn32csI5z zy!4%x6ObJ!6Ve+st18#2D5IkT&B4T%NI6`SXyZ4LFm#w8s1`C@clCsgN@}V7**f4YcU<_%+q^6F@Og+WL2VN?X2H> zS#bcl#T!W8^Qk->HFupFw-yyKe>i3P0Bv*}7Rf3#3DHcE;a2Pv5f|6Jqmf`$LjW?@ z)TR%UHs-ugL&-ozGPtTy)xFMuY@w{3++^1p(70b#G`pW%r(e@WV*)8>;vp|jWYO7|qV=JY=gNo-2j7&yc?joj6UV;^r?_B4d}6+LT6 z9N`JbFPf25t-+YNq=TGK9*EA5l;cAxMAw4?9~1QYHVoXO|-qL-%we>6~nk0G)Q$G5l61?!mw7>t#eIEvTUmYVL&h)^n6=5pNi@~ttJ zUbB1SN!HkF;Ici;Z?FtGIxshX;ai;#;SHPuZ1v-T)$Sr6# zK}@!H@|jelGnrW3!RGE+yn%1*PxJfHLc%P{CvJ5>f$CR1Kaq&ubAg&0T718y%fCgYR*}w}IczOMt5%I;CMHZJe$E`u{@z>IQ72Bxr7`bs@L0c!C zqk&U=hREuk1~2r@tj=(8hCKE3I{;*~YCODfT_G_}%6r4X8an5>-t~+!g>Q14p6&d*@F&S&R$0*;u@=5W>>Cz_srxwj5;JR7MVh)U}aYMK5%4e zz>d=jkEOyi05+U%S7(uk8{;=Zyl%t_BZ^*s)iF;gcluiGLd&4#H@uvP|AKEuy2K3Z;L?$b}`3X|2CW8mSsVrh<{ zr{Kd>?n33xJcG&=n>5g01?SJxWx$Jj15$V}{BRcBFTpCAh(~X4%vSa;-%MRK8n5e~ zi_su>vOs-aJ9(aj9?i*ip4$C*9pu^Nn|OikY>aUEXm7e1ZA}6A;TiWtrBtTcSHlYV zCW!qCn;TCQ(nvw7%w=_iIa;d&pw#R0=V}e!qlxaTDtqCR!RypS8rl03f)lv}2lJEI zB6-wGadJ*hHZ{&8ND_SYi;!wR(^BQsK$9=sAemY$;|utQ{SVa>CjHr!#{xq9{Y=Ro zG}$!z#mmxrfk__XlK|MD!3UkVc8x z++2rm!ukr0T~ozTMmpkh0* zB4>f%mJpTolg)}Eo1-~gAu0zls+3%AkvfnV-EN8K$y{tKy47Z7-VhAiX>U3qwTGy| zo!MPik4%~27kYPkcQQkur^v8+=$xJXfC_w^_MDGRuc1l4O)so+D00(C; zBd{(i;{l<&xOUd2H`l?Ew%vp2;i}f#F?W9ydPLHDKsM9ZP=1ymC427j*TJ2k9K>33 zVkE1qjkR@qdRk(wUN!m!gk+NzxM85IGIcY*L#-s?8s&!ddPN$EWIA2!=QB*uLF)1fGp( zP7_(j$kCVBA9L>rUtd}6PEubhg6JLGdJ_79CN?03hRc&lyLzm~Q29hwzrokp>xd#l z&s~#{0@-kbYz~FM7gtNC%j8~UV2#s{uabq6U8nKF?g#qxN=69pcy8_frik(IryS>1 zAO0fp_3~#-oDd#|HfkWODZu3}3&!F(7BerU-T16yzxxJU2U@yzR+4v9Y6lWmjZ$UIcUcwFzE$()tkZV2 zo`E0jyIYm)K6D<~opymAymRhOF?R6E&j)6dKdlmRMn^@)J~WocWf z_)W}Ey^*fVPxNZdi2!LYC&ZvUib#*fSl{%FYNiw1J!(`vVQ13dC4#3v>~$i=a>wMj zq5=&z6a$T`T+OCHQiHbW)h**nt-@>M+M(Qt@u}bYYvGP;7ZMv(} z^&9s#jJP;DJ2hGH1-T+;PUJj{Zf^?pJW$uNJ9GVh8EBXlY* zSQp8gu61!e`LOcynJYWJ0yA(_&~G-nTlOX*EkeoI<|FdXYa!KPf|%=;abbg6!u^*1 z#+&Z@T^AY?`(^KLK2*$o{0d32^uo@q-;a@)bm=gS(j~6vvG-z1;be;|%C?r18oxS| ze%*MIic)|rksC0CoZ_240+q&Zs5S1i582dQ4&&wqM>_2SY6Q^_Y2_H&SPOI|RP8?$ z$E0)NYW?}q1U_L~(t#=bmVMG=mSBw==Ao)hp zIFX4Sh@fC89G`3V=?B1a>29LNMHnc;lS#?z?l9s8?_^mZH2j(n543Z|-q*nLPHY^0 z6G!PcT&*C@K@~L(%3fyrCbI|aoAz)$KksOhuob3Xl;HhTE(sYf6L}0d04~PXG+Vwv4TZ+E9A|f_l4aA)C4RD7+noABG5y^V_#_fUM zyeu_aXH+&L$dt4|A~z1-;BVWmlD6t49&g?2FxB807GMt`(=q;DBJd+(7&JX~t4%LA 
zoa@F07~A%qVKR8;V@yXL%Nku3st^m^+0iG&RpGaWGtfYVt|yT0v$5hXb9Rgn?qt|I z6YlpS4Vsp=mcHW*pzG#yz8eH>t1+Z~u%3yoSRtZk<)uhH>tB7K*YbA&CrIRy{`0=1 z=9%L3?z$X%keW1hpoUnCH*a6q+iZ$UtRPzRI>KE+O)7Hy(OYhAB(h;PWC`fP8JBSRTR7IM!pj2t0 zh7v*;3%$1h0iv`3frK7H2)T!udEa;5ugu)%`TcW$_qp@m;gGY>-utY**V=2X&-y&% zA9xrz5@y6=xHDa?LD`S2M} zOY|3n)Fhs~#pJ?b&z_E#9TE|3dTsn}ldC^8s&}(DxMAi%tSoe&q;1bY4C;e;Zq0>4 zs;cfe2eEt6%Tgt?h38XmGPM_IFX6=sg2HSiL&TTwERB84BBqPjq$B59@)pP5zXj~_ z2_dkQdv;b|pB7LLd$mpXo$S|MlHJLl>n$&^TDmk>&E%@&pJN| z3d#c7oK>^-hIQ))(*lr_1;v()ahKwS+&G=>P~nCfy_1R?qymuzY}Y(pM-kC4U8N9` zB7YUg$g~|5<}mNYHd1Hsbb?`_qNzA}e`^36WKV4=`Y<9m{Q`eL-?IVxcq@73(5c-w z!BCgi-BY{CoBAIa(PLO?sdI`}{s~%S=d|u|QKRV$ziN@sZmCEcr&QPPK5XZiSACl7 zkitL^64JGz&Dr%X>8%wem@Zl^^-VOl&>EkWO}9Ur&1HvYzMbi*b0xM3L0L#;n;1T+ zz+?~L1HXCmW-!-Y7xuYNfsk~9#-!)J!KbTx0|VLbbTaC8`+`f%3(I9noBD@(2wa@SfntTjl^{Y0Syhs>IB2 z7VI9s5JI1ZL4(5ilLHVC2vYO@sA@m>Xd=TM?#8seGSamhb>Z<&rQL^I>y=qo)P7sH z;?mpkx-iK8ewVQCM~0jl+F;Q}3+LGzj$v(1YmR$bx;b_F^BTUc&h9d>mUaQts{7Mn zcm?}6#)>y4hN8;MrGm1$UPYL@H}o|3$l>Aq=1_mT`^#st;$(w&u{$fRX0=Z0@W2B~ z|EFekMp5Qfn?uaT^FW{x?5ym?#jfYFsYeJ3QCRO*=#`$ZQ)aIts94fA&g#gxgUU^` zmpr}?^%$IaA_)zu$5G;JDwA9>TC)Lhv;isS#RS^Rx&Pq4LJ8$kJVp( zCl{6-yw=P^@8g4a7WE2(Gzyy4T(UfAKrS2RKCZ$tmAX2La!YZBG)$k~vbag2TS)~F z)Fvsyl+TTElhYI(Id@yWhecf2I5NEiTy%w-sD^ir_ zS~AER3xID0C52g)LNmQ_X*&rbED~G$*JAQDQ#&FeIA7~z$#o4}Q=3ni?|K4ghcY$l zU?kUvaV{Te2;^Rk|BPv=OZ$#4+CzO3`q%5`hji8sdc|ZGm4&K+TYidags8rw3J?6lpy^eA~?!$nwssX!Afdk-wx~EjKH|{@WQl&qn zc?TpSAZNcNi76#)8T56%jym<~1q1zJeW$0G`s(CM~%T2{@Z>+(<7_#Go zOJvmRabvAND&L+9PYoCv_@0R6SKlvq^d207O@V|?Psb5w+ou`sLuWTkr5f@Ew?!+q zr9VF0xN4IECcq`ej7s$!9UrWo^tu7|0yoBV`XtE-v2)9s-x8gy7SHhkxvG^cOLZKk zS$ulqVN^*h40<9Wz9)5B@a93ZY8{YH7=q zpoqAspv%>}fv7g-j>B{%N!Q6IMkC5k0MNCl=Q|lJ#HOuA;q>-%&O-7dkQ5L{6or7zFIR~R;iAq=GkhrR2#kj@a|(?R`Wa7xtdSXu>Bvi zs<3N;yo?ux=c7kE)_hDqN_&w>8>kn$E1DAhghd9~YQB-Nh&6YaU{6Sru$6A~-Q*f< zUg`k1rXQ!Ja8z8rK}?z}#P!7Lenj??C0;=3sb|KDOrr{oT5{sXJ*?f>Q~=$^0hO?p zyDOvOuv0n^)ZAT-TnLK{(nyTWS9G{RxpDgydKV^)QdK)f)jU1*aEbtp}mQ3IwRzINt-20sUIfkzm$S zQ=1+DX|x@wE193?%$a$yeM?yY>*+F9uL8KL_-wmU#H?(;GOwI{nfCS5ov3r2t#Pb9 zTrT+<$#+xBdSa3Q$FqHPkF8+OcwD3KjGF4R#3Q^ACrSW#Q^3ZYdH@h?43nokfsLz- zjI`%0pUpZW8$L5&8=KE3h6qaxu>9B!Dty)`Zny4_pBDj`IlF%rP2o>p=W*5({`GX{ z5)i=4y}WlHA>}L^*1bhw9FB5MDEya9iWik6{$Jz-yml# zu2f1C*{-pR9v^_3{>nfe%@avbggDu4XS%T8mHYu7a4o0HplR(>b(Pb$lPTa2B=@uK zO+FIfHJentaTLt-n<9`Fg<};n`wfpqZ%x&eD)bh}sSMiQLsMAzJUjxb@fpYn86a01 zNSO~73vDun2{#rsPbz$)G<6=C5B3y%=<|zX7Cd(~yy{+wCa~A~CPaU&cjpv^tph<@ zF64>MBz}I`2;HGbCaar-%g53Sv@@~@0TRy0#I6ab1lF*)r8xsbO*=!wz?USI8eI0U z;X@5#gRfz;7sf3sEBi7?3;AOVBfOvfv{;Il#@V>?K}{ejfGdpJ3P7{FwET`{|2e2+ z9{+UB`-pS_AWiu6v14Z=vg8Bwt;l8YaTE`#=UlN>)G0&v#h;Hj9sk+8cI6P?diP|y z+ux@VpGXDbE8kdj1CRO&|H<5@zY6$2Za>Lcy79NKRepMiE-qislR8QY|8wu3&i!X< zf4}-?=Kdoce_Z_|b3d%Le^%?Cx47TrK}Yvr3Kc1yp<)`%WiaqCZBxNmmH_}h_)u&|}U{FuCgg2o0pjH{g3Ua#g| z>Z6U4KXUBDp9_6so#S6z?Rj+wi9d4>_g5gy-{1Dyo$CGNIRh7<`oPYc*LCFpUmS&qLAoe^cRtOOEP2$DDy2P?dss!b9+f z9rn*%QO>5#1TrBpWwHU&_42`7;mz`279RX>`hBs2%AZUkqzYL!?oM^f;{C3kro9?b zq3eAHFz0T*raJ%ld}3z00I%nS14t{}bjUS=zfBw<({0(Ln{+#diI2U+?4l8Afl`<=l zzdU_ri~94;!BYUzF;aO%|CODbF+Pi%mzTHPK`idC&|fsqm5m~T_smNHaA(I}+^GEN zM5WVP)E&x$XMthuQk}gpI2vtL5sUnD?q8uW2w5UKIRQL6U%Lx7;kua>_MRYdS79 z>jJfZ|36fOu(>u?L^b8c@>gr>l6N=|&8ymM2US^P#NQ#7oU0o~W;5=izkH{LFPS<4 z09}Dfk3(4RYlU!kcZ3Fhg4s$cx~d>pE-|9`L6 z@cU}n0*5MBnS;hfv0f{MM>|=Ylmeu?sRKGOGfEfpSK3v1?abX@?%iJ>JMqzs?*Yqo z?>BrNXCokQ<2fSpSvowl7!TDA7u|_wORsDbx+5f-fz10fq7OHreHBxD<9jPY{K`>9kAE{1o>+2yHIZ5U zwiH+OeT~huCggcQ?(({Jp8L#NDZ1t-5Kyc8C%SXmcYX^0cCo^4Bl)GE@M_7qltGc& z>wr@**wOv-=W$tyUDYg(qhFzHbtW$_@1>C4M~DIsva?1T(h)u@ciqJP(BLZ3W$aV= 
zDf~54zXDOwO1j^>zF@jUUlJOBS5;Nx?%iLQzYRU({7XFycmP?x4Lt&RdRAr1c&Ylb zQ#4!4Lfe}jBMRJ4O=X9?F)f_YIMm_c-2mfgx|0!dUiM=2@Y|xIo16L7yW;I_F3sHz z`b__==f6?LVRZft(SgTWC^Z?`torw_U9aiaTOT5uV&(=7oG~D0&o&Lr9K2O}Gjb4|r9-QD(2OLI>M|AZ9DOmOk8QSHD&A#p{ z%Fljhqh)M|ENX&*g2~Z1ZDh=rmZa5{B#y9?G zK(NhY7-=8gy!RvtAX#}Nb7l_RkxDV%^|>FrP@AH?LZ@U;K*Kgb<^YoO?I`2MGuXlA{*@Y7o(KfO7!am)14;LU?b+xs8?vxKwo% z(3Z~uK=mI5j`kA$qB&RJhLX(s=K1x6I-%J2vsT3fyAr<%@f}M_W|7+nd|FmVTqxXQ zNHH1)l7ZzJu$w_x0A5^ObyLMnV&+E=q^44@y%$)n)aRiKl8}}t%1qUz@hu%&{t7V_ zXL5vg7{LH*_i2vpIatgYyaBpTJtTwb-Ctj7fNhxr;C5>t}r2nITn%Kxu%%xt^SFF#E+7CzPa~l&YNm z83FfjV{2Ucbew)(LKM#3Uw>l4&wn|GCc0>{D! z9qz~<#qLCkm2!^z(pe^0q;5eo=QSR6EjdZ%cwbN5m<<>P#6AM)PYxxGMf4j1oQcziITvs z%nqAgv~Dd0_=anp$GI~~e8-_*LH=T874R56{<2auIIoafrGuo z*IdE~RHnOyg9jT)=w?U?0#it^2NJ=A8brFY732W6=PZ9p1i^Zwyrk94x$L4PY?MrJ zZ|+<28Z9+l2WXR90jlNMm#$EX_cvOQI#v<$SXT^Ac6IW&e z>UI(3T6ZUwT~5-t&bFj@u|@-9=9>4shJAjn#SPR zSJ`pK;*U+cH=Er?yNM{x1m?HUH5W#pmUldWqGtqEX$>yPZ2-OavUdJ+E>Ngj6%|uqh8264D1%dt9E+mb5esHuvf|OkDGF@|Tr4grI8PG@8 zoY;T#N$haL7fPyar+N_O?h&vv{bk5W#z?}MJ!%Va-3T&Wsu$*XAuNQ95ph5v?6n1^ zVdWfse6fgyW2iGFOR{`-CUZ;f@B(9Fq!t;CRewh?bj!YDEuE&MUMyK znP3Dtg+z>{Fqpl%>d~Y1Hw#PsJwhGLni-dvFoRyzBhhqCLF!GiGw&^)LF^(W@XOVagx7TBorLC*fd~ zZ7r5wwjV>+Nqf^l6nOm#GWCm?u9BsvcBf0tzzdIwkM?&d2bvrLkP!!?r$gFOUuuz- zZ{>VeC6H}ieM+oIP?%*|@hMJky`NdURfWb3u}_SiV#}o3d;sop>a!T?sSO{qS~mtB z^0z$cx!0d*g_bPVL&A2dy=Ha88U<;EdMRyOQv2UqrT5ZuJ$v#5)j5gCFpcuQI?5&( ze33#}Yf?f+oXlcDD{d4eLLms;i~4P-`AlUSM-NDUSAI5@^XvC0_6O}CuO35>O-^^A zxy@JMLAW=XZdDzv>unkCeN4h{`mkq; z?8o)7gRZqpL40ZUEeR_k9@}D3Nai`sTKz}M?+HSEQlMS7^>J3yr?@RyQZir9fwhrj zIjHTdFq=XB4es>epYE%aWr!6vKF|NS0%g*DRg>su%I2tZK>l7EGx^Yr+YeJxh*>Bs zKgfTfPmFK}anK>c=BZF22eU!TKX2s6SY7sYpWNA?^U9Wp2wn2s1e*zQ)p{z7_T%pF4|0we&< zAcUs_F=f&-?Vk^uhXVW^anj3+_%B|4$p@B=HwmW0vPz1iZdu@EdoWzH98l0@2Yqj8 za|}ni4+kcjRGr8LDy~^7AmHVJo)5PVk_QO(D$Qu74O$I(@&MR+ zRS5J-aU?^y$PA9r=X(0M?GE1JJemTLTP}mqG@?MOcxm${`QG}bU=rc@MA{2HW386<6UuzG5*Kzlz%?rFaDw{9;R zKp+odYPgq5#^n)&rG2jD=QvV#Kg=e=4YmO7ve1L?^#U8>9HaO3D77dHb{NFV?EM7X zeP195JooZ;bMsvq)q~X@uh}9&CrDy3o@G8xm&+Bq3&(@$XWQbseBA1$d}HuzK3b*v zaE+fsm%i37exUUXPfZ1da^v6xVUKU&=M_S)SA({=pr|4?gne1 zFNJUR%2|WqgjZ*!CZ*$Lmg{$0LrEARxt)3&8ZUqZoA89}$l_|&hqjYN?<9sEkU0sh zwMdIBnEX4+Vr;c_jNr+;6Ly=e8RZF49Dz6eaGzpAnNe;q%g5$pmlnxg3WQ5;Br-lk zsd;b2)n=jl-TJpXbgC@6vV)mZK1lWk58+-yMSsPKACE-gssdd7$e-SdIqNJzpRAhs zn0>J7P9vL65&$jR`SDGS!UEKpo$qb{F(^G2hOiZ+mHsUW@sFJzWatG zyaY1O>`Pz=>17;jR>88VIMXR%TD?_PuvUOl-E;o3v@Xtz1wzteZ}$kv%~5wT(2&_U z5xTb5m75&t7>F*|k$Tp6kAual%GZFiAXrg2?R9TH*U)0$w2;gD0Ia4`Y3*~D&p0{T z@bWBAa*%a}VXp6l5EIEiVy zXvNzhtg}$!9L%4+w+6kfCrZsg+1ry@U+uvte6!Ls7fuj3yVHxZ+v;4L@Qw4|nMKVi zjZ4QQ)F}o$2@h{JM3pf(()MljwlTe+)b<*tu==27Mi-lGo1}I+{DZo~qB9Z_TKmud z*f+_~iTcWr@5hX-2?g1jmGh9;CdBpA($+KP<8u4iT}G8|hJyuK#M2f)y@B-u5*{Pl zgbyWsZBPO@tcl@4E6mQ2S|c}VBfh5PWprCJ2?wUm4xC%|b^uuxp~@{$lQ1j|wdqy3 zjS%(e%{FQ#3pRjDrq7nNwRDk$gM(JWzA1_XkG-_G<=UmI1Na)xzj1Y|q_p zF8?xZYA%P{-EP#G?=0V&8fnNe?)>?3h3!fmT56`i~V%+_F__*)!P826}E9 zks5Xi1Q3i>DWjVTMlG%dF*#J*ajyz#+YJ}JSswyt8#v62nja*=BRsY(6njYbn5@#_ z_@}<|8|?_|?lj#Q;jk=x3e>2F^VA9=ESLa8ciDTP8q4dZ_ud%YFPI${xS6)_=m8=s z^2PO|Hfqmf;ms%0a?(p$Cm!~Mt-+IOKk-*J3{MX3 z+h`tZHyRJY%I-`p@>D*k3o!3=oe1fxpHqS*pLnOAvzoSI`D~J1i`^`orAli(kg)5IP#+Aauc!RbEPX7pOiE&c8Y+w zIcheK@EnhW(Z!+_Li8B(Sdx|4!665ZSK#%p49y79f`Rx96o**YYQ-6deRxfq&2}l6 zR(8V3r}KFo_ENFSpjX|BLICUh1`6U~A=&1a>>?xUtO^~`=4!oc-C+B!lH4Fz zz^|#UBsOLyg+_x8(t#B#ISe4pDHSDYncr1h`*u9i3Y-EjDPMRqoxf8HSxrOT5G{)( zIV`GG_D*af5E57>OokL_4U!Ld#-*3?I9p3VCy$%gIG6?psLPgRn)g>upJW|oU1arM zcC3Bw`mqf1!$pK^I|UY(vLbO_kO+i 
zrfGH?VVpjyv!_<raq9;``1#_k6+;3|8?_~#?T8)PlipAqqnGC!p*V*<|2YW4*0-uCj9+%p~yi)9p@*TR&0wI3PGyxl{mF8Tih?ayVon; zLj?tEW!25_C;LFGc=Ywe!eUF-vpa$t`dWq8fD%nj(29qhKAOP!@}!d7)fE(j4yTRI3wHHGQv8RzR{ zEn7KG<#+8Pt`%x$8P#uXOD%*U{Kg%Tw*z{<==Ko`t) zxt(eY5P~XNI=H=!lPcMS4L4|}-p=~|If`Kr!l{7P| zjbRp3;?HE2T&hj0vn$hk=>Eaed5%K^}t_bGm|C)y1aRL|HGJX}Bg0)3u>e_02K+&($-y zR3JY|E49Edg$_YbqII?1-}=7Ze#-c(oDGNYnP8!V{Jw1_M^dIHM9iGxJ{7(wsFSahIBCyCvEvUB&a@AIpG8cZa&CWEv*1XdJkQwQkYUc zUGB`qNtc1B^55*KdkNo}mcx$hiDiJwR+b;| z{YdQ0RnbJ9E`(=wKtPpIE+t=PY&yFqN*+h|Ayczu><^q09~RzIP^nJQ){HRxs?xeQ zQI2T3`N<$GpTW~E@WGns)UC4nrPw0ob`3nyx&2yM!d-t)ldB{xrVckJ#; zT>Zk*XD@tbNyD}?AZm@J9Uo4K+lPsGRk}-IV~G4zOeAY2@uC*M8^%aF0Eb9YI9UWb zzh<~Lnoh14DzzaI%?9e*HMR@$sT5h?aUV?mH)G6K02`;-EA?bysB-go&UV>Ji;08$ zZwo2MPtK)xxT1+OUJY9a4zlt4rL1b@#YDC;vmTJ8^y5#7WF50al1`?$W2I1rNXP-2 z-4Qjg8Rns~)BK4Ne`&0T(-AgyXj^q!LREVaY;p`1I+qyrU2XNDM+T} z2K)UG+-PXo%2a5E9Uf;c#Xn|@U^;6iK$PX{4kbT%$;=_hHK9#OW_)alHR##{SQRo*fWMJ$vhMiQTdE zgjR$_y_eZz!cJjAYv4F+&W|j5Yjb_98Co985^YnSzLi-YG z``&lieG7`PI`g%l5)D478Lb4C*);Ok(ll`!?+&>GeS*@v4V8|Yjl-wuQ!*`7-fbOx zb{&-#$lhOrMoxR|Z#@>7?;>1S;D;bxNzzYqX*b3%%g;d0GA?4r8tq?sOr-B@K@^gx zN&DOSX*o0+rM08!kptq+1MKO33P-e9pZqnyl1poQ z-y#UK_$THZ1kkXgF&-ZHez|=m>z6L5nqqR}D9 z65~E@SG!7xc`lv>F!ML3F%7LO&Z=fT4S8)ZSl!r`Fk?0e2{pi~QIZkM4#N>GyuuE; zrl>+RX|S?Cc%Q;m1s`F1Po`zNk~p%cRWyvb_h-v!s|`c1X$Fef?AUwYSuv?e`Q0Z= zaGCtr>Cmi1ucavmNNh7CZ{_SO~w+=MX zV{nUzI?1Rba1RLJ#VDSin&_k3&oeJ***3UCF~Q=5;~J&P{jJdg4l>YD8S43K6=n|} z@lro{aW1Pay|nziR)$d{{HdjnQ_9rSSx#%fpz0(8hWuyl!VGc83$WG3AQvFW6S-jz z4uQUQYrhK+bGy?XFOjpIcaPFA=+9g5%YabF;3A&Y%V z#vYR(BTbg-p>7IcVF>aLD)NzeV~%DjvE-sAi^;inmnOS7feE)HJ}K*~(Z59(%puU5cnMHF>z9yYhlG^X}_io3}wlXEekLTpr}iP`Hs{>9KxkY*_%YUG?X1m!+8xs&PG^;LgCBTE)KXd_4<{dBwBQDD~n*i-6MsLJ#}CzH%0*bb8TXvlQ$^N*s{Q| z$b)~?0=P!K3aH*7NQjdqbQamnQD#FrMjbz*iOxt@+Y8O|pE*-WIY#;tgonSCvjk!+S#o|I zA;u>WTl&{g>1rs+O`tM*_c9YFEs28>nSA5N0DXTNHA~yY!B&Bq%snXJR%@sJ9$hz* z`4*nwKK2XJOUY+S()E@9?t(F+jzr%Etc>t^w~ZCSmz4j-Irl({V4%NiyU?;S^)%Ug z1@T?@Fl1}7Q4XVK4t344p$Q*s{nlSMYgB26IR-@Nn*7{GMha^r>}VDl!;A;zIPf9l zrp97Sr`@o>unn9c_19Ouu10;(D`X?+*(E{lva`&j;5=};n%QIGWQCkHthsGio*S)9 zm9UQmt#>RA&^D2Zr(kW0K|Qoc+T{-P?;j2npJUAE`amZV;?yRdev(Q`?7~E|1A#sJ$NF0 zGb4)aG6?O68FOke5^>qB@L%rWnW1nTAZ~-|P@z$`8>K~jSL*8z_MD?*mbHxS@h{~T zpG9Eh>!c%je+rz)8Jd|h(CM}G-5FPkm$C{|IP_vw|76kSztCVmJKFC)(F+m#}wPl`OA`K5jps?=HnX*Q}~ z7|TVN1D;=npq!PNmd)W#O94Xjte?7hqjcqdP_ zB7JuQ#oea`0Pl3IfWw~h_k+6WQ0GiibwC(lwQ95c&GX_V>Ge6hJwJ=zOLpHe5kfY? z7!)9ZgW=sHajTgjQPByJA#^cVjUOZF^bXyJ3R7H;_rBi@G%Ox<5`4i%F(~Xk++qBK zC0pZo{u`s?jGP(1XfYtxDRz=BgQ8r$8?2aRWRaiqt=JXW%VhY)-lv44vhRic*AREB zhgi!O5~u1P6CiEIfngxk`zMNLw#L^S*hvQun|{NJwEcVd}#3Kc!S`5=M-TlMQR41*7&15E+^SUFPa0v&O^6X z{r(1#e>WN;GrJ}B!4E(E^CzI)! zpb)5^P*I;PeuO33DsZT+GqWz$$hnndSi>X|Vga8Ca^m2Y4-htjR(>{9YgyJ^K3R9ifjwW81+wmv}u=*0!ujS z!{Q1C1pL71$bwJl+nYX{?9LP3)3wC?roKUl;{>1`#_CBeyDE(0tEOZyZ13lcG^lW)%@on>U1f3wTQGb}Mn|m%$J%scVx|BcSw7p%Y$E&$d(CzJ@ zemE4}Ig@MO$KgSQKw>}BDOJ_izt%xHb5u|cBKJRBPiG)oHdh3wqn=uMf6pIx`c;z_ z1!hFsQoD^=HplHL;=RNUU7Lurk~f>Lr|QhgFD%;3?wI3abxMX0 zI0RRMOiEO!k4PP1d9VDoVSL~;sW#Eq(JfeXJ0dN~waa9xQO(%nf_51LrTfsa0`Bh0k!xlPVm`@wf(fz**2#{u5JC-`fn%wv4`fqPn65<-gaHbUJj zcz4akZX!)urQ9aeMBG}}7AKu*C;b{Kw@R*%JPr`ho~eT^h$5SyIkbDBLfn7!b%^;y zP(m;CfK9>9duw@|A2>=k_Ad-VvH>=7JA5qZx~y+$ycIkUW3)Nq^L=-Hc6RnLr&=J! zthg=@4=pKKoi~+q9xe3`yT4=aB|Gut&H-2!lUXAu2LHz(2~XL({{zJGq97L#H)Q0NDq zm!w;QhvN|xE5>?PsEJ#(eP1thH(1H8dsp7bveq0IWWv^7CX<~6%EUjmMDqW52zS`0 zT4L_UfWtUa6=m?8eUvAKJS<^_Ou11#Ou6O7U6wiOZq7aVD6499Z)c=%<2}UuG^#Z) zB3>})-rmRIT-TW1N>%m}0~VCOL5?h0+UQ(34)FdD_I=>sZ67Z&hD`Q-DqkA@Xpjf? 
zlpB=HTY^V)?Hw9aP|NuySGl3+HbLm+3X{6~J4;jkOZFb|HGJje$sRF7tsjQ9`S&Hq z9(}&|(M+F*TA#FS4#{(Q{O&7}L#R{`bLPivUh!XA_Cq>ZW^g$5-%VJ7l$JOtD{$Eh zwq)1>D`^$l%1! z>Wx}bb793Bx;um;T>Ay8O`VH=%xHO53!z!N1;qjl$=+lZqkp6_=D~c0M`s z$p*l>HYz7dnjOCX=AH2JDWkZW=fdo)E)^=llwU5|v^*1Jl1;1~HI?%n3nf{=M*@v- zFij?!_f$u0$utb~izWT9Bm@pyD`E@3b52owuy2L~wptaG$9d|M%h06 zt?NIU|G(YgGY2yBsxZjKqlWOu@tE%+@SEE_qZNPmn}6E*XMX-l#{SR9kHcN6o^BA4)ev2g%xputk64C3x7s07Fni>PI5aM@UN;gF)kFnL(!sGa5X z!$jqScPpNCE$pigz3P)Kfax1bCtjiP3w`FchELaz;;k~3YgpL6iRf5u)^0^OJSkAE%HxgKx4Q?bP zEDhagD>W6%?U-yh+fQ3}>teu;{P@NB zuWS0rz+`o4aZ&FovXdWlXA_B{&IWgmUhwn-O~tm}tJ8He$3v+?npc5eIs6nZ@);fX z3LM`))nJHPF2W59PSk+7%^udq-xx>8d;EE6{%5ah`9PcJ<63Q}li&!}eyKj&_9gb? zTTdpxl?ndFvQhT|R=(&LP7xcoaZK#6Ek240^Ra(#Vt>A1dDHTx z@|tybg`bx~@!TWOkm{52L$zGwA6{c=AIgn(a(3lwH(nQWdv?_5#D7sXd-MiC)LjE0 zf_--U>wrc4#Z=-3)d@g%2l9Yq2Q`|Z+TG3tMtT`4+;u$BG4GAMp~G)Lty6!OcI_K{!ikM{{=JyS*x5*yP0a_<5V zqY;2&|NZ0qyO%#)n8*U-&)@Wt_`8Wbd7ggJnci3G?*O0`-oW@ZF&L-6o5-_(iGbu< zantzE+WuhxkCuS(r}XxX{M|&d+y(3!m9eR$A0Gct%PoUIqK04=xa999Qv4-gZ2Z3~ zNQ`RVsz9JB=@H}TMye40ui~uN|56Zgf~f)b{Yd?Mic)Kw)RbOxgvmJt_hz<EyIr1XrFTSU;&Bgk8AWRVxaSGEY z0U#CX=LTbDt2Bk(U8Y=KAN+eQJesGk|Mnd8rTqaA!F&4>Z4qF&^NySCNO3c{8(ulv zdgvH6)F*)`fzk)WK9Ws>!j1qhh*=DnvDwn!b~>ZiEr&t=&rxyhQ>(yn>m@*dS(YOQ zJA2e@CUZUQ;AZ-o#B1Puc0MeWphOk&E?OB>26;h6#c!2EvCC*n=#wuQjROU2C$E6G z<@NMKXXtcdeOD)dz_b>@ZOS?MsKkNa7WX{g+S+HjuXpGaOza72@t4mtS#Fg`SI669 zeXR7}M^c3_$X32Ew4#CsK{%q=JWa}U{1*G0jdJiU#}Ulza=ro|0S=|%H2DC}9SBRo zrOu$OBJ$Ni5pZn0@ItrXup7+?3GulmIAR+hp9%NvcPQy zf#q!=#V|(w?Tz{LcM@Jk?;AvH1{t$^JPD;lcADF zd+f0&MHdsYGgYEzWjS@F_WOL`lCX_x_60%K8b z2#MANR^v@?w&1m2J(hFC73mwes@K{LbEn)!jIw_ug3hPsNjx6)l z)RO)AFuU%w^Gec7LF-BsFIxZ{@^BSMwuwGYH!ZA4_SRa@AJpVs1LArqCx}jxQclC| z(mVCJSOB`xc!)1pfuX0@hYfT`o5WK=iCDx-7Em$E|6=bwqoT~3c40+O1QAgIB?tO5!W3^VWf zb=Fzyd;Yi(?y&E@Ygg5-y6P&f4Cf8u{}|Cf|DcjsHxI9+D;r&H!Yf6F)vmVY*cz5z zcy26i&fTW1&E_8~b&)(b)GDZACdQ)i!)0l*6qg;z{XmwWEv3u*05(G1bGoXjF0!C~OB zQKF}U%PsOcPdU0@8(eDN&ZOYB%OlH3iyE}q*W(5CG!HLtK|uw>y`txeDT?#kxMoz_ z;V1J|BG7a5UfhcY-0IM4PtQG9?2-d{P0~va(slJNBf8dBu3ImHU@If2-BRx#JLKH= z01};SG!-`^zTtrN%M*8E(#q83?FIl3Av8e#ip;OKjT1J_cV+}N!ya#cra*r6)Mf^) z`JPuuJsr*m;ORSGY@X}Bbixj^fj(4wA}VCp-JQtVvvSXXBVND%=V!Vl&d7R)gruJfuA2z#QBV1TGg^Cg&*;r$&-V zXaBR50C~+8@y)@M3MH}4S)@%(Na6Q~!@~tzLQnR$M;vC_WCLT$R2Q~Q z65DCf61_Ab?XiMER4g**Jyq*IZCwypnEIZ(u1cCp9ScfR^ZOfOZ4i&HjeykL>=<<8 zE<&hahR)*a%r3@iLl{`c3%t+AMbAfW;=e=xA*s^$_&FjkfsEG28*DvLBHXA2 zE~GFfdXmV4UoOH`g6We21(J5U4dqbMuNkzy^wr_i?$+no@fzsmiOAN7X}m+c&*5%e z1RQw1s~i0yFlvrZgiBRTKdX;S+?Iz?`zvJ+bggKZpr=Q3gsL83Y1Fyo=MjNg(-oM#+z(3cHjuhrA32NVZ*JXQ z(+-%>Lw4xfnN&K?(@0Cw>Db9-bi^xwlC>PCMV+Dto)^j2XLS{`LJ`dy6>)zsA!&sfx;K{ zv$HwEb7Ln?=V}*s?F<$evgDnp<@7v7d%GBeBuLnMepzew)wb((?#YFkGv^^H%@ zQT*i24ES!@*ADvczOR8^ac%0_!}If%LAMWe>2LVe9e8=N47Nt2c5O_wIv$|B^<4_W ze*Nyey7}w$aFJV6S3VVzEmZg3Bxk_ajp4K=wjJz#v~frD0mf{}@M*PlXZUzUVF^+@ zV_I+c>N?Z@7F15Vbq6CE)EJDCxIunFDce!0zK|Ap0tU;ZV?S-c}2H2j8o#0#n&8cv4YKQFY{$NOwUB8R(plTumO@uMS0#mnF3K8$E_ zq$~*TVbvtkgmB&83x*V^o4n>G@7!+jYC>xo&`?Tjb(XIezMra`hjV2ix@G7a=W=i@ z;fu8>P{`((6S5{LiOX2YA@{mSxr_3#MUFXt(KboE$ew6tSB;;uSn)X*6m*gId01bh z_9l_pee@$)Ms?FIP1-mnK;ytYt@aGy?rdAqXsKI|0}KwIjfBldT#rGbO>y@9Huu{y z{uc}$C>#<30L(v6iP#feR*7;L+?tRSBNLGMqtzNN?&bJ;vZk+Lvc&C!)7L5xH~1`DiLR$$6aNYGE#4~Rqx^HS2|BH zy|q~=^J-UHZPS`4H&lLGySuy0<2hPBnvGxy6uS{9wmMRv1MmMn74+sSlTF2S-%}Su zs^*`^TG1V-#vY%v8VUXZkTGr$zloS`07}d!Suu8?U}FGsy(03|VV*IuIS`jM>w*z$ z%BBMkcJ1-{Hv?|RnP`)K4#*H0NMe>=6dVt~`=p5xk)UfVXy}G*t`yV=I^$qJOLHW3FPXfbF)i=9i!jF|J1cZPszG z{oO6Ba%YxfhJ**$9=g3J(bJPjol-&GsfR$ zA1vD~gj-DxWvgQe)U*R{*lIj?UfBXr(ht+_LJs`;Qm7Sb==sw!yF(}xfaxHC83t}U 
zRSUViu)g!T2Dc*)eO}|z8BhOtZK&Z%#M9(WyN||$<>??Ec>(4nu`VU|fqUaw6Jsr6 zFdmTL0>BlUu<8qCZyUMkh-H2SQEmk{j|_DYS^VMQ$tQBe$0Th4IE_u;|D27b1%REBU+=lS53x1`%iL!co+FRyTg5b5Co1RRubLtx0_oc!&>xG z+tCVMmP)E8&ad7PtiUbuYn>R4I^5cVh}%sfjddRs-}Jo@mH>L9AcKiRjN6G_Sw9#_y@QCxT zCFpuf0^*`8)3bE{wj_8EJe&)Cr1Za@{kPX9Q5_H%eQ)Xhzb;4-1$CoW745h>0BZVT zS?=M<{4<%#zhmSGH?-ExF3rHA^iIGMm?rg}M!mwbGe)@P1_9oCs1#h2{S6_oW03an zp!q|9BMPME1sLtPln9m$x2?4gPmcq!_>Cg)*k~=#OM)|)C?*01MFp4>qX-%=ZnhIu zxKOKfXfQpAfn>GQXsy$<&GyvBhV~y~3J^<-A~^pUbuPokJ~_w|IMk0@Jfs{sRZ>bcTSwNPM?`7y)L$AgjWYg!-fB3|O6-l`rk4^ZR z&a!V=8rP(zi6X(zsXhV%e2}u(`}&|Mzs4?)a({xFlMoSF^!zaK*Qy`^nHvCOV8ljS zvgOYx7+TyG6jg2bc{Jcudh+duv*GuQwbEtF{!)=_QOEZJi<<91#v=v9pg8kcmH>9( z@6rh)JM&;NUz<0hskrm%J&+w5&gW&k-i)*=@ua@hQ27SvhqCvoKtJ4YhP6z|G-jko z`VH|)LY8oL#}3sRxetjza>%TzLteinGEo7BsD7mkF68?lMYg5bvgwxqk~jy+7_JiQ zq=a&=zmEv~rd92Ub*Y*Tkg4uU#W$wNk7{2}Y^@ftc11@1IP~E0`zcBO#4!<%AdUx# zz(Zy5K!d@!!CrNzK76HrO;OlKpoA_e#oRnppngx|nb|eQf6<7{Dj;I&VJu`;c^G5n z9|M$1>U2JAS^gNm)I~A?fS63h-u~k^f86=!yI>NifBQ3$@inlFzM<)D|2Qe&b}%U? z{yQoETN3KOlX8g2{J%3PNy0x-tEO5%byHZ&lp(&jg~wvxo9S(5>(5TjV<*FYnziKn z@3T$E(xr>%yRM5kENM5UbUP9S=rZq)<1HK5SGL!$S0oA-neZ>2o@YEtWX$=aJw^HE z2tv@IsS1QgVyz3!7OIv7zvn>4NPCXcTNXIiuu+a$O-CJE|-bNu?66WF22dcnLFDnL29 z!Ol+*9M)xdpfi{N1YG1nk{(HnW{H3P7=dgne4cpDIfeRypqr^bBY$f5rg9VTO>Z2f zd=T00zJwrLvYmwAG!O^~cjfd93g0H4lYrem_XBD<^e_r#xW?GI*Ag@ZBP=IEm;tXD zh{E&U6xwgNpAWf7O@ePVPG1i_0`oh2fp=#tY76^zG)ku=FK|w2EE(k4lkcm~n}*~) zumh+7BO)w{yFEr&Jl#X!^3?#QbG+$Ft^9oj7Vz0pomw`FK6T&+ke-6>hc9#vZs}>` zi~0o!yZwcZgav1-$X=Rm%@ zwkd+K3S~$IXqb!un^2nZo657L#USuu`qry8;A5Ek*mXX`PVp{77!B_cM5jy zKRWv8Q4F)XpA_@Qt1x2H%ljSzOXG`UX(;{tokr#Iv7w>pq2(E^YRvN)6|=q@YG>#B z*_$=X%6tb{q@<3a@diD(Fhtq9c)HxNdHgubWpAlP;?kL19v^oR9f2YbN?z~rTY18F z5W@v_0)STh3?IzZ9@E0OU&&><_*zF=tm*DzU>6f0u}*R$>fU4M4R$Bq&c=yO=nF0mp(tCXjglPp%vdLGG8sK!Q(?110EsJn16{Dqxvl*dc& zo69oaCLt@u#L%YqI?i2_IQx@0eT&R}wMBMvWSed?4x>deDMSGXxJmE8Ox*)L)?40O zc42R*QV2GZPvWljqQ#06k-iG{4rCoJa}Jt~aTjTQ3wj^cpXVwhqr^lkpj+Ur{64>b8yi?8DH721B`mV_ANrB}F=$eM&{eRmT$S_hllYgc6C&AeM_e@Gxq zht#w#UtPwRp%=^W1i1^5-r#?3f3Y(X7b>y=dY`;9(^t2KL_Tze$ zpVLr3!)Q%Lv3Y;%xXH1D4aHnZ0>Bd2Y-%>r`OJDA-$cxY%r;KU{oaDi6GUYwr;Iqq zfuTUB!ot&`o$2n_<{V2*9~H}eubZ_y&!X4n;o;O6p$U~9x1TTgsHzsCQeIh<>6TgFcKViLM_}9!0}Q;Cr$3=Jfe!IRpVBiHu83tFAuX)P z*jyBq2hrToB8Cw*HnzeSlkTzxpM5FP>@hb3tLGNn*A|hh)1k_P8O7G2*e#|wIATxr zDq=In$Yzw;=lJQ6?v#Bt{NmT_^s40=g;Fa=kIyS8=P@pmIHz=1e2B5abj3<%GrV?} zLzbyu6QnF?%z9_Mvo#n$+075+@gl;bb**>hbOw_(&N{tM~ z>e$9|mGN{l??b<>E#iuO7}Vo9fv*|2ssNYsIZp8n+2+qJ2Fmwt44*t^w+l3S!`!}n z?d}Y{>vZ=UMrw$|ZV&8Jx)t-~WSerjxQwTV3E@i5d(2S4wI_S)c7Q0uuMN%V~`oHs*5 z|KI|+bciO1MazOp}^0cl0Nmwfuici- zT4$dt|L}Zva-DG`#4KpqUJ@M^D*N`r`~?&)5&A9IBjJujj-|PIcJ=D&yj|Y7ea^t;Yg~SBq+ds0WMU*{ z{)}r0s4&qauYQs!;xH>&O|v*aWr+zCLFI6QN_>q0xP3l0T~iX{&+l5cLdOL_|dEOnb#` z`+%PoeDVyrB_L1ACMe2IDS;xrJqGyFuo=^ktlkBzj>`Th+HCRd}}QBOjx`z7{s ze>e-70d=ow^{z3BI%X?U2ii;S%}Pg$+9a?9G5%UoBDOmF8!Cm}+VFh1^EP#4l-i5A z$8iaAJ{}r%ooi7`UaKult93s6!Mg+n55}u-8sKS}!)O=O9Qgoo_ zf-192{e)LgV*T-fetdt7~g) zmoL|wFz2MEez^cElaIKmdxgtipvrM^axM1A62%M#6YXgBL0Rq29hhqeAOpV#f0|LS zoZz%H%tkM0%~#}5->Kh-fUZ(s`_?kXN>I@A>YjD&rwB|Jw7G1ODBfCgys@UYTZa&XwO`nd@_l1(9^HK7KY+J5xJgAoBMkInmB_V)IR_WjiWl{14d z=uAe@%Guf$Y)fV=j+8`hZTX=#T^j{CJoi05PGgoobey=v$~f%&VQ^4usMsuY%=b9E zV^|ja0KcdGY!tb$K%x4qcqB{B#0-T<*{+(%5pzA*g~mjk{$A+~SC(t`^G`7dsQ{~z z^NHcaA3h&!j*P)`E4Zi3W@z6>1E&W;dquC2!z3WLrhj%ZBEIL1(UQ_isRXIC$W{QD z;(%ALx|JN?ez?7ESh0T;BjWjp0*iSM-#X_dDJbJua<0p}m#1l+7wJ-D9<$cQ02-!; z;Wy@WwP$iLcz2-j>~>am8I9AG_cancXXLuCONpz^3q;$k1?-2NQ)ZZO&1?Wv&y6tScfn>hvsh$XWtlu@udQLjvSd`&j)*EB^Yu9{oFO1?(s!-G!(mbO 
z-IojY=YtqrV>FYLX)ifauABX^`!;yO81Ia7+cuR|=FOjqv9NwYJmd*kuhMe}ws>qJ zBg1)7a7lO@GaFm8WHsVsgNm`sF3l3B1bdjiN{=h1?up`$bu&0?#AS{2(7^fg3x2INE*F?svd zL!`si#>U&|jjxgH&9%$;$WG>gHEuC(SJ!P%v!aB6qR;mH5(6I4Tbf(W7_B zWwxu!==y3FRu#TceOSWG3i5P_t?|Ku(dtf$hkoeX6|(@qhB3Rv4b8Tq&cL;c)4g_~ zR~18gI~krnG>$QhZ#?j9F`E>T*qC5wHHNzAcx^`hwuhBAscym%}; zbt0n|fSoZy`6&aZv5s$9-~yud{v{;hYBFu7MCa>IFnx$)K%S4=JQ_s7#k(2&M$9h0 z3V0KuyrY|ZQTW<@MY}dC*R9|a7Su-lM-l^*a&MR&*~VrhX6IgLI`}p!&|@1jbDM zc-`{5FwJ!A9eYw@Deo-NW8QSl9FBe+uwy4$0F|?*6ECkx`>iYA**0U`P#(*to?wgP zLA=Rde=i5FhcQoA+Spp)?TnpY^;r4(818t)3vXOnY=P{Ms7hOuZ@dpYD;eb=y;I?jA#88n}SQ*ML z@LOE%6GWWz`0MA`M6U#{B}a?&>_ zaZ!ZOM_E*q3M6Qv^C+tOFK8p2mkkIFj^a*q4B^F0o#k(?CiXGww7(LRD<}f`MI_4oAu)4C6uRB(5c8*ue z)KLjJIX_qsS^)(J%a$*K5a)-4@@GS;Ii`6AlKZRnP1}{@6TADaT^*tepT_N4e|QZN z)W^WIp4){#mBzT&Q3M<@aC;xjD-oeWJGz~)m~hUZ>%jg?jP_Kaxh7?mso8&3hx>Od?aqm!_s zu)GFwTj2+g_dM;f>-!gOO246SAhtERutbc%VRfJ=V6$hsN?tTc>?FB(PBvEp5~mua zo0YSwO|{zVgBMNWXQF8EE$j&)O^Ggjp7{>qnK{+C1pCxVb@t!$zojryGZE(zc$zv0h~XK{fsZ9bj_X-=Q}8tOQrhz z5A>wSIv*HJ{at62qTn$Pl*fL2d(<&?f(kdw zgB(eXp6=6}nt5T0jigO4^+^hi*J$QH?8Zdari@BKnl>31?J8}jgJnTaX1DzC0l2P- zRSaCDp+{UQJ9-I)w>>Z5G*eD;W+O@|I2&fnm}Z(bUPEXG)f2h;+GR`J(Aa=E@;J}N zED2@0+eT4T%!qFop4$hDQ+dA2>x7=i2DL#7a*U7)Y1|&w%GfN?M17D6psQfAvqag= zw+HT)Z)r72i2m}oiSJIdao66;VH9(DYdKW-HHzwax_GwxWCKaz^F?#v;jIv3UPA}S zedA-@f^)w-@}%1^zXV0GCXU{z@~XN!Cm30|Yu3%fecT5sGUCMU$_E(Zm%Jtn2Ih-Y zcDCe`I)kF>v*l%7Da6c@SYF(odc!n5LvItlEKa0BCsdUF#$g#X(=NQ)A!^e`2pz|f z)m22UAvb*~I-Z0K9Jp#;>JYS>?`h#7s#(HHQ}j%628#ZuByJjRCOS4>T$)I~|8jkC zq!W0Mo`gj{*R{NNf!)wRBK}c1Je2I>yhHq4zdU`R{!!uf-lxcZxmY^^VL!M|iM;Qw z;U|_(dlPY(gmQ@aB3!Sc$7I`*X_+^Xq~+j4UtUwu)4L%!?ymnD9H2z;r(A!0BuuX~ zWg`+SEN{Rh+Jb(HE3EFM|3!kgcExdg#KofVQTxuZ)u2#ifsCMp^?EPhWJpr)deuB{ z$IF2^L`s{>{EYl1x&NBOR_Cd=YJger=IX(LTVCF8tquiMlOWu?O26WJlDqml?-?QY za;gPhnN71#Hv4<3iH;w%L8YMwzkSYHk?+Nn^-__R7-7DOPmWp7+m5 zqQAfAJ`=ykqm`9uTBfN+L7(o1CJ{0G@u7Zo){t_fJ+nScS+XEwzwmUXja5iKWoFh3 zq>^^|4`*3*a{rALcM^O;;*YrM_3y;8Lr)f_r>8|?%(}V&i)G+lYtfNX8|RUn;dDuv zcN%$V%{KsEv7`IbfcoZ=eR)X>ClS@wS*lP8~SwwX9=N^80)2LLDn5pe{nttmhl@^6IZ()Q_CHAxxYF5}B?@ zU-9E5IeNmUZQ!l=-r!zOvF*8TcSY*(63(Z8SZT6fR{9G))r=yev->_7kVz5uNPSP+ z3N>iLi-vcr>%7XXfs6SUF%5BStd+`1+^p8p8=~D+xXF-+h&j@M8i^HkxOYD&jHuzL z-QR;UV$6>2)+r*lENmWbZ_VX!w~V^3Rd?hl%1ku|)F`1!EFx3Wj|&bWQ*G6-_AX3K zCjxO}&hQi)k){{10w;u4vx`zN$}W3sYv&n-1$X+M7J9~8^o_>a)Lu^U=f{m^uo2La zi8U|XMLcGCb_2tGpqpV0@m0Vv-B*jZeq533`5?yXA(eUBl__cDTc%9AXsabz?dtv%=U{=+}dj&KJ-T}Xwk;uUA zfoGW9Fuq!Jm&t2uUdR2hBF%w&Efq$pXWw7489rBdP7t-D4E3t>4)x#9kZ2i)XXfg^ z{cVrI7W%2k&)D3H65Q}xxj!DkrPJITY$B9z)SAI0I)KtGGy+a}G}Qw1YegW$(-RTw zMRiBwO@fyb>H2mP&uqz;uU}gx>ZQ@O`yME3Xb&s$`$SCE(YU2C%F>{D8KtGGaI?$Y zl|QajVAFLM?GhDt`vx~{lHkT|8cw?djegV$Y3iKI?(;5525(MpzhUXZ40SwVs(;jZ zTYU^kNTuu%-S^1w@XJTxzPE8VFQ)7pV|LejgEmX8XWvA@Yd3vOpQ!eV1UasMdnmaZ zJXBf2(pBcNvfQ6tVfaQVP%3e6+(QKYp2KTPC$I6EKS;f>b*0EE3-9lcPaini>y^-r zx$Cr5c^*30ljk!ZPfP_#hQTvD^sOrz8%5m0N${m~^GlUp(f2q-F}aVNJ}BctLqpNP zQw+gCUIa|Ui7pF@PxI<~EpE&V7fDb?gkpm%u4kAR{`A?C?@erqwh}>u#CFPW=vO{& zloagH3$^LXv`2%&3?Ae{)zUEiePmu%re)X*+vuMP|MJyeOY(Ls_smkDQrY%|v~h2= zZaqR-VtWu%2~~)QAR#!2OUBRjlC5$<3a-3@-*T^NOujk^T{dd|@RWMf^DR}^>OK-ePAqA00KI-nXYS50Vy^OKLQ3`evyeEkAD?3I^m2>Fu>boa~LzuMtG+i4Aa1d~9yr8CuKdzZ^;xXcP8 z1f9dE87~f(1XiOP_nRXkMI4tFgi@JiJ@Hxg>%KHwlW(wg^{-R>yHi~QSAPc0s85c% zwV4VhS)o&MZ7NqH{IVyt7*9^mxb0V^*kg_XT9yiXTG@oaSKJ({;tkcFKE8p4zoTtu zPQie$dw#dO806@qh3(UK3LFY5;P6WLb_@rH1GCS z@^0sT-kk646l04taE}fPTZ+V?piLHEAS(zDr=ej>e=)~Y9zC3Km0zOgo;K)(L?>)E z@kG1Uvt|dk>%3^lKlDIR(LPSVYmaD3H=^?b)0wjH&rqhzAZW`MG%rXRC^&O&Z>b6! zxLRp%su96wldu#Hi9N;G@0`bjpD69l&(beO&|dYABrHRlb1Uz!(F9(7zPpuDEr}J! 
zI;jY514`+}>aWVef%6@R@6X`XF-HCjuIUeB#D;9Mk5AvMS+|o@lnVt~=CM$+;q=?y z>2^VOzre@EJzh&yO?!4;q?g8(L^GywO;z-a+pOV|XDo6d4NE zP_yCquMokV^O>_lU+izZ6})mcrsoMNipMz7Leoi3UO`mBTHq9F`5*hH1U2BUY%P1TzFpM?f=O32buO>$W z@UtE9D^MMH9X}P+SKx!&(1Cp|pp?8Kty4L`$?)u9Czi8I&>_?LMIV!gDE1318E1rU z4R>->OMe*r-rsCyU1DQtOjlxORvMh;#KfHZ4N>+cguulFZT?|oma)<-3WA7VB6FWX zquWB2B_5^W`Mllc^HR_sPqg`3>9S*s3zyW|bhZJDW&}54M^A~7r>FJ%ujzhL=Y8k4 zX&2u%Flsfe#zxOu7t5;B?{0EP$pnD>(i8tt4_pm80dz6&u?x~Z;qFnNnUqv#5ip)N zQX{)guxEsL>AiPU{27QLy%$->UAuQfmT62=#M!RhNVV3}!`@Fx;Y!VYg>EL!a7~Rw z7-xj2iG4xGl1L4cN%L$KjWPMIhu3el3jyrT%Dd#&6rNpy` zA6*hkbg}b2#%=d9*876ZO61JU%L^O16Lc)c5-;IWdH9Vy)5aFOMo-R& zxGbe~t*3@6(dl6;_^&738M1&^7+8Y}v?lCSBBKd$UqbLlX=MQ)NdHN$lw27iUPP z;zi?GzBtqfs6HWfn0B<1h3KYW$DrjdEmvCjh>DlYupyG8&h6Yyvqim-7AE9(I6{T; zr}=a_Sv6*O?;L^DO^qT!l=PdFu8aN1H0d{| z0|k0|R0Fo%nZoQlB$|SmmNmTe^j=OyeG!p7V%e1h<59;D!z(vfJ3ZO2UTw8)x`fFD zmARhD_04nXgF={*YmF%o4Ub(yBC|f_ocSU+rubP;HCoye-ZeF~sB7vk3gwGdmQM0c z0sh7yp$6Lp(4tEKXXx=Ter9_z6SDzbnm=* z(>aHu9tJKFYbeioiXn^umy4%og8SCwK7+^nU^g@jk#gks;pRVi&Y$WbUFu<0@Ta#1 zM|WRXjaAmN3J55PI4!W!9U<4~F;mch8?b@AML0P1d0J?ArP6-#f3EZ&S)d$WR>S)UXPU7@XCl8ZF#iUt>)W+Y+Z7F4@ivyby%`6 zuFtv{V0OQMDMHsK1mw6HB{n`tyS02lqDpZdR+gjXkL>L1{jXd0q=DLTgx@{=@j_SQ z-V~>nIVMMB2MNmrH%x)MM#VE$6uEP^zKKbRX8@8nj^eJS+;wEnHt1|(@?x!Y%7OOq z+saDE?mUQ>r|gACd&l72Jb23*cJc69*~A>YJNe^(hRTmdU;C8 zo|O3sIYWs0=EG0CgFoKa?YPzw(h^=|y~1dfk6y>WlbSvWw&*wUfKtqbTG^dT>Z}tC z*6NSs0`I3vnVZKr>=S&7pFo~~L(A;!T`zXV_H>p$cSYLgB^zHg6GdOT$G};Z?q0^u zzA3dDiIJ?I>jVXlXkZt4Vxg<=Y@c@_%?Ua1S_k57zWt<|W4WKv2=f;~M<5kq3_v~5 zk&U{q<=KsIV_L&C_4F5+Ar!IG#X4lCm?Z8>Y@xjFBTY+sfG>W}%ROz9MP~4h1f;1& z`VdqF;F6iO*ZxF3NhG2;7{x=`++(bO4w8(sI7%UqF0#Qt$K4>e&<5RkC%dq*EfeQf zSRtTFa9V5+eytF3v!G3z-Xy^>;^0sNayySUQ#kZ(wvx%YSA^M2?z+dNSn9>uTj@33ar(;;ere!q z=KvPPSTL?_gXRLRkGE0BC3^fsGOH|dn($<`X|-$cgwdQyVmP#)?u>`$4!Paz$C@^- z&%dL@t>@kb1x13!vmHAid%-t=-7S*0Aokiwvj`Ymox95P`?5d1_N@juwsyN=Qhrn# z8XBg}-_bYT81C=M)mFno*Lm!EykR|6&WYFhUKkWA>EgSN85;Kur5NqK++J<^{m=kEVWL17LfkvDwyZ7Dn=!q-j^OHV}g8-w`8 z;2N#(A)q;s_$J0D&uG|x1gRz@(aDqVp40l_X!;#BO_^Ys=Rb`@2cSG0ROP0BS^ZxM z8}{}rP(_c*Lf_v3@Bo?z`HALE@J~;m*PKHUm(LXZLK?~}ytaEvO8kKNrbK;ih0F5N zkrJtrUwfQb2_#@AEVC{iY)j@JM7FlC>~``P~ zsL)4i7HzMucuQR)<+B;;Sdi(cL>F>;>0WbqC&7GXg$QrZi;KHDFMeT#ww>h4nIo{U zMy_u<&X(iMqRy75rN|$@J6NtWH(-;Wl_@mTF87}9Ecj4C?LZ7>E3-t3PohbQQKE3b z`tij1w+FB5fVLd~X8@9tQLZo3HGQZNFG+i=kUMGkaiDk9GLHw`tOs`~WqT`6DrttO z5@B4V!T62LP9XO~yel!mJip~2(jYZ<^3)}yF zs=~vE56ktA@7`mcCSFhE{|bGd+7+dvf}HNfitw9#;I7$#T}DDgUP%d&|p6yM1M^szoW%XTt0-#)MY%wSBBOsNt4w2WHu!521bM~-^GWFmGT z{_@$7X+v6Z5<=2h@T2yP-ch1Sfav)q>HHoO)wa@6we+DgL&?$jle+Oy#umYu`Zqax z5_)83XUQW)?@LliF+U^Wn<(qmlFrS%1@U{2^IM^{|6;&N>noK+A|F6i0QOXsFToCj|V!HGv1Oawo2T37=y7AIWWESu(1a16__IUM9$}(y_)cSwG^?G~99)U>+o)Y*O`TA!w zOG*p8j7htUS@wT2>c8J^4b;`>t-)U@jYIs#Us{=24_NlsZSl#8|HEgLAGn#HpZvR@ za0K>(3Mjgp&VHW%!{<(11w~vGK8lAZ%Rjpw4F?XzPiOrvbNbhF4;cA>C+`p=``^hs zY)}6GP4d1}Srz>#a>|*I8J%# z3T2Q7A1nTBv(l|(?hloH2N;R1Gg@VKu=6yQyg_{C3D+r9=@e?F&&xe{xcVsO#y=?u zfLn9lM$?9bR>>K7ZM4rN&}pExQ@C@jY2)t$RYVJHQsb%Buj~V%-!{crf${A5SzPp@ zwqX5`^V`7=x~ueLeALyhsn(jj!gISwni#*6C8vL2?n#I$uc9Ir`k%jBABazTnleKj zHgo2mRIrp4iGvXeu=85zDmEv8ES!-I1*7y4*n@M|1`{j{LX?nMDO(TzQRBjBx28jb zh)@q0O8`MY;Di|s@Ya-{D0eHZB|9qqsvA0NmZe@0)7*$-iu0XIq6ReW-b$gs%ZVq_ zNP-*K$vY6e=>LZ|N->g7PlvUaYMf9!Q7s3`OW4y7lyK6+gF>8&;15y6C;#b7!ReF+ zz)>ykavY+7{%pmVw}3r*X`}T1AI9*<_w?xijh}Zi>mM!FuRp*h$N}^oA~c&8af8@T%e0u`F-azlGe?uY8bRkQftlm4}BX5P;p+LSIy(njo{@Y5QIS)H}Oeus{Nm4pc=zAtd zNd~O8xL9UJxy*VD-KQoSgW26#vy9%UH;NWmoagA5{`h}X4R)F31?U0R;)oo(Mwh!L z+avXHZ)c_H*+oguxE&#AsbqFqpgGwm83=#sO(5TxEM|30`|_wIti4aQ$O% 
zbjnKako}(s@tGH}Mn$@|_vZ{k$UCZTO<0-H4@Z>BktG~#0PWu2A*{nU{h9MeDayrh z3*_ns?>m{|d$Pnz6kmGmb>-?X663RH?p^=4P0MUaeCg8K1F7`6+*48+t9_+%^#{Hu zgNe?aLuBed+Uez$q-Z$0FAE(Cz>BF)SrWB)idK6+LYo0xB+>3)9+I z1+L17I18&3x-nj9kIm4GRX{Efuua;lQwu}=5|QV!Z`QHzsFx+P67kb$m08DI4u$A0 zZU4Cx=Ajj3u6~(@HT|x#EI(u)Rn`dUYG-s^xc5ft(Nn~ET55dtU{tx5Tz2A>lkZw2 zf(V@UZRz|@Ny_t(NVoZA}SDT{Qd0N zv^(aZWNr3m{_J`G@rAHTn)0PedW*i-${9+*Rm=FYPWvW*S1o|boLSSlA3aP92xw3C z?_Gc)hUrz09blIC8(g-(zwhMaiVTZm^>D*Q0HRC-&<9Jy%$1B;MGY2LqF~&)q#vG; zU!S-BO(~?O9FH2zDqa=2_Mryve4UGnUpHT~_G9sUPey~m`K8v-052|{wl9rMau}bW zyia2P!L9#gyqO9Idn~)V(tTfWkyEreG9rRwH;_l-#0Z1~KvzFhSGxm}I?>dpK6tx@ zg54ZP*GhxLO|#)*c#h+x*Eu)q=sbQd!%^Bn{bJpNq$b8+MB$SAEW*1?_895ot17D1 znuW-n%NCLQI@;ZzL=hP;G8P6y7@c<0KOZXJVeBs&u_j^8nGHK-D7VjI3Bld+5>Y!J z#n$VzTmj)EdKV}00RX~h3f`TAz&{19o*pPxM)%7i)V!q4OHp)Bcm=LS)SqYiH1UIIKH2;Lvlxk>=LdlRhBa!vUOmy z=bD?pg|P$*We?LL%r_E2nb-1+E3KYOfuT4;BTuIVAhQL7+T#V49ip@6t}cdd2Asce zG=1sRL0PH_=(3isi1usCztuL&_cB%xLnobN(hACn7%>4C*FN}PkWf8)?i}Ox0ys^> zrjK9m$&AMNXm|-~a&O=F!-eIc%eZjdVBqzMibz*|w*dM*J?0Y*61g3OyT6LP{t&r8 zmNhd#Dp7)(TTRVi%t8ZRO3AovXx_2cv=Nz+;NEbf$a)OY15i^{?&^ZYHi?#lD*fNX za(sc>F@N&t{86uyslO{S9*u1vQLLH}Xxa`Rw3Nl`tDkXC4WT|z%tAIO4^n%F%xOj; zcEBptu&E;?CdQmGzuyADZno{wi6ODEoLsu;3YZT4)GR81yQZe5Zi%TKxPV&NQrKJ= zVu{YX?O(e!LWd$UGZ@NZpyYua)DlkANuxVaCNbi@Nm{lm)^ZOXtPb^#RjuAf7ITgl z>4wmy%gf8Fy$irK8KysWS{l4kMNaPj$d@N1Zdhwh&~&Ex& z`chAe%X&DWZ9*I3N7y7T-w6p(V;j1_mF2M+zIlGJFZ5x`t1m`HZ|Rm=Q5V>aJ{6Z9n2=}hTuZI|x5TQ864vsRIn`XR0&m4oqSifj@R^(NW} z2+KdF{y#r8C{KI<+4wn!rHJ-TSA;~9m9=|q%X6aE2=YS%oEvehDC$>>nJiG|RIbD;_?TJd|OjV`wd4W=8x$x8MEMQ%m=_1(fEqfwWy4Q{t;RXKxh| zR&c(@JN9&hmF4|#E|+ctmI7+?P#P=7CcbOs&#?@=+#R>9hYRDU^C6ShirAnf!X9%+ zHb?=jFmeqb8K@%DWB! zZh!4(By@{JmWO$rui~X5Z|a-(VYp)3h&;^9vd&_r$k8n=`P?a(>Ddg=er5he-3?to z1g{?Xt^24F`>&+0;-r?PYu;!qk3eYNei9*U)2g96yii!)<_u#{d3wVnjz@JoaXrCN zJ{;n;^|Uh1Ytj_}dvxTSHf1UR$2xig332SJfa0txzPBn&e?585{rGnA4}frNCXpHA zeZDgx6dB(|do6s|iCrNe`1w*+^9AFwz0TJi%!#_Nvn2QN zvz(MWOdC&febD~w_+#@zB9(eXNJ!iB?cp-GrIeI0T096@Y?H9@X_*Uzq()vb5YIh) z(gCk^DT(jC{kbF7ICsh!6Yo4optd?dr~n*Vaq^Ww!&m{UkddYGyiuum=%j6@7#C>T zSWsjDU8Lr%Gd0<%dAf^UPLRZJ>auZniZV|c_%3!)b!A=SCw-oVZ*0EJNxeMAH6OA6 zd@&{k*E7O!ZSnLR3zv?wKzgc!B6}9>8l$#Gf9m24h^=Z5F_&Z&3*S;0rjx9+N^6K0 zHHzd>f3Bzse372=*A@~h4KJ;Xd!K8h6vov?4Al!VG9tfSh(?Z=?=@nr?#a$+o2~hV zH>Nv(=n%<=i?nA{mI%m5r3=^w#Pmre2*+nUx>`SL-{v%ys+b{tPnNGznY_|$jA^Mq zdc-1tIp?@8|JTMwzj3BTh+FK<8BlJpJJk4A<}k-j%7r{NJ^l3p$tye5U79>UMnIT& z|C-40;moL$>~YtE#PCsib}MYjpG(Jj&nZhhTlU=8hy*akQ}iO=F4PNZK5yTW zTdGC5DvIP`UXs`|qK8f@9;GO?(%PsR6Q(e#P+lpwMiZ4WYlb*1coy@SN0=$~X7xUB znjJu%+S8^sf@@n^Y2YiL_TR1YKi;n6ykM5!7HO2*o-tx(4f~{Mk#ni5lyeuF@|p*2 ztan#jR1~_ww8=jdjka9)Hez|=j$uGjf`M1B#v)sHRC@7!8FrQl*Yc@v4MY|V4`oX$ zQjjTXrrEkaeOG(3tu%SHW(`zFX1f+ioQ8JBjy5mI(7`ff;zFpMHkpQoXp-^1=+Jv3~i5-jB}WU`Z;d86qi3-D*^ZPTfV z8HhlhAAnb!6nn82ilX9ALDo6da~D5T0V-hUsoz3D>U-186)eaBJX;KqRy*l(YUYOp z&;B0DrPre7V0+Ud+c$@e6x3G|qv-TY` zj3eBaON^c*&lCZ@W?g|3b*xxCHB&G*Wg1z??GpUW$|T-F5NKDF6EF5fyn(ir?Uk z%^!x}O5OU>Wg@DBJwIZ^xN5k6G* zlod_c%LVDogDaZVF8KAc*eH~&Ntd{!%h)XsdOa=Ks|BT{pDLMbcrU@6+KPguJ7;Ih zcJ3z?ty|rhWQ*8^U4vqIc>lsS{rio{BYyxWF%8+v5t=Y%!*)1S?kj z=+alYwFdq=EDgY^0Md=dXcOjl`t@7EMKw>PN4-kSJi0Hek5{?R4C1Q0v-AYyn(a7c zoTtt#FK*f`)_F)Gc0PDVAKYRSwV0Ao>s-K*P7eX311<@3DI)o6pL1(QM$Afx`$APM zpp1%wwcD*4#bS18XMGo4-HZ$&a`){yXCzDa0~gutH%odJJ+J*_4c=-)9n!ZsZ5USP zG1|IWyac}sj81d6B^Zx74(pZno?V*B$&ChmF@)8=J$SFCqFHvz3t9j}=^|>nwYBak zSU)p0={?$lOK?ukC!8W)ed6}jEVBQW)xBZ~Ek`>-LO^+K;~L@8@CC}9R{2oGz9IG9 z>-xyI0WuqJ)2Q;17A0W=B^e;$OG6Nur~_NMm*m#YGJYYu<;I$=hsy|;z``MR3b+2` zm-&}D@|THt7`~S(U%?-M2|0>g)|D+ulHB}MZMBgemmmptu>mdgQ%j~Dk+;Y197sZ) 
zH#A8CTyp^SIur8R2ToacmxxAR!xrIz%8Mop@jGB*uk*22U)#*ViNe%LrZZRV-+_!b zLV%LIW^TdVWHhN*stryBkFtw*!QOkHx(3v=e6U0LJMPSuAG{E*gsC(0>rU@U2@oty zX}bMJ_&5s-SX+H=sIAu5JdG@I+(`G%3!Nwh#G$Pq&#wmTvw|X?pN-mE}TekJ8m zyub5!D*BD5^_nw|_(q zK?N#{@KK{O$Mjl!s&_jb{7>ODO8b*JMme2MjSCtztJ9f$$T!-&@ID4KmXAcA^94(J=)9GpxW}DnUqyw&lA>@_M>ky;ra!TUQ3Nd>avtnzWMo~oKwCB z$R4uVoDLvR*4u-<@g`|>3)!8e!8GC@cjfjXNAvjSQl6TXRB0h)8*+lO^pqr5R#)&q z{Ktp0;La^AieYqhTJ2HZtyHS?2u0#4KuH{q#6PK4JfBUlQP=?-nG#?G0yXnVX_-e@k`Y3b#{M?z~*=E0neWCsXO$U zUUS(ep{(j_zxmi-6=BWL|CBcg>ctq8?bwT`rHCew6tP?<0Az!!Te|_a z6}gM)C5HdAY=N^YA6c8np%LQ~E_M0h#a95AgKTRV1Uptl0%z(qXRAqp#FJ&RV+hC-Aw$EX`5_vlQ~oklQEb{4?lmpO-*}K2Q^=))}%r%2!3&*#(F< z6r7T?Q8SKrbRg$WSv-1EWj?A@s`KaRop-S)syAaQJAXWe?rDhR>To%DVV2@c!UzHc z2Bj+ibJ#&Sz}de5nDO_yv7snEH z(6as3Av>N^fQkATF7tWknrpplF;J{(-HnwvBXj?inA?&~!6gL*g&@7)VWFbW(-9XY zUZyUHhIyM7hkl7&*mz?>m7v7sgF$2OB+ctw>ToaW4jPaaV4B(JpyQ#*NOG6v_)x_%^%kxL{F_fsRDh{)icfREyDMm0E zJ7=5>rxonzVvXB=KgA9e5f|6v-`zk=yXlz(9Nf(1L5W;nBg#ec@uopT=kpyeFaW#Old*{Tz@0oVbi zfJ$%ovm|T*s?>SmD32`tb}b7xx9Cg=`&XGLWUa2Yob@v?eF~gh=U~~gIg|Wayp!t^ z-k%aGzujy^o=$YIB?J5tG97JgZLzMC$yc97$qLVHlUEiGJuxWM> zEKU6Dd#LgV=S)dgEgV__R(p~-JZ?}noo5D=kdt+Tz=ioJP~byaerjJH#$Qzh>TOTVbz#z~GDZPe(a5hQI;z!*YR7#UgyE~GQdo}^A{kQ-soAk5 z@1W;W_bLj`Nu&Ud#Y2Lu1vlI&H_2;a&)i5=Fj+VpW!qUEUbjNTr7C?*57_rd6zs+J z@e^J5EqYhWi*0(@OF-!5ik2~jKr5}fp=@)QvCaOM5~nlMwhTp*FN7x8+>Q`iv(5}cQd>ZwW1JTP5GrLcL zOGnm|pgp-Zl*l-USy^OV(RLF*Gm*m$SrmPbLq{klV zr9=5I2($ncLl*$(2 z&Z*vMcF9h@<{33uHVrFs{@xcg>${CWrJ`=E`4!xNqP->HrUDrRBTGlM`jan0-7)=E zR&5z7t5b9M-TiYucR_0F6}_1pz4vW#-T-$~mgW}e;WY`twvG}hiF(+L-JRt*0aSb% zA=eKc8V+5*zA`a^^p=%{M2Io1^kdDypDPwkP+Dz{J$vxvQ)WC6%u-mX(_8}Em220G zxqKJZ&rmzNf%1oq;tyPiV`r}F1O098K0l@~GByR2fi6fiE1;C%7AJPTpY?c&_Q3rFq+gA|)llZA@th&b(ad*O zHN_f9CHY_N%8_x4-jz?_pu~J(d;>>@BurDsF+^ka@?bug8`bMKUp9C6li{Jtu})vu zso@#4W%X?vg(4k6yN~^%SVQCPsjaM@EIXyNa4o-sDw8KpsFEN)3;Wu#5}j!2;zuXF zmR~k~ok1;#G-o)2xo-!YdLfo!CdlX~UJ76|0*-f8kN|#|hYRJ-J=q2t>X31I3^}bf zHTAr4txq&S)YBT)c$Wawab7ma0k7m;?GU^yB%wwsvwv-o*KKCt$aj=MI8l0Z5gMDj z^I(vY4fPHevzZ|(p!pOvxKao?Fx~r_iTATrarN zX*A;!&d2e-{nOO*H+>3d(T0Y`^R4LR&<*P+YR7v;eiT&wTVP3<@xZYb*~;g32d7KF zr@-7hF+8KyWUeIUIKu`r-fuuzO$-57ekl7Td%qlFj3g;5+NX2A>(o5##SXwe`swgZ zl@w7c7q(LXlJehP{8y$~^1uz@uFO&Fef&pjuGim#_6Z=pk_p)eQkWcssfq_>q4oCQ zmb@J~dk8uCQ;5W$?rFd)0ErI;SwL0q4lMRcl1uaMaF~>hERjgQqHSBhL1DI()99%k7@A)N!O$aq$mtl;`4iRKo{N-v7FR z|F)@{ARwiKP{{hu?fQ?$|8u-OAmuv=D7|Com|@OlKf`Ao&!(&OS*P#M@2|E5nB}x?9{^^h+`s=Gd&Lz7 z;$Y2nASq{77Ws>j1;1m>^ZLne{H_LI?qcLtfrEYi4o{VTwgkBUeN9FZqu@h3uJ@Q1 z)$h%Y(*EXWN%cV8|62&&{~T5n0lk>fcY7y0#OKc=c=^;1G#rD}zoFr>w;$g5$pYS{ zKN|skbbDPeK`t6y4Kc!VR_D{|D;Uq?fnD(kR{c44Xo+k}u+R_?;?Z5FvsOOgLQ&6FNs zusgwy;WTvpk1tU^1lS$2dxkFj7@XFi`XUDCk?y*+=GBS z42QQa3ui0*>gzdqcxD&&JdFK?t;BQJ0T?V@wM^|-U%zkvemLjj#S^r?=!ZyL7Ur4F zDSfy8tFQNX{5Yjx-z)Pczgouh1N-68Z2eq6)|JL@1CAX(z6#w_OuG&7-(T{s6d3Hc z5dCTy|1T9H<(XrZ&uQ;H50-AlB9Sw^qfT;KB%ZaPPEv{8P;$ygUKXzyuvNV^i?hBR z!hS`{lf%tEWKlW;&WQ0{U!4p8#3*gSU1Hnu;s68NO%)Yu)#3ib0VXHj6_8E>yhmY0 zXcDs;=Ti4+X%{~OraBjcalL+C2^pG)uThje*|&-x5La3#VrO5NVddLG%zw&n_@buoc-1_XLzNEEZ9SuKfl|mc9*V2fl@2&ikZ>Gym&RRz@G- zLL$IuT$i#leQsy1`^Qi$mOnz5W$-3l|7Ie~s&i^$iw&*bn1g-?fE_L)dun$|fdkg6 zV7Bc^MaJQ(Gl%sPl_htkfiXQ}*_mjGKY`7QnnxzMsO&4dRH(So@#Fx5BTLJ>BTnZ7 z9Jkw#Y)N&uPZiVBbxs&M#vFDje8=37tuXMUwu;K6tpXd8_pb9_vo*k(JId2~@^39E zc@9vLFCCY6pdr#Uv+-R~DD>^C&MMH;JhmoC&s&RY!@zUfW}$BwMk z_Kj;vKA);qd4BYzHWZqkzd81T4k>MgOW>z@jsiUCLpaFYgLa85si4>Z2Mxv?!^S7+ zh)@nLB>@ZL^$iciJsE5K?&Ta)*-A$P6(LiyqmWRNv~vg3L}-Pj2TKZoZ z##NM=XBvj1e2GyqQ(unEeJj}R%vy@n@K17)7OjEAZZzm1wt_u2@BoGJS;e_R5466_ 
z)T<*)>7xGEo*&~h9A;%?WE8V*L`|S){N;d3J2Eig*|A#F5d@yQbv;@tM%0oEpfL>c zMIdy5j#sKh6G+x{6|n~&o^1lR+gqG7YuNVeC$DjcXt}|Kh{yANy^0)7Tq`ZJy_q^D z8%WsLOPmW@vZL+`rO0Eh2Y9)C4pWOa(8bVAe)ZearKj9UsCDX8q!LT!sr(W}Q~{hS z=n63THrT|PbNLd*^1bGxNC;_v3JxD5S#r+0dF)Wj$o5K8*ky1@scRDf*2HG&q&wn7 zjQ>0yZ%H;e>p%GRgP*79fHA-&C}dpBm!X5;>rkBGC~|9Py{Cr}ym(*L@_?A6>c1b+ zzxDGS`o+4Bm&C>uEM1sK`mxZk2XN1zPWYGML7V0Po7hD|yj;X&`>rf(o(+yZ7RGsS zk;QWJ%UTh(IRo*zV4*Kdl{!5WTaJb~z@Y>ADvC+vMFh6VG>vheAssi1tWYSGIFN3q zp9&fBzbJiVbpaWkj2QuoGe)Oe&^V=UvCn~hi3R{rwy1pn9J-tyk z=(?sBO;;)99v%9a0^fQ|?qk#C0fwcX1w>e$4}{xvZ5ZxZCZ^a;L)U`)lpy3U-gUR^D zd`>|lz$llhQ-=|=#fo(UO=_$*-FLiCH9lYw_@p69Rq*vO@ds#hgTA_q`hMz=BbXf> zj^I@PvR<-VBYRBVzOK|I>s4-Y;^maCUOKWktBG7%nIOMe0)x4{av5~lysiZ;A zKB=YTjS!J_xg*5<9df~B{k~l>c7~k(1Vk}>R{yXl>{9My%?@=%b-XCr!}09hS>NIh z&Q0BNF-Q#feWt#&@qx|75}jC1#C(fDTe&Ij-KWzLo}Jr0AnRda>86E(y6#~clSTBZ zfL`xoQR9XUfXlmS(OBFnTwx=~xnd~azT#_)X`^J_CiKCW(#V9L+^_)8EEt6yNv5}pq%B9Ykj3$jh+^&zV ze97$V9^0dHE6JOyn>qA2^;OU*-=nCLFhy^5A3x{=gYAiMI|&?ieOeKhG?@A!`D3^c z!&b7Hmn9!!sB_7eDsyYyw6=9ll!0`a!#c~ts(aOV7bA(xDHWm1M0xroEd|D`AU5Cf z2IW)@^xayXZR|u48Ro_haaAkrbj~Oy9<*x7E?C%I$e0CqC&f0^4XN-j#xp2_qZVEAS|1<$QSZkexc;T2XgINipo zCmK#6)zx{JEK!2m3WV~#G^Jc>2{D`FOrk<^nDWp&q35c!iycWHxGhei6v=PP%%yx* z7nxfw*Z0hPK-BU0FRh?rrL0FRd>dFn+jg$L3waeHeWWuGVbd5ckbT`_HCQ?RE(1*) z_uaet^?i+}hBXWqRfQm4C8~SMi(PF-cDHqcn<95Z*bD7UTyn0|72Xi>uCSaF&+Y6; zpE!ts5&JGY-@xU0`7aaezcS|FAH>M>dngx-bByG(NQc&s`I1ciK;|nss7Ko`+hNhp z+={OE#13nC0_~~JcgRex07_)hwA9|!kGh~ma`q$nwt-47KnGTmxu$NW~>Cv2B5wgI&>(Z}-E&_zXJc`L0s8a-|o z=+T6M(gadZn}~B+OR$KGi7uMDt|d5Vk~iWsGcuFNr7maRW!N5OXj^j6`u2_Co|)|x z#OXEUf<|JVf89MVzWL(M^tIg7Z))9nrew--rme1>A_^Cpb(dJV5@v`ia{qWh7P6Vu zLEFg)2t9Wtw^PKSBzVkEQP_?_BUUsFY%7uXs+G%BuL=^LXJv#!_Ixa{Op1v~qz~NK z2;K4P1&}nG+qlVeY;x=3%R%=A#GMN6 zhuZeLP)AE;#w%HZDPc;)x0Q1li9M63TREiA#|6Amm!5zEI61snem# z`@#DUWC#07*aIlmxngv<=1wp{J(54Vb{{bx-3AK1AOLhDD=ZB97A)MM(VBQ8?s_{2qy|?)VeopG#s?!Ja=^i{73(IOQ>BQ2zKy>Gf-T7 ztrn$Lc>^~#`jY zOnrJ)rq(ZBr^@e zr3>SR%kdI(?-!%Z=QgAy)yi)`KLmvDMaGWZY^D8>&_90)Y-U z?htVJO9MopMP&%i-R~hzd#t^}O|>_>?;b_{V!z3r__I1&+ZEmjK?7k5v8PO92^o-% zWH^;?F=RB{;V){5sX6U`f#Nyr9UFJe`quiRCF)Zae$9)-g^_fP#R~jRpqc@Zqh-FK zXJ#0TFt}{Eh>ox}>MlHw*(wulxS)io8LZnu;!d*kwecxGC16tPbC!ezK*_8vO!qJSUrwD8%Ld8d`IfrN;_M!v6$VsOyP4b zy=(vZY)%1$s)&+UdU1Q_*vM{5@hiysqVZDJqzTiT60E(2MOpS-5vo1>f*^5VEi!jj z5H1UK^tM>O*hA8(O4-~Pg&;(ak-MHroYw3>dk<@gSx#!?=+3YE;$@oywv198u-Pf}8iA5*MV92dac+#?C%T^Qp71m(z%I1)>{>}OzOwbj_ zXEgocXcn)WIZfNNpZ7=s=scF-?4xwAH6~AI2yNpi3@x zc-@Uz>pUg?k}{S^7zO!gB0**H6ocT2a2ZD{aAtWmU7@}!c^P)zILnCs@}ZN45+cSm zl^&Igaul!6b=x-DLq+9MF(=2YVPj;y30NIEFVB&~&&fJuyv@%49I=XmnULf7;~1w& zp^|GJHF2kTRc4xnJNW(6iXax9xU+w7dU~;+VB(Y&%15t1m+`yTi$04TPN84m#&iFUTvcR@|wjBpRrhTFWqoKj&?icCS>7;H`0*kpERMfd~TrL?I)5ZWT z+VDiZ@ZI&XArl!#Oky$4_j@JbCY>LA=f_OSg}4NtLC^%LjMKxRXTn*=%Xoy8fY+WMp*4QzOf92nO&KCk9v;f3@WhG|autT~O6K+r3o(lu0T_*DKtzldGXi z@Y*XQKErk;4GZJ5mP$Euu(C(vA3pG^%s`uY1qCIa_r#5PSv#0LdhhKzV2r;=l$~6$ z6grriKIse>H7dMY49i=Dl@t8gXC9q|fKouwi0Yo^gGe9M);_Fs(S_ zeyNxB?!lRv*iqLnI}X=O2cgaHi%3$DpxsdUbH4BTmpOHI)v6Jc0PYuiYoBN0IPMMl z_6cCgrA1GfEN7}OyRZjTZ}q#r@5_o!=iF?$5SuwZP>M4=>K0IT} z{Oa_;!d>z6SbVBpO_CG8=Y*lR5akaUlik9c?=Kj#nvRK=VdbSsuo;+QY$*0Jduc_|%Q$N!Gg_9*b2? z$xC1MZKweh-;-aS3cMwOH7@(DESB`F<1W|iY;m~uYDqT&P3w$vJ+~ZC;W-6WvW7J! 
zsf(onfv^SSnrIHUxUuJ#(NMumi=sxc5qPJ9Yp<|$R>!|eAT%UDNfVjEbQJ-^jnTN z-RJ&r@ydPFgk<^X1owF|c&Caw7i=}&W0y#Bfl^umHyXf@U4<!wjV10uxtn zDv_uHI>aS>?4+kh{=$j0gCQDB?cU_%(vg}gE9o3oAACE6h+_}x4>5G#H-8X{pEvMt zU&vorAKv1hH3hXuV$qAmD8R%T>bC+b3QmN{kzNEN{B-?x-0e>0I9CwXUvEq9CLwk< zCR+qhP8i=%oL&dTLNm$Bi!VHUakvcZM-H}Q!|%o@oI4U9c5RDg$lT_uFGKhqXTeRi%W@p{^h%vWs}oLZL~*@3>wIxkh2kVb5MF^xk; zUYQfFO`IGc3v%!6X`Xmg?K(SXTd@BU+|s+h@AeZ@R4cughne1z^wIOSi#h{ez1}pM-TJYSqBX;p1X%^)5x(MRQ4P z0une~OT5CC=jhc@O1(Sw?bqqRc+ruwp`?SpYk`j+t$u+g8aPxFvkkWr^es@B==}c0 z^HNxz!tCQcQHnY23B+jf%1kE7R>KfX-3XPA%5G?Fd|o;-IOw76Fw^&;wTeMRUUbYW z%_c=02#suh&WfB&u(UAG-XX&DloB{Ex?swjjLm!jlS)O6yH+;IrFm6Jq4N0+6&1ZY z?A;66vgBtIt(b?Ohjb=Zo0PP}KMHWN16@?T*6)s2#1nkW%?zFMI6`w0wy>|_gQo>- z!^@`Y{tQTl3YvB}SdZ^AS<1RYy-eqI2HZPX+$X*qIrQmcxco^7hHRU4XN$Vr#xNmh zNxiq-geb)+bwr%Sq58m039vZ`m|SzAOoJZPON5Nt;MLp z45Rd{X@&#_W#|foD876M#r@0YzX7$04U0o1D?Ovd{`@)Yk#+J2ss7Uu%F+cZ$xY<- z*$(-R1Vxsg!xj2!fIQPQWbYoDWBv7<_sQd{UtI@Q<7wKJe?FKp4L}Se5e$2NKGN~# zGywe>UXs!Ii=iX|hbxC{y65L3JyzxdhVo95gwxIp{FPzz20{kyc)C3_t+&4d3CkA& z!0Ke~9ME0xXA@Fx2OuGiJ8XMsh93TU?v)xa6qlVAil!3$>p@2gAW^qcv1iXuN7@r0 zPk(r8Sq8kNNlO9zHPqde@43ag1ABgvH=NQ{?uu>xhoM>h=5_sF44VL` z@cwP#JwG2+nI{|=_O}@QWQG187b9AtBbBWTH>cimww&x2{GPIh*CJ92#^l~NOR0U& z_LJ+Hr@b=gSZJNAMEw*<__@T@Es#kk5JO;UGPD`2GpQHx>E&Pqgl$<6lZY~%Iyc!X2sV&JzxlT4V zz4@c6bD&>CfsVj@+aKHl$sbmP=Fp=YIihH%^;1sJVLxx(kgwR(iMGGPVIWdSs8^Vb zv__O(PfL;~Z^ntaPai}s0Qb>=NMRGuNzMR7^&_yl8+{D=Dd40_j86TLwT)$y(G2-= zi$d|6bCy?$e2)u@MLS`SF^~?r7D$eX3g**j5T>0>?3JdunG6Bm1?^#KGkA9Q*v~;& zd;<4(Kt{0t+@}oYcLh1&7=`Db-tD#K^%lTy$>*2kap|CTZjU^gToVINhZq&!zb+vm zQEo6QMzeSJ`7_Z2;!+dpbWB>_6??@74J&JG8ID q-zV$0-l3s1erx6b`JEN8%am5hbhq8nt9%dePhC~}X8w))PyQF)h^y`Z literal 0 HcmV?d00001 diff --git a/docs/guide/letsencrypt_authority_2.png b/docs/guide/letsencrypt_authority_2.png new file mode 100644 index 0000000000000000000000000000000000000000..04947ca2e015142942387efa6aebe650561bb670 GIT binary patch literal 223344 zcma&OcTkgGw>E4;R0Ko?q$@=_C{+kWkRsB12bJEX1f&E+q&F$jL+@33O^{wf?=_Jc zS`vB)p}cu!&Uw%K&79}?eSa{MVfO6od)=#C>sr@NxQ3b{=>ysaH*VY@ReCR{b>qg} zbHZQDy}N|}B#7@M+_-Uv#_rua4TpD%?_3;R+_b-ZvanLLa<+1_v(QqMy>a7BbhNIi zExqnjsaAIz7S<7MN1jM(*cy;Bt2L7tE)tET<0J;3KE5 zqS4eWsUMh6CU|aPHsp$Cw{mHGmcqroRRAOFd;AvHb6c{;^@ktkQBmZecE?;) zhEf1ud>)xG|C*^&;nt|~Lu%0}CS|JK^FXcaYq{D;w{KO3cgr-RqSY68n|F3`nmr@b zep!COB4vRAP}17_L>wuvLLm=S4Q^9da=g8ZpqjHS2_m~I@U-G7O@w)lH8mabkHn#P zUFvWLv@`Llzu!Cv*ECYcWf>~b((0fr*u>(Av_FsNE$Npu&mYeRjhYE4 zoyNHe++_BSkWf8Mps#sii8-J9;3#h-qpLtgGN9drnlk%Fz_05h3^ZKyLs=a{SmDZ!k5bXS)D|+WQM%lO&L}sD~;SE9i2P3dxfhiKLN1_%Mr29&pF=zjJosWG24TGt)q~+ z5|{K)$D*KK=%FPVYjoR*gy6ENsjlgXXWPlN*e_Jnec5j}?mWM7^WNJVw;ui5j{=l| zM*_M|B|QJU^!Z1Mw_9ufKX!fllJ(nI@_lt$-H?Aifh^PPKQFo?@Z#nVsLI!gVu61? 
z=cDtcyZ`vy&wt*(|NPjhGeP0ki+{4w^KWB)!vDDFMxe_38#(+>GKESX{)5p8r~4sQ z`Ok~)S<2qJ2TI+~*R=Z2Z}It!z_}-LwKlOCV(u?@zQ;02^M#T##fXE-pD-Wyq;YJ> z{3jpym$59xZ#m3X{-Wg8>#u6RqWv}JwKw%t52*WhaFFaj?(_5( z*P+v5!*FSg(Mg3#vq0`#QIcOIHGh(Jk(!}*5|1G>F&X`P?B?_10)QLYy?-*oPtAMf zhPAyExIJ+c9Ad0jVMNJsTGCJ0v!UMxY1ran*AfZ32ND4#{d*t(FJ~}}_`?)%@uJle z^}w;Vx9r{)8M|7>Bg$i0c`3l~DQXcl?P17#TtH$?cIzKcHGXe>C{^2jwvwU22;jN* z2)J9)L51_bzQq1H4t_uxzcXD%j8-n|ceGJ?U)IS<{U0LmzfMO3{?ideERUQgp^l*D z=TnTKukMCs3lWh})!pgZo3ABCsnx#u0Tp_u-0_c3ni~98>$7>2XK}xNnI7*z9E)NT zs~uJBShBS(=d)Whk>Awl&&U6gOZ;lttMQr00V%4})BUGS1Is`B{S$b5hTdGaPAl0N1Lh&YVia=$+;uqy!5;)gzQ8Pv)D?V zGb(Sn*ubk*szamXGHI_*X%PJH<37;+efF1?&Lkk5zx^WKlJ^if8lWaGRq-)|;@qS3 zk72EUVzq@L?3{Gh3O2**wt9A-`1APZp7rlsy7>{fGS5%I?Quxa`anY8{OE%_xi5Zy z>bmQ_F`X=-wAkR}V5*+^UZmz}ppy2>K>((WjZV}tQ8xI&2ZYq@;Ih{>oAT@I>i=Ml zpKr5m$8+5@qa!V+2X!gprYcSG8BMz$`a2mUpdvMJJOvB%=Zh|>1m5(MWu?sPFVleJ90N^a}u(SQ+Mk$P5=%@vkO z?Cbu|=`W_x>ki+EdIzr(lV-P>?=lYmAzi)IXAXuxZo%z!H~byFtLx{KQ~aMpKqE-dElN~iqoCo^qL)*ss#Qm4^Ot;;KfIM2|3Ek{QGoHmQECEp@r> z?tM}}yU{_+6yM;qNYr3z>LYa-+fIyT5%9Uyo+Uv8T2)DLOLSeN;Zu1gZ5G1-pmO#K ziMU={XM&z*NNmI&u`x6Md<$>$w)&%bTLQmhYNpQDF5%tj(uR;`eCzn{3wfB}tsOB; zvblfUdYGTc_mkaO*i3~|>v`Q|pPRQD&$-#xgXXueQVAq7&L#_FW>jyBa8B&g>Mgl5i*vfod>+0!W_g2^1+^T*WSFz_S@y!47U=>#L}4}WcU3Tz0CfZ*0YyA z5RVCXe@<8Ja;jciU_GonwB)C|R-04z;6J1}zdMd`n6aF9P!Me#yE zA=ejp^BP|)dd#-r#mCJ}x^&jyaSP}N0n+=c$#2J*d-5G-ox}VNd?V~={;f0KWV%U- zOX4?oemF&TerhL&UTWrzQ}2OJ6eug7>`dRDv|A?xsulhe^$AayzkE-K!zmf6^e$eLLIeLsp3FgV*gmVS_Io&*aaZO2Ve z-29I<<*nc2H}0EJ8E;&ff4hpgt$&e|0t7~YsLj6SK(>t;rTv$vlana6cUs(s7ldnE zyA;fQ&#_D#gz9kEqZm=FUj_Bpo6{^*NnKSE3Ai%Zo8caf&^tmU3Mi>c#OjxOQ!14_wOJ_ahH&Q9pt9Thq zLaml8{)5qSdx8zRTM671u^#DbIGoakH!~>4GP03N?0>PPFto=vp6puUV^B3-ql^RY zS2(voioe`?<*Z@^JKQIRD0re4tS%^B;+y~tN~ezV1z!(7Yvo!hm3%g_-pMbhet527 z+Uyo=H%TUj!z*!Wmi`cGYWoz@;_`XkZ1DSkC@ov}=RU&;?^yDCR1CVx+if7}j(X2PF60#|7?k7YsY+QGIA5@DRxj%Hi zgzj)n7`RQnLczEgi z?fg$&3OPOO%@wy+Io8zNcBTeu_L0G#CX=~zwa!l=YR+vr0R{)F5G*|D#XD(iqU&mt ze$EU^uOi)h!eehtlQH0eQ{7}NL`?v9@R!_@kVs`(E1Hbz?FV%wed%5_BwLPVN;n-33}Eo`C@Jllp@OKr3cqyC3i|1B4!(x{L0w*mU^|-rC%UG z*a{mgGK*A{*_lYpb1%hXcU)dA>%40|U_Tq7xDVrpa4><+pa}~4A*7d`O-;O*aFWKH zzHtZdiTneFVp0+Anb66+*b7x!Vf)9GDPae6(T}7&=8yuXyRRSnpyp{JX#?9-%kyzPz2{7c_eEBJ9Ooc-#h^vHQfEt1w9 zfdI}*X1)>mFO2oS74_%dccyr4mRo>U1MzIRy)0aZ+nzuBZIa7#Gb2FaqFHyBEA=YU zAb{m0u?f`s&9~Y`-W8bRxywovJMM5pSyMPo$bJXbIC{@3UDTz9XL$+WMMsjW5h%Cp zCv|}B^JD%7FdsXIg7E7rii}|#(0QEBovC(0=oq3Erct&+|0n8%{t_t$YJ!RQzTpw4(N`#tlvf>yjQEUpwX0nQITmy2yVhur*h> zH!TCP44U`9pgXOHQuu;Amw8;;PoPgZH2y$fjg?d%MkVml=6+ffTxXJ}jf!kpuUS-M zG=Awcubh@q&IOUfx!pHMNRwG#nW-A$5mV|?XCHr?81S|+E&drAh|+Ntn@THlK=Q?W zoq{Nm$Bq^C#L`NNo1U@Na@xgTv|`$i40~PzpVu!8B}=zUd_lZZVxt6x`(hwo#fsIzmZ%BWi&)lfZdz7(J|f$ zIzs+fFm<_GEjU@#S)h;OXZy#Y*mr>g?5`~Q@z%e)oVhLf)Fh+u7^4CHAM~6rfBWH> zQ^nkC=*8UTXv2$$D0#Jp9)1~ULbo+%Y5TR*%Jm96`2QM6P%w5HHqXv}NFPT0jmLG> zVyypQ4Jwq$_t=Ddns6qg#shL@Rn#YIl`IZeGJiuv^L}pHM9JE(5?+B#+!~Ii)#aLz z=5XGZtQLU2C_BQ>g9yOoNd(XWk z3O&aQytevaKZQlUT=-)(oab^z7>9mmYC$O*qN0&BkFp z0X`Ldt5@4_ND;+3gioOzXI)Bx%u<=Kw@jnqctl%i8z5gpq`|w^$5b>BgIbi0RG#tl zXOW#}5b!YNbv}+5Pm6d4n+}nfK zPU7^&?D!^__ZA!b#Cl^G;4-atu5F2lG^3*i;Krq^O&t+u^_QHVIi}lR*rIja2aKT#xi0dnLb;jjq(kl+JR;l2>k{0z} zw+sBezV5Ufr(R3i@LW8A?emAs4^(Wl;Rf7`wg)o0Lt6MjYr~>`B|awJ1roS3?Oar2 z&;vVR`>CHDOaacDD)`2z^Br#AWN9VbY_N58+&dUmB{@c2-zye{MEx6@yzNl+`q}7F*a%t()L9RSnb@V)*EfS<6 z9QdH4s{J%6kyE=j!3^mz)h-U?t2bFIy|GC)3fV6ga$4wo9J_p=R3UwJ_GF?}KTD_? 
z8ZYj-bwy}o(+7-q`&WCEC0bvqr#?Ht)jG~=A>3u;aht=&Uk5+298ruREqJ;HbzzpB zO>ug0-bWkBfM~kHC`PG^zq(>yK4$3>$g)-?aLbB3rp{4W=%nAen+dR$&Dhz@l6||> zEC3~~M&wL)oc&ZNsh9Joh<+bAaCQZL3E|1&u)w9gdwm1S5vB(ka_gd2B4mio5j5c@vOe=?_Qc|9-soQ zK{G6e&FOgz3$Yg`n%mQ5MSz7aSZ$Z1-{JP(F%C}M(tN-3fk&~r_O{GmnDg21xQS+E z;!!^EqI3W6(FCjdv!x^c{M7p5BDaCLE&HY3kOv^8W=w|IB&MOZi;_W0B32<54caKK zwtx5l;V(KZA?j(+dWhtH4hEQ0CJv?Lz5YTRr}TIWUJ-jCgg55Y`JG+oM7=2H;xFwl zt9Io&QQB3)fh<+6>9ouEyB`;%1}0}*$m&%jAv8KCTT9K!D(NC>S7&SFcGC;^s0Zc( zz7rvTvPOV;8)f*vi4Q;xW91u9dG(eIE=6aiA3pj#+xu`Vg`@P5Ichj+bd5vpvC{KL zl!3PJ5=~0ETprbLNZTZ-OCiSpD%pWgAHAo~%=|tyFyM9=ivSHMKjq(!R2~g$MYk=o zHxMyg&zkF>nLH|G`+JF%eww)S#OqT%i9GMJY}WMXV)PjGwP_S;^g=IB~c@WxWJd&Y@Dg>9;{j3x}S z9kzki7PE}E6%=f<-6Y;Eog zaf*(G&5Sqg=sL91;j!erdT@XIa4|BLKnr>kzO7D%p=!Uiq<-1!oYqg|^SMe@eYk>` zYR;nCuyLUZQHkeDPhbG<2sz8DH-;BB5$6s1DR3)!s+^?}?vv#@FQ)N;S_*sgt+1TZ zP*8t+!fGRg&N(^oHYToAx70@Up6lF>McOISvJ#@8 ze)i&}5WU3LS3+FViv$#TI<#9vR4Q+<+%gV~ska{=)DJ~j^Fa(kUt1{RBOX3J+LI$R zP__EtjUP%+Aj~35?fk6*$m97+{TI;m}mR#wv3U0H7R>>jR__1^VXwj0BJC1*Nl zfvegXmJAflj=&+Uxs3Eb(=6l%%Vq+6K=fYWN^ui?KITU5W4i&E%w@N|t#Jc=Nd_2T zjlS$l9P@~e5I4Je=I;^AwzRr@g2b!g677Kg5!1!ti;?PU`1|3@@=*mysMfi}@56M5 zHI2K!`DPr1>ix7~Ec@%B;VQd$J#q&KeyEm{r464eDeWW42uNzNi8hNJZ_5*kUR)3z zDtSP6i9cAoKYI5lO2uvr8v>O+7Ng_LKQ&f5QtV>YV)zWfkgtK?N=~v)pML3!Cc6G& z)w-sA6gBPk`C$0zFz|CgSz%()vNgzjTet+t&en1DQv3jakZ8$H{y*f`+sAjlU-G<+ z<19mEQhx-^{`Su+eyvmGAIFbsaH^(kblVu&1}GTG@fiGVlye+==UY#pVma0-SmPt$&QwqGi1wTqe2lEsb*;Tl+_vuB+n|;JUXASR<2~<+>fB1l zo?v{V)1r0i>F}HR22do+#2jwdbmo1U&1Ekw{8@w%d1?cm>Hgc`H1(Pl`9^uJ0+km&J4yZ0MqrrQESd?Rwkt`e=00`f9$Y9JS;< zlI)B29-60aIoz#M-tb*(IQi3Yd5FxqZ)Iyt09WjgDyZfmjcDb{chAS_U;zaYm-&yt ztwr2P#>S}m+e+J6Rlb&{26jBLwHY*s_%Zw@d-NmgP{Ug5G@Jd{8Ee=KLxIet^=V~s z65)yBXav~_wJQ|tUP|M>F~kPsci^a4^hn%8viYB~HZne8k9ulVkYjX`=4KV^P<}EN zu<+@8d5E*7GhY%Uu6aWo)3&G8W^jM|(_wI{lq}56#i8%w=L^ih2tzo6T>o^>=I-Lu z7k0r};NhFE0*ci+41Ut7?)6_*OkBNYwlB524N64pCbaCQp%TCIfhQQ4R3AD(gOw0L+oUxmo%(PGm8>%j)s&|lKDlD zI=kCs?H4r~{%4{ViH1!N8_xAEDEkW z@{Q{cspnf=8j+Veg9ZlJmeMR5k8Nrs!WYYSio;SV*BO^pw?jg@eH8Wq`A8zzr;I6 z!SNp8|IT;bQa%4UxAv7LTlL7~V9t!)ZV3>*a*QkNc%x5;V@_=ESv zGs`Njl}4&wQe-CXs+-GvVt%QX@pmz=k{&qH{a%LXS>srdonq|OgB%P`o#RdHmq(+7 zDuv7C)-BzOROZUnD)<0p-4%8IsSurZwkoJC2TQ7uLlOfKwGH@@^AozSX#y6T4@VoE zJ;I+G;2#&ccZ(mfm;r7(R`*G@J$TCXC*XX$Re`=)Zwh3kF&5L#UW|3Tia*VBxCM{ z%sU(`zs-Ih%WnIs=KexcIG92Efio0q<98)z{&u*~Mk3^H4uSQNycsrk<6QI`@Eb~pTNIm21*H9`RyyOGBocS0r)gW3&cG~cu znZ!TS$wgclM71SZnmcKCuQZDF2nX|9;@;jU?LGX)IMRz-V*;wb{%- zoA}NMidPF=Y9-M4X(yErEBahn?p2yLYDrz}1J_p1{c(q|j!kUrwa?V9bt{hHWX5eH1TP3x?9_DU{67*qA3MN<-@l|AhIXCF;OLazW<3{HsXY0(h?4Pt( zG8@5>%EN*p1qc;S(|C7q@f%T(etaOJ#Rs%~|FfOdEvA;}4~lfsi=J0edhn}ze&#q5&@9{&%+2)woZ z@_1tux2y>Y5iRW>30{d5NZEId&iWw=xQ4~X-RJ6(Ctm6oomtdr zOi9LhX|VBx*CZ}`3meoyKsaPQLyC99VejeCWO9qQ*0U*U9br-xyNR3@!x_2BMtc6}mQjIQImYYS zBtLWIBaB)6u}pKw#2m11Uf;vG-AYIgSaJu5dNna32`K(meYeq9%=CIbYNx$TMjMb? 
zpW(Dq{5cPhk;Pt>$fd1EMyEfpK;axpfFH_~g0aRQcpCQvat*0?boyy2D(t2~lhteC z&ch!0bsTXKH6fox6gMU5%b|He_GV;T9^xm4wn~1eV0> zLb{|eJ`6AFOObX-s!nsZ{#8OnXt>xP>S;FWdx}f?L;Y|SLIoe>u7}RfcUPq1)Op_@ zrME9{g00L0S}yh`z1Ayz8LRIU8_d2VCL*Of&6#9$3Tr3{s}7O!UiBJ1$?3G9v3O0T zcHHp1=?Y{eKDfjH-+<}Hm*J1S3LYtO605(U5SjA9Tw=|EU+uLdk@f|}of+_=(`KFO z-L+VF-DX~thGmVXmFR3WNHt4@VFEqk3yF2#?6>-rjNKD{TC%TYHE2lXoz@FNlujEm zTHGwlu^nHXnYh9>Q{0(yn`U@G$;BhD(z49AYX-5=dDM#J4-mYa@*To!OKfMWu^Teg z@VWq7J0lMWuAiJC^KqV*)eaJ<8AZuX%9zyIzRv0)q%rt@nBVzB{?8K|Lz$D0n@|_x zbDxK{a+2!5*e^QFPD7@=l#2U{W{iDI>4m6FjmV9MfJhQk_4~1j60wu#(E7!K*#bNR zhz|i0f36T@#$rS+$&U8=4PWGkp4GVzZ$omZ3IdF_A0yP$T=up;32dDbqpZ)9(8wQf zgxK&!7uqW$B7;k3ZnlUj8#wr>GL>c1_z$RJNGNF`*=*sD=-7KisLyC?P*M(pSp08~ zKY!Vu+|Jb?a!Hihjv@JDT#c3b(`7AuMShU2g8xY>GR1zN=tTF05+Y%6btrW*Q)t{V ze-m7qvG-9DFjR{?3%6h#cb~5bo~SdV+g%(}D}xyvdL`4Q+Cx*n@5D_3b7%AL z#`>^+Uw^5AOqN!l>Q2Mrj#1;gAm}VY5F3IA70lL|ai2?Ok7|>iJJL8nyFMiIBE>{~ z)OYJQp^e8z+;$RX8A3r*r{GJnDFG_pO!k6kD*}ckWn~-Hg7Jyb zRQG6wAT9JHz(y-n)Jl7)f=V4gESV>h|4DRx&RvvB^;&{>Mhow*>xQtJ@zn4{Uy!+% zxY;ZnJm=GDw_KS5>IceUDNpycGYKjit%L}`P!t~s+=MwAujz-G4JJvEFz<#8-`eZ}h* z=o$+eno98XK_^F}2=^t^(%nC{KcIw3ln0uLS8=)PwDn5F6ew%|=1nkbwZKl<3&uE1 zd>T*^tqfE%u1~5Du3hf+(KO94^*bm~N~B+QW?=nfJ}Hh^h~&ST9QX>%O83)~92xoc zt5DS_^2+Y(={$P|9%_o`>jhN>jI2}`i++`Za_N>1=<1)kXEGU$c&yc^6Pb`M$Ur9Q zBr`~~RrA2}q_jc_geiJ@ha6>vgKlsYAq%i!kF?e*CwUJ|HBOk1_FCW+T=iY2wWDy) z%=?G{fbNbte2V6GwQHAntV5GRwDJ40q+VV0yv$wk(JZ6oh0i1Q7O55P!aau>T-p%C zbVWHsqH8RA_6;^wD%q*b5uiFIP#_VHmr#qvi`0Q=HkM~|BlZO{FjN8 zt8z$qF`Bl~?eIrsaoJ$6s__6@FABKQ*^`0&zytk7fB|b_8B#$DjhM~ulIeMyO-U>A zpRS&A)$t2nrSK|K05(b4ei~Rw0>qZj_<5!rO*T#x>`tE+_Pm_A0C#qk=daVGr@0iq zn*ME)cGr~syrSCf$cH`MKN%sk&xfQ_&w^-2!GM%%EWxhk0o#6EcH%(D($I#Kc^2M0 z#l>tUJy>is|Z z16J3tkF;GJCcm>W_v|@suA2@~L(eJBUuR^>RGh^>#(nt3#mV(v*m*)OP1MEtl4v+9 ziRwfO*N3swEYAqnEXh-heZW(KIGLqASSe+_CWu#g3EgJ39#ZpThn#}^b1`Qs)UE4R zc@gh|THM1!zsy@DN5cEiA6qeXYlXup&Q;bdwfTlO4J`+1ov25_DdGQPTt z(4Fi}r|{NW3`_Rsc-J;C#;KF%&w#l;F3JEJMO!x<_(GkoXghlonO_nN~UHHW}2n|C=OSwJ-kB6x-nE-AF=zGe()*+FKFO(49Sr`a)l+LTzl&1M zUmb(y=E8iY`HIn&>B76lHj*ClS^T1;O_Q`?+00pYE7C)z{JFQ={G4UlJu6@m%vp@@ zDoXFBc&jZV9BH!{bChS{TCF1+nY*hsFj~5@gKo^mm`j|&k$NeZ6V%&#p3XS|T%~e9 zS!iAx36(^A75^h^n%&Z_|8B7&=x~a%o`FyJ2;Cwt6+J7)UDd=qPSotf@Wpt{dAY4) z#JRG}jqo<++N^xr7|dk*ycC(*t9bb0fF54yKq(XZX>5;@}j zn(@4nc^oG=Os_{wwujBPM?iy~a{QbdU!PAMiYD;<#G6g%h22|gdC?EucRB&*gD^C(mF1_8p9J9VeJgj@M>(uqLU^HE~$LtvYi$;Q7Yqg zJrS%PK5~P}G^g9-7-v)86mJcweI2hI(F9&BSX>mDF<&c*Cn)CZ{S(c%!eO>_(NTY& zEGQKMb+AC(5%rgHvD38O4u+XZ!)?cz<~pb=;@QOo^y0AhaIO4~a-ZZ)=O18M=5kwP zzb>SbTt%~af~e-Bj4ZMq1sz0xoFr@Xy*f0$+8iayHnXO3nvEeK>j*Vmf+E|(~bp-35GzF2C-c6No`0)@32kK`F(_Zuqnp69<7nwmUH`s$9{-WbEf#OzYTTYU8U)E_ZJs* z)#A<}MrC@EHj9v4J*UY+bq^lRCH&`(E>pAolRBu&a^J{%40LmNvAJxsT;JUgZ)?fR zA~%ou$R20N`LQr)4D(=dN5vF5(^T952(LB6)+jKp^uyysU8I!%)GphXj%_i1v@9ar zti~`bN^DQMy+a!6yyoO6E`W|O6t9e}-0_O@Y%w@$gpoF? 
z3<&DMB2GlL^LM*>{mN3ju5jVEHI`8J@Lb3dWG80!q$iTV4f>FK?yOR-puk++1XUQk zdT_A;SSv{G_F~O>wjoXOg2i=YRU(x6kcvU70^qLMmxS zd?-9JCn|f=_eA_IN_KNNo!hkauGh19yG~y9ObJeMiLY5MUPSKY=O;V(Rqe+VYOmUM zE8DwORO)txJyN_z?nLXCWyMNe2z3on=;wd=)Y>vqGph^AS50piIc+vdwKHKT)zvZj zF>Vc)CWub@^$5h}P@ga&;cHoHLXGF}U$tlIva`LX-^3&$lT~opH}uD8KGzd;!BROc zgNXx&TP(5Z%{x%+bhSm%2(&4RD(-TR-caUbg5scIQ3T|TmyB6W#MwLQH#^z<3EJ$@Jwm#0|XoG5#gPjnmg zXJWTA6Jax$SQU-mq)fV@**qAmRunglOS9FWI4a$n`!)B%s8lEQ519!*+uqb|ea}yh z>zpX~q`l(Qqo$O*(b%PR)Z*9bNb(GQgX6+qdNt$#$?S{D@F`v7Z0|-gD#oFxx?x~X zHVaPf>}n2pNluHcH=bFz-t5)4FoW%|)4>im{)tI&O~cRpSB4{txn-V=iggYlZqt-qz@* zFE6raIIKgp12Ss#E3C?Kz7NRf-V?k&GMwP`O}lak)dp@=NoNR*c87WX6C~Amo=jW= z^T)sHk3DA6>goaq#^5D=y~JCmgqvIJX0qKMK(BVL^ZkQfT8|xWjNo;$0#!}WZ^1+5 z|Bu=*KkV(ovCxl_!HwkU1Vd>y1LZj-DcteeQxZ@l)wLt_8jr3@xi(;KXILpQit@6< zuvkMT>sy^C%6>ACLgMfhO%Fh+{vB?&s@<~z^@|{+hgn3HmV6( z)I+Y~G+hOa0PZeSZ0H#wAsFc;HH=YJtJ=hG(!CBy0q@*$UDwYN%UwS|age`0zBJ9Z zeO+Q)6zFX(D#C|w3+8-rGTs-}WJgT-j+v<5<3YXW163w_C0EVpv`@UIs5ZNSmHzQ= z8N%HqwoAq#o2B-J*>1A6Qc+KHD^G{fVXfu(;a5o?7rbfsvDlkyNplSFfNM&u^&HT+ zeCCdii?H2-k@fX)g|iWJ)kttrfc=WmNRzELcT41OhJ}xLq3#!B%}^tlyzh{~e%Pr& z8Xqraacej3SS-BH3*r4brK1yNZDz{Qvdo~c+U|v|e4E2>l4;<_jY4F7tt!jTGVJtz ze08qEe^veY*9<}hFDqgFl#|NrPs~&yi{j&owiDAUxG%rL2lSr1ahqP1nKoBq)t*7~ z?oz$<#jEA>$qr@lS*I=qUV7}b+Kc~>o20xYXvQlR8;USLGerwjy9ZZ%R zLM2Z4t1#7n_SxRc$e|XJ2VX!j#`xT9(dlvMVEH(c-4$I9Ye6m(I-W8 z-bd%AxqEOFP<0FTw2$H!x9&l`^J;#$8;mz77yVZoR<$iygdc3q-(-U}4bhJi+EG^$ z&_a?@LNCk<5rDLtVM{iN&$$yoxJldWy8h*}jjUUw7Io2h1ZIF2z;S7|Cce`}ZjOyg zBKwd`xvFD%xgVr2c{Bko^EB6}2wC#ky?xk2&6Fl17M?ev8>SgqMkDxgU??>q+W5E6 zzgjRmtzl`kmqv+Ch1@fG#Cc55YiYVYniH$G0-y_hAC$@3TQ?OY5mw8enE)QCeFpmr zvIv>A-q(zshZ~VR$~|9gBU?!oV+;0Z=G@zh>B1l zKJ*QlG<)_Zu)FdZ1NdrrCldnf=ksSpCj*1U#u_jL3ieHBvk!a#AqTx&MgOeSpRWKbNQU*In^h^?M>)s9r}_~6#_bV zvD|v{>WI-tu|8G+4G8)HeXwzPMB)0{@1;et&SIhKHA}6`h`~^X! z9=3?rr8{Iar_C5K0+o2eubxVhR!rtY_85qsm=@(n5}0S4{X~H~wYf=~+>enUqxq=) zVaDJdMY6S*LgrVA!1cn!<)W;%H42=EP|e7-cBeram5`nZ;etneuythvr}mTb+Q|tw zwgRUxU8rX7cjv5B_a3sH8UiO4VtwpMsi}Mus0|z%(mdO7xNb;DxxX~RTG{ti6-iD& z5sJ}^b*8<*eICR1Ptk(pnT#I*caon~JK|3(+$O)Zes;WXFBdG~O;7!$FP0%%Y4`Zt z)r&dcV583sD;XOKM3fM#a+7(Fj=D6%CoC@P;0Prm(Vz@#ICzNB!(R!gXir?PRc5yo zKP|Fa?IYfw)&BED@@RvaX+S&~CI|p2nC-N^M}Lu--rD@mt65qONUyIv%HKv1n$+Ri zFEPSZsglHLFEw5`Uv$USfcmJm#|}Q2Uri}&U4%DqZ%J!!8NNu8$aPXTT5cR?lrHgT z!+*aY3|#~Mjq@j$d-8#3NjmPZXVq9~nROxP(^9T-{+jhVXnR1qo_=CCz2qBgxMT*G z@5+twtYvfpETIcdnrVf?1dGd!>Mw%cEY$CERTz84oBJEGw(}UnxW~L1j%|Ih4s;0D7<>r?xoB>lQ}Su=keo4U97>|YXvI=|CYYKemt5h|V(e(aKO za-Hpr>=kY_0AK#+LrOUOT^Hd>mHhIf5|Z-wpAOfR2xCnZ^T&cCQ`FR)vWtmR2YfBX zA&FHQY7LI?9nPIpn`?H$)kG3*%a$jYrTTa1wj)o6tDgL_AXqeDq5%k)>|n%I=gt1z|MX7M8@Lw1E#9s2?39u*D9jd} zHP-c%S4)Lf)Q!CHuR^$fZX{nhDL{B*1Q^$0T| zw4s$La?L%GCYe^bwsLu*SBtM%C6bbx6D;oksGaY>e+$6e%4*VkQjf=+hd|@%#$?u8 z&ei*d@|Z*AcBV?QmU3AN`Rz9=YJ*2_6SuMKuvkB=)kZ3|9rOY$j|ve(cu>u+4}jhf z@@tK|51RlgmEBtg?iw#XW1v5YsiI$Tb|VW~KEOB1N?ie%|c&Jw*vI%5br_p7%r~dHf&t z-ZQGnZrdMyt*8hn3cR2c8;CTeN=Ffqj`WW74nlwc0Ro~RD4ATuj9*sFKQnZBaXd5f6?!CdpC~7R_l@ zs!lszJU~I*Nb`!0-Q~|yM}1+Ito;dm0%>P>9d&wvGA$*-XhY$Wc3To}J<7Y0`&J*+@hs_gjb8vGG`D!Ki&Cpf`4eIZr%Pw zcEXL>Cj~YGaNrROU^M@egz)W7>{MAimT2nK*rju;dRU~D&q)rPyj(6|v{$X+Ga)De zR&RWqosdk!BKlL;MklB?-pJv+={OEfi4uTIJ~f?^+1FXdW7XYYiTBOs&Abw-x(2Do|zT3iFe7fT=;`_#g3kR68^7798yG`7r{psrBP%f_x(TMmLsZ@4}%meh? 
z14;O_j}NDOiS^&!M?J|3B}p1^>Rg(3zD&b5ujZDDRw(Uv{*H%vz^h=-H!T{ zdPt5zlBi1yh<#gR+hgIDdE4oPLw5up!FL`kV1Ctqs~OJ;ZhaN}tiBN&Xq+~`sYm>k zHGXvnKtDH*ezERDP{_WsTAxBnHf9iae{j(K!?>t{`GOTyZQykK!zAfkCd$NXDDU<3gQo>d;$*uYlG4e76PHjYqT>X4`w{f4Gz@@Ag>usNOS_)` z%ql~FB4%mN>@5FJ-Gsx7cMr;mgghnH_BZEN%)7Js?n~j}lzx|b8e!1-z{7Gdz3hFj zxJ0AiNH(GV_r9zvW%cM?8<HZCe0PV( z1`il~MyoxzD%Useq+Jj-OJT4`M^o#_w z{U~pzO4L#gf`=I;O$*d_uyy0ER#y{k2AuD_SzE)reqw6HZzRk$zb*U$EKA+d*h1_W zP|4e`4?%8avnB>2YN}I-OEvYNpE_S?Gy7uYdCP_m%RVnisgpk+qQE_hM&@;%!9WrD zL@h(+(L;v=yvLtn$ndF2Qf$SZl9Dukp~q*Ca~j09@5j+l9b{#1ecbjnLKuK5)$TL7 zrsr?+BQR|NZdIkvH$4K6-J6^jHW-Y@--HB6nF7ZR#Owr?((HDv^m5vZ>3)D3k;=!g zBjB;ZXI2$gt%0a4gQfOX^K2YE5?|C|0UMP30i1?!O~YB?NdJOzI_6Fr+=ni~vN?k) zYv$-b#JoSEvr(p$KJD&)U`a2$+^rTjKL}{}j>(WBZPL_U8eDHG2jft#xJxGaxvd-O zv$Q}d6~DlB&^B2D-FZLi-NJA6s~+~v?Al(I`T3}2p~UvVz*zIP8dVIQ58 zew+Ut>Qig)dd{ht4{9HojhTCgj_-YOQoS{OS39NK6}m-ZYCv zF@smE;3wbq*l&7eYka}^kkWuN67OuoE%%Dl*xu5KmWFbO_JP8l=C$LS``5po|3~)S z`{V4G>6$ibFj*uKpwr1-X^l|$Rh^n}d!6LDKG#lcE=A?pc1|;NudIw$f$VsFHF}j;1N6RWv{6QPji?yxO*#+B|w#>y=?BePs+ktMyN=kN{txmivIP zKnfB#i9+IbyBs>dDSWX0`t_vyTWNX{U2z>V^MdNh$?`R&CjSZbs25rN=fc@{yqm;9 z80B%>NZ#$p6w!y>8J8Wuscx)rYMop@F+A@2>{0=Y*H)6nVU=YQ{B=OGL4$z!+-(luBG&J!NaT zuN9sXw$)UKoKA6Lwx4_ni!33T*qE{d@0lp4BGYOr8ltit+>ClL#MHwm`ez4MLN zs7BUW%=&0toy5HO#Ha7o%-J{;A&jP3~eSLal7n}j@v7xGzsB$zDi zM@Sx+@-21GL7)q~uXnYJwL6+L2C(~vzle{5x88F0$MftojRrVPAG0Li%}3rdeYyiw zPHYA@7}9rZ&rp`y-2im?tYo0q3Z)Z{80o0j|ErtLD;3>ChaZ}LW}CX5BE?4O|pzzE@P#&J;^p}BXm;J zIc$npQAZu0M?#EJdyhx|3(zs?mn&)=3O08{en~!dL3-{E91$lAZv|VT0b0*8&@!&> zj0U58QJ1|{+7v*J@c0SkmN+c)@%W~j&3aXa^q@Y|3eXM-#i#FXs&+qjL>G;+J&-<9 zG%gtNtUR{8#3yOrEFabxTnxb2elGH)@NK2){C1+ zxFX7zbPl0LOmUOJ8oW(>IGsehSJ|=lNL~sZ?tT0b$2nUvRb_G%k-lZ0o|At0pDGu@ zyy_f1jgFMTGj5-Oj!PeYSeB{o(gv+yQRdUnmtO0Z94eIL?s9^mfTGVzI2VPVazucC zvz=_5y?xGW=Wz7~?r?Q&T*?bOA#(aVxvg>Wc07{<`pr8OyIYonTg%CFW>MjW>JD3| z)Uz+ui2M5GsV#&ds$C`_TxNv%lP7#k`vO`(T8PoM0eTx7%W^vvF36wXlyZp;gmn{v z2VYgce6dfz|TMKT;MU-DR%c$<9g>d8gEQV4;q z4XZj1ghrD~jJ(&ArMBx3@g?@9fsPVsB*zu}cC&!8;gXj60%^^JP14&H?61 zx<9E#&R1u(li0o1+3brCyv~|eioxWrkI>axP7!Au#+zKdkLD_W&}IY@qX3~8g^BDk zecTO8(Vo1fE92hCDT4?1+xGe?N^LDi_aIT5>MhOuO@W8+(fuv2v$BVz9zM9zNecQM%N5KS<1BS)K_CotuB{!do6t`(-MbCp;JJJ-3I$kq*B zZDTsVwSKngFSND#WrDRJm-koqYjR4T?Pz7|MLP}Lgt_kIJ_)ha;($IWp#7{@b8~AX zRpw5X_Ga$--4n~hS4$g*v^>V8kDnFGPrC*zM~bDpmt!5qds5V~8M#l7loNP{0p*Hb z0CRSS=(U|YWLmy^y$DnlFZ`x(D8A4l=c>FpbPL|C=HNT)wkkT`46aw?#aq+P_S&4) zT*-XG=P>PN;>632f6?r{P>Ol}Vpqq-%&0lN@~g>u+@sx+;nE@Uw|5b}zp6r*N%g&E zCv^=;{zk-}qZL+<)n@(2xgnDT2`Wqp>=-eSWn#F?bpI~0&b4mU-cxqm1Eb^_2I*p0 zc%~pi<5;%W$K|HP3{wAao&NzBRF>Pg-ephfPVJe7AsZV1$683UfGH8@kxmmjSpyp^ z`Cvy8rt(NA%=XmMOY5GfvUI=02d_nM*GkoVu}|+=cSnahS=H<$EiN#*7?0gfqwQR? 
zejC4YP0^?eTTMY+%`iA43vdqX48`A>QN*h-ari2(2#(Py9-#EaD!E_t)=xSgRdTI6 zI*uKH!?zSDBhp(X2i@Y&jOuY&HW{>N-(mZ{IkdOTt`_iStaCSoyYP7AajUw{NG;XE zCbGH5=TBmE9KTJcm{5fkP=yJ`m5zA$?Kh$PV_RRDay4nRUgR|tH1Cfuvp0nU1?@e5 z^Y!byjjHIV^G;u%ux|nfkaVoI6AJ(~M(M)qj>tuC zE@tR@o|pAu#no&}RS)@E$I0cA&pCOzAEx>SHa*|uq&~7Q+XyuShwp76#fodTpMftoA?#e%P#G_$0QC3W45;WYJ%YH+ z2kWUR>@Bhe5sdrB=)27o!o7e@LXTK2{s6D%mxE+r03PBzhi^;U_VbSwB)B zv}|5tg;RhVlPGf*4U(szYZcae_ZvPs^nV0BWx+Q*1)|2L2I-j{knsuJ{*wy zK^VJa7`Xv3fm$6lAG~xz?@c4UdNvdob@fi~hB(@#Qx>U^ZzRdd;CZ4Txz^gSFpyI) zO{yzP*}L7ESUS5#crI=`Nelan|&s?g^s$ zIoVp`OsieNou3;4XaLONvD1|(p*xM;ZNj=fnhx~lQXlS#U{EQ;M3b0{Fb5kbF`|U0 zh!yV~ve?T`C}`8NfX&Zk&3DYHqg+vVTHn!Utv$z=t)M3W8}wpP8R&)ORX99C~6u*%T(p2(eC@Zrk1q0sy9hW_;T;{-V#dvjEUV(S-@r<|Q|MrbFVL6bS6GZ<>VE(slBrBeY&S83UMk_9#;v_o|=A0}NIR~K6DRk9_^QJpu1 zE^%;`wbBoQCe#uCt;H+1oO&t1?=*|3^Ih}EKdlV=MGDPj`#{;{fAF+Y2bkq0} zJ+oG7>cNc79{Hs8sPJVDr(oc`^%1rM%D_+>bIAxTW(GwV2(-oJdPRB5%!6?u03Bo} zK}4gWYBJ`fFU^sEAH`r$94) zf?ZrgW~b&}Qvx5hw$z^5OJQt>CCkH!=vJhWzEi`CEY&1IL1GCk4^W}{ z9OHYiBHBYk(-`|WN;1G~ylv!@jAF@6Pj& zRlHs~jf(LQ`}g@~+fu%#``_o^d00z`Hl<=cGQ!7nMkz_QsX4(eSoRc53fj@a9$rH% ztra>+E)T4;R+j9rOc&tf)+ehomZfDg(k2WYF;x);rV@mh1iovMw5eww8T7>q)fdER z-cpf_EpPPBY_y~H8C!pTuqmFaG(P2l3qWDko4iV-92RUge8E&=IrinUU^4)PoHRVh z=Qq2hn-%cT>%2xBlQ7OfRb;GcK=w}%w$%&3% zb?3ZuYGl*`Xlf=bbO(&17MfRTip&(2V5Vg!55CxFQtdRBQ(UzHMg+O5L1fNNcjd5h zNvp-$o!e$U+SJElUrsD-bn2)x{8|;D#E7p)HO$R$<|!wNzLDb_7dILe@(`Ze{g(Gj zNV3$hx%;?GbbBZ&IV!nItI_fvx7(KzYJ`2rP*T0~Ed8tv=eUgD;d^T_Sz3G&`>A`8 zdGag$y170(ou30o6Os(aS!MiZK7KN_Tir0Y(^L+^0!8@G?l1L3Z87R@Xg*_6K!Kyg zW~nq_I>Qdx^mg{krer-RQjG zJ{P3ehbBykW>&G{h_WC^;_i$~X=6QZ-Z341l$+~L`m^jqZ=8-A49zN8*Ly-HeQciG zO|UWcK#ckZ*SVtZD@I~_V!9`7!nN|Wop&;GCdFLL1ORc+x=sr($_C@hJCl3~`y)$h z$0Ep&kdoU9D({W7a_wHwgxO<#Ck&Qc<{MBgPM(^I+}yUEha>9SFKo!J^S(tkKiokm z#FwKSVW>w)pqQ(Ojs1PYs#5MdTW}%Tn=?+Ue#i&nk%z|Z?>qZgO=6hm3JLoOWjB`h z*b8!<8!u%inD$-liDnlnH=ORvC>S=Fem z%D@Osr^KqJi4Jiu!=Pl@Xo!Rz70)n5<1CVxdc+CFuE5r^6H)Gk@2e<`4{S1i=%Th3NSXZs(P27qoe< zk4b9LAN;E@vEaE=s?PQ-9*)@0M+F?9q^m{Tu~W6h06eC z!T0`bxBp%+$i6dD_)?`vGb;knBYS$Vgm;>5@6O5;?^yZ#DImQp)bvo&c zg(@zq#L!>-8!}w!n&u;zNn`(%`B;(C-{;srg7B5zma{{M7Tto?Uq9}Tu&cgB{rWGC7-`6X++|?T`4qx72XwLBeY70t|CbL2$sJ#l zTl+u(sZ#%g4E*md{{QGnuMmGwZjL|wPs8`?K7inD&ifMcYI*VhYnZ(9p8d4J zUe|DA?!Eu@vxe`14UJJ^R+!;Gt|=fAa|B)h;`Cn^{cBABUCQydN&MH8`u}i69m_(% zr`_a_&c8lxe46ky;giVSJZqljP04s=txGOOpK1dHX6s|_g%j&%3HNU~0sru}H|E|C zPRJ2_c;0qsKpf+0LP%dB1j^J4;fEw~1OAxQoM9=9uld>Aia~Pb%kwJbXHT8IBKOC? 
z`-3Cm?1epr(ZN-%tN;E(z_#5|0<%4DUQ~96kXs5l6!HX zlWqQvq7_}=j#Z@xagRG(93pym=tJypmdSsGAhCkES zxj=(8(F#iDR;1mEy#{5k^d9z=E|QK^81V2 z;?5kl+)pZ%Nr?qvAt7kCT$vr*k_Rs44#LoJ_Qi<_MJ?Gs$!xWah)K**59 z>|&6*C!T&cxDE8yO6=DtJPKawu`J^>U;G$5T6N{d4IZ?A^S8$N50?Bf0If(_RrMnv66JZ(8Qo?o z>UK2SwB{ie7cb?z``^OhDL))cusexw|P)qM@MO5bv~^v5|6o({n-Ffj{FOh#tJZ{O62D+u<%X zx7YsU?oat><;mj7H+56W7P{Uxa>SUf!M}^h&$T<###c_CBJ!Il~3uFg=J( zfTH$taY?!NR0yAc?ROPsVf)PLqE}g`1)aK0?4(T?R|CZLyCYe7Ac1H;nj6A#>CzdW zFEb0?J=zmuj9ni#6i!e%1Dn;TiY}W5sdMvi7OuZUCsUg zD-!F!yE4q>D=>_4sQ1D$j^h0J@>I;(ad7Ino?%(HsyPOJ6CID0OM%V=5$mDUNfQva zQwE%JV!S$##%EY<9Z%e6xr?nXXZ6IQCO0qD+@N~t3nVTT$4+9y%~YDwvK`AuF!FF>SWHm$P}t@p+3zD-Ro2{n0O9wmuU7DH zwzw;^r$eZEr19(`es6_cCk%F15bU!|{!#i&y1_C572+EUp8=l$_7tkY$|rrw3B26H zGNdekv0MvpZQI|+h_m|LH@Yu%U5GfO+>f||%ALaanBR_Ww; z@hBgfL%2}|SUO<*1Z{@yQm;b!46Cf&H*UKx0$uyvF%^N{@FlKNM9M?=?D^O7z7BIW zhIh^Qr?1~T{mb$YFj|moe@;P6;pWXWr0*~w^!L!#Wt2KlSPgE3e@&Fq@4C>02yj_) zBXu470^V*P1_Xkq09z8t(gpUw+#IQ?9d`}=0OCF%#Y-M4z9F&6*Vy=+Z`d1RgK8OG ziPtx;(=$eU)dLFu)d!rzC2YfO=UY)1Gt}zp=pat-{SRgfraVe71_FBK-PP!KCbMuT zyv7M?o*mYkLhxuLM@i;wNfV4^BQDm3*wRO{lFC2ckmaAT?Q=MtS-;?sBDM1jbC;5G zG=>o97vuR+70V7ecK1DOk9tDNr~ggB;}AS2bG&xu$YSyV_Qzh5FXa^3`g>6OXWvSU zukK4BMRdA!^YV!|!QfQo;TwO)Mxbp5=#iEUq_Q$iR0j~YkWKk#@!S6fhF!7ZK(Z-c(Bj~A?*kZ_S=y#)n@=ter35i>p5$*nbJ_Qw^n_5mIb z_N(`gKMWhqmRH*idc5)9*?-y2;Av7Vu?^*XQ)^n~8Lr*xd1Prf3$>^%i3Vip9;X0i zva7rl-(?LuLndJ)E?hm@2-*>vw?O#{__Ua#m}N{@M9R64feRbril#?P(Xz`!di7m5 zmd8cIXxZWj-BF*-TAMzc54^^cJn2GpRI^o6q)m@}xAnV}@ao;4Va$Av6TM(5{k=(j z{0e^94zghoe(in4N14H8$lgv=xXip4oZHOjL>&9X39AP-p5#lee=R6blikHv|5i2B zFp@>&9^}ATJt3Rc*sw^;hOqK>a5q)FUA7Pqc!_5mbfZbZK2E$g2nc1*n^fofx6Uv< zwD(1_rk78Zo8EyO9G-559Qd6XN$)P@_|$#c4K6gma@$Zt9-foFv0pP$tgFx8c4#TI zyrQu?=LCC<6nQW0v^pP@b(@ehAk-}EI{&Pk$8^q&#;0{XU-^_5QT}~T#@c;k-BIAN zW~-l9W8I#YM=djbgKE-^(+5Qci_f*iz7N`$#$`Um;m?&$){RII@0jTZxY!HkYE72(m$F5 zr84O;?m)+MN2Z*gP=qMxoF%pyJ(lI@Eda3qCQu*NLKexZ7RztmQCkCA9O;CP_h3{) zLOse=dSb#inUT-L(k8{!OZTD^ic@1r*ksiA9Q@cai`THqV9ZWcV6s?8S;Akzw5XZh|@ z+b!h}jC`E1w%zA)q>1tdVPxJE{M|_p0GOA|Q^P{?_7}iLDRs`Xx2rJ&F-TVzhcc{5 zs6<>XBdewF<`;LZiGf)A#Bc9DyhssM$bBBk;(;-(H#U>Fy8#eQ2BKdU&kUY6D;WI3 zqABLJVSPH_)B~O09I-xgJw&U*h1^i{nM`#($B$dx&|)jxLJU{BxV{H?td~R==Be4z zN>X-DNbE0A2)y{DTC#9rB@WI72(7tq2!~jV=SSM6*Hr0wzq2)%N%HuWG>8izR%*Ni z)*iacl53PaU5m06%Hv}0!4d!rW#1Da|0Kwapd?;ih}Ns^ZZmMEe&T@{aw*$J!u6+M z#sb8V7N2l>51Ub*E=q9qHY@;1*GqTv@ptR^R5$d2_$*9uKuYTuRn{Oj-d(p(OKY>= zGKSw&BDQf6!8`udia&Pt^ZYI%!8~;{{S|4W1FgvJD>ajEJ$a9Cau$0G*q=*}8A0nl114f5wsYis5I*;gDt8EajG$Dzy z&{H7Fr7v5Rdfu^q8>H`O=um%nEXo2$%PL`Qb9EhZ8_+X4xb>!Od;Tb;R0#CWW+-kp z)y2Ye*=oCbLH&Jly)Ry)$_uH71%%y1D1DEh3*+VGs{jK{8R86S)@ytW7?Du5s1@?sd9d#E}2hzlJ+-VVd1y7Hvp zDzeKtGOF8o*T93OVAQ=O*+1QM1S~$1nn8BZZ{lM$WD@|){EJ49Hf|64`CFGnpDSeICi=$Hjf;|~8bRmI zE3k;#FJIhSSEm{@s4(JGC#nLUx97t`g@vrAF1zU%hWG&N)`)>Gi)m;Lq%36~ug5Ve zz4;?`HUq(+f4%07fP_G%sW9^O+Q{Nqj@@*S>aze%sZ6w7WB+AwyTm~NVYpD%YIxT? 
[GIT binary patch payload (base85-encoded literal data) — not human-readable; omitted here]
zgEU$nZiyzy(|O9@OS8n=8gZSHE-20LyfppwOM)HoHVp*+YFi~iI)%nSl)jcSqqm(7 z`qZnUBuGtL+~W<;8k6szITC8K8J<7VnrdcEXPb zwTDLVO6|Hof{%t&t;hLb2p(Nv(JQriOC&>flS(#YXSQ~=5nL*%@(U*`jc0uNl;eTX9iONcQ!pl(^>zEE8=G6fk4j<$QW&8^jFAFVf@Ddl^)@wvO%6d z0$i+TNWrG!+xz98Htjm|*XY2ivJPl8(d2j!RQ}wi36qVik z*kQB&_=FIpwFm$9fRx(?G;)y0VjuwE1)LR|l86(fo?LmP$jsEKJr<=V(6uLt*e~xOpD)*OzJb*A-W$8+4NBb610k0@2L-{ zHOoBXFAq{~w;c%wsE|JC%OQgw9ol(c9Ekxsj^qtQnd=`{3g0_$qe5Mth%N?udN}WX z-`A{Eyx~N-JX#$nSPpphBax~I?;6JFN0(#U!e z%g*}wt=|SEL4KIS3@L zW~`VcQ}k`0&%pDA==ToQk6%e`NhBhvL+NN9Vb(5R{ras3w*vK#VLhh3wIx^>3nB zy++&59Z2LV6|suIL#%ZX{fPdZHKXV?g36~yDgBeJ_&_SSkqk!HlzkxTtC0Zht+8Dg zw&B9R-YyB7{@&#scIy_UzfO)WBV{JNIvAgr((F1&uz`vMAv0UeUFydh*iJhn!@5{t zGua4W;jUltk$I24ER1xq?Wv(wNSso}f~Luc3HDEmRxi?&MJy7V!cHmq>KSfG5tQk; zDIEQ3-Vv+uijrOm;#6&RSTs%MD=Q?vwcwI=$RS}6Cp8Q9Dxgd3Diu8$9}XwW|J&MKV(F`Od5`3Rdq-on%F!w z!Y~Ffg@X7gf?dM0<;bGe_kDt>bq=^9R2f zTwp$2=~R+6D(<|oi!FU>crMX*{P|2fc2KxpYmAz-%kuUb0V&ZGL|A0ShA%e@c$6Nr zSu`nC%eWcKKCrAx*zu?!Mvb6TE6(SjMvFBfc+z*9==>w#hqu?|=xEMeLJW^Pa2Q#f z-;?otEqD{WNA`@NWe-tVyFx`i?CNKt5l$Yv&(LZ6-0!5kzMKVnz6C0AhdP(`=*}Wq zC8@WrZxyIiRerXWtutxiDTFMfn*_Sa6>H_Il~huiN%FCUia%ly<_%_vWNC5tG3`Y*hKwq; z?e*xH%49e_4$UDLFKXbkokdFZ7m9O9;&dox6Gc9@{8^`$uBdUSfFw)9(wp^JMBdJh z`wpmaZr1+s#Lpy_6zpf5^aBJI>lcWH1qK<8yvJE?@gzAOg`g*V%jgCvk}eG-a^q4r zye9?#$?pK~s<_Ks5F)T$2ON8H#oYM?N7_W8yWF^U8tLJ&S4{Yq=S=yPlcg0CDM#y& z9GL)OlSsXzx><00FA<|=VeR=MvwG=Q{nr6ck;(lza9m*1FF-G!LB#aJ!4P22d?{(-j~*k)LH{4xxm4>QgGDFt#vDS!1ogM8;9c_MmLlzC|YZxoDQ8eTh~^72}V%U5aMIN? z_t_-pJYbcqOCTt%NA=EFnUY6*=Q>iea`Qkk{c_729Tr=^(5}}2+eK?q7$=omY1ql8 z^oo?DAS>HiHsE8RtdTCCpNPpwW*(QOCIag7Y@RYB`s47?Z270_^Zg01Rd8Ii3r>yM zFdSjJWjX{kWT&-B{?xkqx|p*FibiO!^jRi2jX{O(41u*g>G0D4Ip%Z8b*7Y%Wie^I z3U~r^t57O>UuNx#Vt*Fswh80=Z!>Q}$zX=rLWPQa(fEgj#2ft!IP=^5&O7gny;J$a z$=)EeS_w?3(Ym%HaY)RiU!X#V=lEURk`Lp!2a*iZ{lykTpiV#145UgI<#gChZ}ZDJ zZDwVH9-+FjK@0>bX)VUgfQ104_a+v$+{upbxcw;<)1LTehR420)y@DNqf+*h8;)M~ z0cV#3)V`Cjk`iWHAwn5p=iL`f_-&c50uV=Da=pT^gg5Ss$}nv7cC3myGr4=DHTpIT zD@gpwUC++xr&=K|_gtqUjqHAnifJqRE70Um6L_?Ga#6$r$~Z*k?$-*KU2d$^TV?j0 zD2Fo10&e~Y=5@;RjDR?t1y1WHsM5}eI8gUUgF&$}t~aFWjUE>bJvS1FL{O^GiCL%K zEMrYbAJ8qPp&-Ho3!u#JbZU9oZ9k)s$w`qCiir6fRn5CvtOJ}GmyLx(-A;Gr8NsI6 zGPmO}Vyk}o_B~L5q!n0K@t*cPSTwL`3UH#8apAHU`F|LD%b>X4HrqS6TX1)W;1E1` za1RjN-Q6L$ySuwv(8k@}-Q6Af?f;pX=b1Y1sha68UDef9-MjWJ`@XK%$$$R#f@KqBb1AIqB5>^S6- z)MTcQzy>A*Rb~xNY>Jxs!|)&0{`?#C-Vm-(w=sr8+JR;7=O2<|2vAvAmPLN)M$WJx z_bSI%uB`1Ej7c_HA5jwcyL2z4`NG;onjCq2YO!jS2EZ4JKpQ{Rpz-n7bos?*3ewX; zPpgd{hb^7q&iB-IF;hgV%?Znd`KwEP@}WP(8==e(WPHw0kg0#s=wO)PLbyU4fnN>| z?LONew!v0GypYqwt16w0SmcNf0{&I2Mv>lhP;o93MWw&+PsA;@_-)PEO_Ug!J~Ux? 
z@=njENfU-2v441o1iVDimiHSPo?=SwoD~Kc8~+sH>U1ba8EWf5ToMoR$k?sSp#?l@ z@Gt&iW|Mw&zQdB54r!f?OlH)q%%@j#!1!A#$^@AkW^NK*r+UUIH4OuXd|8kdJ2dxZ zaDEGr|11KLHTVUAxhwym%M{X#3`V5`S47+>p!50)`!Mz2YMpbgoAzWzm9Lio>Ob}5 z?*i=AF*==Yl1tUP1rRVO%gpKRP8WwNV>G3f3{H4>+}>{wN*~1Ce^45nGe@_{VuPEC z>Q;tI^Pq~G=r3xVp(k9_A!3CE4B2qx_1-gBHltopvxm}|9{;HI%crqVl)2E(UYra@ z;WPAWGv$*e2HR@64$rxN`-9+^aoLEiPGqJ&!VR+srkB>I+}rLm{X;Wqq5ISh25?=w z>HmfPWb(HYnkFy@_1@p*j*kJ#TO2s9E^pWw`LMLn?uzN6ULyJ0 zJBjooPFPF>p(DMP1U&By`1!;}$ZfGOez^(~R_c!N?B9$g(OXnO1ICqH z5vuNp)h(56vJ(#MJNCATNU(bvX)1-cv)a8@SmOx2@dXT+ZJ*yK)YsY1C%<>|Dn%=* z%qA~=I4CDnvTM|1%jYjd`$B`63h3sGMNkoBOAo>ebf1HW)*B(prmkAe z%}Aj@p2FCnF62y+AM+(@f>?O!jcnt$9r`AT{6cJyyqj4wIlkYTUzQz{r{%RI6v?X^hn2ag%Y(Cq#2ZqsrQ!0jabHA{vkp}244U^oZ}x^DgW)fsd~|*1 zGAHOC^V_Tpekulr(wQ+;D#E#dF{e<}jSn*RHs~AC^~I2n6CMs{Crs7`&-0kfLrF!XXJHWWRV8i|Om8RNO`45<(phzDp*k4;8U|ioorCKAtYfvYhHm zRd6FzYbkDa{IZ5%)@ygZ5>Ax>@n%(dCZ+c4DD%+0;<(ClF$%1IuKsWBUqk@43j z*n6fo5l{$4^haKIW$_sEyuo>hvltFK4F8cur~vmmRIOAWIng>-U=BsA+x`9sraEBi zSp9;quV!LvyU}Jon)z_wh0`9^S_0yNA{C4E$AKH|nnCKIo!umgpbovwU=H1@w8tou z7ZORPEXX{Wc(qbQqrK0H{LeUXCdXnXcf)*e9yAiZQuLQ96OERVA@m`}bhdheP2b7D z>tV+^z3A%np1>xCtCN|Oqfu-=EFwYo{6qZtnL0<9M||$o@x^)*>+x(h5iu@{`Fa1F z>aR%tx#Nz(yE-Vsns&prJ^4D7x*m5tyflFi8u!OD{B)vzv){4AEY#Vu76yJ@m5O%&lewtyjT{9Bl9bbm)YYk6Ja_)5Tgr`gjf?7k=!= z5-3_D@JO!iFvQnLQk`wS2AIU>{QaAb#lm65XvTRYMn=>40TurGR2FZG;>~XKpEo7l z#9~~)WZ_50NInL_Bb(81VR@NG0hZ;PT5&QX;}xM9yzN@^N+S{IymuK1QE+AU?Bd+K z!)!otwz|3;jBNQ|Y8{eBG+0786*Ju=o2d%e^*exQ3Ndlv8^gWF#~~DDbW-%!q46JG zb*#!NIXF7j^o4Zo@5dxn0{r1buUomIEOUwvR-H7GG7^Iu(ew>wZ$T^fj~|vp)z4^E z{QTMVl#F5=4MGx7S4b3k8q%IRblPMo$8jZ`L}_1lXVCdWBWFtY^w<~9PPk-1Xk4GzDkxy<_zpZMaf&1rC(&umCZsX4MR2ptQCe)ks5r8H{xT%GV`iIx z?U>=oah|5aUB7|nD^=}AoNY37AEA*)2#xxE&^ID4UJQ^7Q|Yut(#eW{yfh|3$<;%QOxxfz?)Ceu z$B#G9aA&~~Y6l3QcD{KQi1kvvBT9|emmG8jJ7h0AAi(qPy}L1gy}#dRVI)kg6%Q__ z&)m2;8j!|b9!go5uV+{S_}Zp(S{L&n-G>U`OLhTJxf}uxP#3`!@n^Fj$^)I{ot&{2isKsgy z2B_FJ#mflN!?I(~APyOTR||CR2ia7LVQv4;lBI4J5lTDSY@ijDygoWX77N|>s^&-r znvef%LaX4nWTBKj{oqKg9gt4`-hr=xjIXLcIiDt}T5ZMVG3;>!C!Tv&WRLYqm~q(-8ylU=oSB|)%XLN z);_imL8gE;bjIgcQ9KExzvQq$ zr>Ei$Sq^$2!X{PBP*soVFGO@;f&atRqjg0te!8bnUXe7zcfZzjLBP`ytNbelBM8da z@9Hrvp?3Z|iokfa<@?rm$aNJ1b&g`uu5S9y>DomL@ukp#v0%v(}_nD1{7B5vmopO0rmoQVW|gfSn*zpNHjX*TEic)wMg7R%?9_fCTQ zjsvwRIZC&>RY>w!AtC?pObxK5Cisl|KxM@NdJ?_gpKmgam2W_JZjrHcO1NWc63Edq zyUo|+NpWze74}u?ClKN+j|O`55ov~qpw@z>D6qUBlr9{ zW9zD85hd7{$@+(E^zP#)d1`%@!Iy0FJ*B;M@hK*zJ7T3OSwv6(>|%;;CCK$aM6O6kLyI52WcdSw-A2 zPsqOX43{bz;w(+D8;`1B(M6%}B#kX85OHGDf5`E$h3w~_qOgw8jC&Ccd+m)k(gjXA z_yhx)Or4%C36m22yzN)fU9O*wZwkVLV`$ib&U68iB)VPiz1Wkds=mrkMEpT;-wGaQ z{lGrVW)tD4C1CFLyMR|{vfx-}Wjp>g zAT$oy5ElQH%FgSX(Qk#A9Ivmksr7-}(l2B2lohN|=arjoJst9c9@y{Px-Wsk@B3j1 zaKfaLeG7|EzdOvM(mP^Y-dV8$i}wKv_$y4%rIYZ?;yFf0)jZ>^`tO_8BHNC z3Zd?R3L{AP?7#F){kO**#T`#ju6O~Tyyr(U@n~$groIHMRz%19At3Ze+QPR*~EVVc= zFtaax{lye!NH&{RGgyiCg9R97W8YzK&wZB(j0!R7Q*h-sknxw|T(2~i1RpWoSdrr? zr1HZBeTkmA@<%Hn{$0K$|3{P*>4!2kk=@Y{&6%08?}Duh@RW2!ZZ?+v%Vm>tV=}lV zKQ%X(^0aWZnBa)KaCFaTi=HBAOqg+p-cl8*orX&PTbM4f7;IsRKitCTO13p#t?=&* zNXNdO$ts;ntP8j)@zq^d1`#RQfVyv8eH{U95X21NdXs&P$%XcNgR%T`+?I!x;VY3S z)6Q1u$Aaoq?p}d#b+69&)2TCt=3~Vd^M57LRfpc)eN*AlRDww$`EXlk5f6E4wrV$y z59H=(l?oMBY#{UA<0*a1ZaK;|gYf8PDNy6DgbJu?yrN~=P)`*>hpPI%7MqQHP~&-6 z*bZ>lC+#SnO1Aj4(`DkNefE3t`ixg#&T#HDFSf9P>**aN zV7tk?_u)hL}{A zaqok9%Pf&ji?UY}&vZ2qkH1U*ZXVCSpqnCn2Zn8Yjl^L|Jj93mVAUqdYh|FVLvV%6 zj)Wk`TB>pH@`?SJ`EP{}Ebutt3et_q5dZG6me0o9eb|sms_3-rdMZZQn|x~HxfJY` z$RpkR(<)U^%ddFBR9s$C)x;XwT++GA&c4p2s?&%Pik7 zo6Q$Dn*0s>7^bwm8?`&6jRKJ2!)oT(k1S*UYYI-D6)>l;zO?XJ8! 
z52_UD|0;6e2%#$UgNLJ*%fvXGoGq+lr8e>}ktcA?9g4u$h2!4AhLf2Zrux&7FeE>0 zkxFXN8bj9`(A@>Gc*L&#Dz7SOCekzi)kQy`7|4?Zjf|5&djZjKG^wI^op;PAD9juO zza>M$yanPF%U0p!NE+=Q8*6Y`jem)8+NjGfXg#N_){cy3PMJM16v}0ZKOcx1TWY)w z$)Hb_W~G1A_E`7^&K$vOc!-DzN|V!-(qpzQ{hA3$Qu%q5lR)OY*%8F)GrEmZs=bEX zn3?9Bs&*6bi#9c8lT3clT53>g)`HV|{qW7D_=>imIAc7kN7(6R1o&;T!; z=c5(mT_OoHP0F_xGwSZExQd&-iKlIA>fa{Q-}nZ*A}A;&<3*wS-f`C7D46uFW5Fz{ z{9NTLep!oty}npz;A*nmlFO_)x@oaqGf`&Cum5H`M{_MrP64|Vh_(a zCLV=1XA79XJVK35HUcZ2(X|a%S>%9(T%6v12z=Ix2Yr$eOgQf5J($s6@ zYRQOW>hB~=oqZTeNrL;EQ(B)MjjLfx*i#me{Ii_qkiOD>XJ`c%S9ZL9U0p%_aZ5vx zKT6Q+nIsD3dM>P8bcFs%&=Vb_=jfiGeh%l! zBXts83c4fM;_#3>Q3=6{Lns4|GILFgEjuhPsCHZ3+CA~}zq|lWc;CDIy~{`0gX=2D zxVKNf|Fpt!d=Bp6ogc5UR;Yx+c`NGs*N%IQ1&-FK8drS;@bBKVyo1B@5Fg!rgYq5{ zuE{UUP-yNyC^c&PHDC4&AHIveks2kpwPNCE!{^_;6%^uWw)Ftj__k{6^l*drcwxF0 zJq`@_{_IICe(&L<90opk^K7^LocCwmjXQcdE2%I2-sNG*2>kreL<@P3BNxXM77*q0 zHD)<@c^;5e9NNQn6%}|vN%x%6<;O5*+EmABj?rCL@)mAK4d)>*>>1jLrguCZp1^b( z0`gs%wA>G9;Nr%d@z{GJUHs)qklQlmeV8J2o$m4m{=PrhY^bPDV3M-i}5n}5l+f;;` zSlP=%NJGP`u8rXMWRwP*4W6}r_q#%cJhJjOA8*eL9SEFO5aRNyePm|qI(G)fIy;-c z>hNirX{2_Dzfz|lxUKmH5a~Q!*@L_+c&HiJ^z4TW2UL6<7gD>xpf%7JE|+s8sR+WLVH%vXL&BsJi+?( zWIC_F8!Kk2hvD_(iF^LK;ie>!B$7RoWE z+URvm~!ObZ_U}3)9Rwu`$ zN>L9iIA~+7Q(e|KVjQZT=tXcFD(6qGO zgXJGTu2S$@7MJ1~$niRn1h=PApa@@Rl*xw|lmmEYoz(yE_nKS5$Sv&1IdHaEb>T-? ztvx|Z6x0!1-69#4P|NFQ96F(8e>m0iv(;{s}?|H|C+ zepT%ooG%l>HeP|kCpwpP`-Xfi8zH=KF0)e+j^tmyQ0aI&I}m|LE1Ki0qmXG>4T`3( zm&KFQ^EP;mJfRbCz2u{_LUiLnCNZki9O9{#h{ik2&GG`N!tsCr`#}>Y^?!L0=m+~P z9wV?F_nG!l+i0W;*SiX!d6jw`3f!s?l8Z%(2hDL5G9LM;+B50CHC$hC4n!MlSi|lt zo;DO5FGn?5t==__(nFkl-7i?>4pvYhre`yIN?K|B$WIc&6%>b;!sUV??7V4V7!)3q zH@gce+Xvp}g(vujOxa8qZbbE)7j*2=$gX!-{l166_YEl9J<6wYr<(59Mv{GdIDU~; zg@Md!YCtZg^~75y&oO`tQX{nkCRU2x=c{}vY=D6p(t7#de5Q_Pvjltx|v^Sz3X zEmQ8lreI zFpftPP!)8%JuHUa^-T7nUy7`B+tH)^Z<>;yUuxm<11 z2D&}RR_PS5F4#`TvihV;vOD)(@mzUi)iidZa+_`IwYb=LR@C5g+89x#s4BdINcA?C z4?OFYww2(K#a3mt3@Tr0$ng$qXw5>{u1Q%B1MkDiy?b z+F>KL*j^7aY!jK0|M;6s;44r3ZaPUY%b5nX%gt9DG!sMydD6K)f}xR`&D3O!g|>>M zv4gP06KUd3##`CgJs%m82!9KSA^AM22>87)4-nu2joz&X}}oS$@!pC@hb|KG05uF{t42=8{`(yzvL;& z9KL@Se2YoX+BYZb7<}1`9siy8OoZT($)s4-G>3Rfwc=Nm`R%?W8C+K#rQ@tJ{iGLlUEe&?t-thB;u#Q_p%M8Y>V6it}-m0=sCJU}ND zwAVWCO0n)fH%n(X7uB)-yfa$&S*pkUSZ<0iBU%s6@dB{@088oVgeSPRz zs~ys;QYK6}nnjIb$d!^n?i&wEyPZ)ytdbk}>WHYjU5Q%s!g?OtD+yj_Vm8ExG}` z?OjSmW2x1|bv+|#&E<8~!ASTK_A6Oj*SJekUGGGo^c=w`4eMUj6~s>ak8DAJ7&8@H zkHeo%J3eAttnPL=M^e}w(9V$}Hh)7a;C}6HZfgrj(2L8%jSBIr){(14J!H&n+JJdx2X3EG3EKqV5 z_Es36Hdj^$#>haS6L8J8b~3amUKfDd1bUt9lu??=U&=e7{btpPO#iaCIhMPn$) zb15rTk+855ri5m)^4V(^n0PfPe}=Y@n@eg0@MX>SylrG0=n3XMcYYo4%=zMhc?~z& zF2(tnEs1MU%kXg+f#JNfNS3R_HGz{!D9Nap$>21F840)tzSM)5neWV*znkB)lNmJ0 z1m^XiO+dhW{m)Qh{Yi)UVV3!aA-U1{Om=#+MK_=i1TDU0SRhr;RM5;)koVg2kE`jt zgaA|oaQ@V>lZBFyZY-ab>xL3K)e5C2o6hrsP6wN*?=%+mbLyOlp@dt8E!Qu1M`9Q* zbB3ladXQjM>(=LVOx1In?6j$bdZIszr7*GHxykQQOd6pPYng-PK^Tk-4G%r}9>~8! 
zgxA$Zt$ixbp0cUhh=_m`mUz?a94Cf)M|6w}8jH{iyO zpYZc7GKd`l8c}@O|Mijh2gtDkYl?ADI%hL_?}?3b?sbA|Yc;XcZYOi%1;jU6O=@TP z#UYuHHP$er2rIz-Dbp@!)ixxActv8(_Fz08`O7$FQe?SMDW=r-Wm)=t<3!;s+4`6j zq1--)`Ef83_d)@2&t?5D+-PI^wLC_yIrs#PX$F2Iru!LxXxBD>_*v4Dl~sx1kQ^zQ z7e_N%qMhC4+)qyDP)q2Y*LIgCs~`1OFpz6*YNH&3AR?pGp@kd?ayfauXZnNBto8_ zRjFW{&E;$>0{2|6e-g}# z&(NSh2;5u7cK!Y|$QL3N!6G26*Q(cL25}|xvdPZL<8*9C`pi5SlwzlFEwUBRunBkB zWGFJ5IHG(qWTV~xcd)~foy%HRkkc%Rh+~mzHRA)t3h5iZ97s`8mOHia#K0N;u~mL>4`<%quhnO}TSzK5%pFnizcIBju%d`DS+m~o_>kEh9cR;a zyQ$`cyGEk%skkRYOp9yptP~7X3FapwKHq}8SjoLU7mpoY>#{Q7gbJ&o1BW zZ7gI#3A9EyWUOadBA?Trv>L2ogF_@@V}%#>${a_DLsVb&9nP0KpVsNL$eLdNk}#c3 zlIj=Sy*nUX22yT6j_j@%6UUKd9NtjOs^@5MZ^J_qHOl^~`7QFWQj?FHbVxWDfz3#I z?l9nt1NU1yz8{3Cn#YgJz>lyK?3BpI zS7>+p?~tD{6Xp3E%9oa;3;k6qGr)Xf;5^bxmCIt?*l~x1X1;)C@!`el{ z^g%Etau;=v%mb*^l&F*uAPSlEZ$U`C=f~0pmatUw<)fadWsEZo)mH3|EIP zagii5XPWtObHogF#}D=P3AuNv{rQ<$$Ro#yH56E^@oWRbO$m7iJFY2TkNyDq*DVA; znK!Q)WV7BYmbGmeB+wwRGuQUHGI4?~Q(;Ue(&=T^R$5}nrWdu^jDjHUHlZs>CYMUL z66_; zT%^He?+}S>WzCYoFoTs~rZzgl=t<}8!W}kC7$A`=nK&w~IN{ao_`gbDj1C(=Iu@#aX>}fO50np@wQzzv{ z+dLWj7i8@>VAsQwu$gf)=g?CRDi+n$n;|LSj9}@q=LAq!WAeck5ut|@5f`9GN}eBc z@L?smIKIj{nD)?=s;`{YaHAFsK%u}i1o%8Q+U)9sDQC;7#mv}f&zGUAJL@hm6=BKwAd*K=^5VqTP6J*+K&GVboQDQWuqgghS zm%1f8JMY93P&WcUm&Bc*mbn$tOgZyUGn6 zZ(GT5f4brL+o_Uk!`(y3B+o5}q6E6t_v_J3KILAF5OI2ANnY8BoY`Eb`&+mV@A{_r zax%qY$@(CPqC_OkO_2FaoaQJG$42YDkuQ(^*sVPiZ#)NFMy-@q=7g0^VS}qny&}h% zf;0{rQq$`@SDf?nVZ88~d~(3Z1hl)jV&IcN}>ux;Jzo;!MW zVC_ZpK|1#{K$?bB+TcETi|;;6nxb;bk}p*&lac0B93Uxcl$9oya!QEUmT8J z?2kS@5PW?(nLGu4S^d#1#Gq9QcQ`nA*>1lTW;nt9?J!XYTrq@!9Hh&B0HA%Lb&=@@r8=Mny-b7#Q>9UiSHG}2> z6426m3rm{&H9PS3**%$?UVG;S!vqg%a;e&V9vGYL>yI*iyV-6!b1n~jW*P7y&`V#R zcp)BpIa{ik34lSN*(iX)IEynqn8=n#J2H72_KdRc1etA9cAiLt>GCze`tZ}#yrRNS z4T<;!69eA@cTL75?WL2kWPLSCil-^$fR~IDmy1~weFG#8v%nPW$=QqcCmN-6_QcBg z1(lLEXyo~FZEZ6f;W;Bx1S1H__Os6v4jhpG97Fid$KA+@vXF-jNVFj7dbxghv`Z?8 zjl!^D=b)V(c@|5AU8?)Y4>HA0L{-23PQ-KL>vX+H$iig3UOUicKa32RKF)y}_X83O zygmBPYk~+ ziy|O~^>MR(6AKrvSz$70bINHu7i?S!0ZC|yVo4uP6%QCR7*Hb6y?WQ48+^M!4NT`J zWHHl*$FMOqjI%7X^9HL;Vqet534w<*#mmZ-tW95Atu2x4p-a&SAw>2@ zfPNy+%=$*at~5WK^H&>YG^?v&K|OPpYFMPHUK28CbdXL^+OWf)SfAp>Fv}e zUvVJIY^fV0i5AELtu?eFz7_w&)|4>g-V!B7;Z}V`6sTvjki9EzI)U5w;Q6Mr+)^?n z^2J~FEL4u5;g{FJS6)A(L#;B0=mkrM31A^|6yJPgrfQ;i-fW$}4lEw9e^2e*OR|8`#9%Nq_HA+Z25MS&qjBJ#c7>RiRne%Rh-P z<;LiohWp6un|`)|KT1gZbs%v1A^YwK!ceV)crvGnaOv-2hQ#hL+_cg7scV4NU^)@65S(fM6GbzMop6`Z|)M zUCJwEDU{N%kMiu>1FxXEkEr>#oRY>47Vu8_0O-lv;sBbsW7QcZv$Nz0gU#{ojA5lt zpM%@ag3?^Qd-?H=n3tE*3EO>%!g@a;&G^7!8-slr76c*nX&3Z`O_)kCN8Lk-AKoc?{kj+IpfIe+OP&I$i9*xl29eXe;X*e))XvniR~E^$nUDdRS=3=EQS9emg2V#bxpjFSO|G zdO2Rmf4#yA$+q(lL zoitt@<*Ssd$6RuCX33SVz_r{C}yI@sg0 zbMD^>Cb#6-YP1SKp!jS%>{Z!U%dZ15=o0)r@=UtLL|lxkiBK;a8FO4|7i^b3cZ~Ew z#dAiSIVayvp4tvNeQtZ9c)WHovQ}LEM(cttg!>#H5@n@8D865OSDjTp4UulB1ynyC zpjI0j)2#=bsEaA?ILi6ljoIe2;>q^PdyEVJFbT3!(dO>*{mV5s4wr!Q`F!^8E;qRY z0R;6cNEr(d-dzZG9xy+3MWuRhCf$#p-0v|2OrwHTyPNm{$r%u6{EV6&=kW~L;TqhesKz^77Ns{`cN*j{Jz3NRvq zbOHP}vY_x&_8C2z$zU>Aau3_n3&&aD|>bSj=_wtiR|Hn z47{QS_G3xIv1AJr@Z)pMQ7f*zLa-IPk?5TI?Uky#La^4{gm30?;E)(43%0o2l|*EZ*Gy6GLo60WkOogt|NCY7 zIq0H|<|cz{zwe20)>FxFD8c?tx6OA-oyO(jkf;W6h(i`^q zeXQ3R^_6Lf_hz>i3cNT&{@q-4o`x*)J|66hFal?@!JVz}5*V`S`>{by!~|bO?^k%22)+`oD>u`Uko$CwJpoZx^kDHAjHL_d z6<2S850_ezP}rC0u$U8_zepb6r*K#-e|b!Orbi0)OU|C)5$`ql*ZK$u+j>zjDkVzQ za(6Xekt@`v*wXj|9asBJ3Y%@WgksWYU8w3&iabPALcJdccJF|rA0ugO*aLd{q1nFo z?{}n=f`aeU*}=%Tbc57`*6Y;rX(v7r`RkobyVpJw@!SaBI-Srj>s^HVPc>fho*wgi zqpa1l^(RBZm1QBGfvp&;!XPlZ#F`;ejKBxin@o|Ea|M@?IztATBuaOAQu?Ss6zyp+ 
z1xnMw=UIJ;(zg0-W{jT8>`l3^cu9UvF){=NlaqL~Ef-`E#O}M9Es3zr56`EK)F&roH^OQjT0YFa&-6sKGoh>U^uj8}!wiquX zNE|S6KQui^VQ_!D52&F2AAI7+5?_)XVQLsD=d?{zYxQY0cUhAY0Pn}BvfK&*k6PIU zz^|h6j0TfMk8>zIu+;0Sz1}4i05dehxuuZ`xRxA1{MBB9hs^v`LYu#UtK7SbB?P~_ zzGG`(y&2k`LUbV}VL4&vCpkZGOP7bqWMA}NYgyHv8h*|O( ziI*ys!V8x3g+v>kij8Cge;&Q-e9It`cdyz8KN6N~X)Z7&ijha@nz7(kTU?BKdT# zzX+}Ui`44}k~Kh6;QnO{1BcQ5L(<)46H>H6V&8)=xKeMMO^iD&$E!)=@ZW-f^t1`JnDZpWF#>7pp48; zPWR#8%3Ey=Xgcm`u;gJ9%Va9md8GYk)cjHuP)#9cxh=`Rzp~U+aLzoEQH+x+ykZFQ z?Q_BWkz7ot0i!th4E4ZmaP{G2UMw)gfho6^jI5Q#q~$U>`|>k!Vm6`crxCU=@|vnb zq`u?FpWF2^3#GiqsyGaLq?+kQ-P7U5*P5LrHX8GLhEw*R|1pH1ug1x18pm9DLH^=9 z;pg6PuL55?URZJeV>o>7CA$Lm32gX21lngMAJ9y{aj;n-8mDJzB2z=u-y zP+0spDX|-);7XDMRgDY^uvOgiGxN-kln&rw8RLudknk7z!oL3F-N-)Xn{wn&I2zAg zc#Qa`-YKK+0ieb4|8X1t?+^bVfq{|`@EeJ3h7RhVr#t!>2_R*dfKQR2*30>A{#S8% z38=Zo`2*DI13B%X9w=&frVT+dpwobBm?;zSjQ*B!BA2S+5t;84x)@NUzwT`yZ!>L; zci=@1o@tl7UsiD~kKw!AcJN*QgDi42e~loI>~ljtQ-VtD%E_J=wF&n|UO0WE3Z-5V zO+2at*=F|1D*B(v&J?Vl^dhjY!nZo5?l*D&%S#~!bJ@Us$MLlzC+X7ct1(sThn?X| z_v|n$E4@rfc%NrXE#@|sGmB9^D)|!^M{m6F1MX#7Y;u1gu$De^Mst{Yr>o(2vwom@ z;3aG|H(CIb_PVWclbfRQoEq7p?vLZgEnglLps+gns|2Yt(|ZO?3fuoQ#Qx7H^m{_p z6Uuq2eV2bor0}3RO*KU_mPwsb+J^C0z5jVrYSG*?zndWjImSv~e!oIep5n+l5Ssa; z5q3`js<-vKVuIqp)SFVA(}lCf-&!y>_NcD{RH|bF!sBe^mm-wCK@lVg`n}%;PCs+r zn7%u8WvkRVD1~@g(mJo7`#{G0{qS=y%axs(=c~(K;-;viR5>gO&anUk)XIZls#aeAIT9#6qT+wr=UmWozfG*`alf06 znl6^#Kf@ND>YJ@W*>=fxO)3A`DdGz35}gs#>I<9^cqlChnesl_xvRjS6ef@&V1x*Q z^DEjNTarI{oV&ZfeGt65mZcqZ!l+J9fZg=ekG|h2gaIvf)N2RrgcvLaqW%EOcY(}` z-89z!`L027fupv7jNIbIub9X}PPN^1?NDgnP*P7+LS@2W;G$qjkhQ{7SN`yEz&>>_ zZ7a{glkd%tQfZsz;^a^i;@G1@o6*^mQB%J|1(VFn;ND5cWI$haUc2FG{VxYIc$i;- zE#QYs@5KLN694-vBt~d@Lb}7d69p{)^F{t^fz%Q5gYwq#kK@(<-z*vb`D?;hpp_$t zVUrKU{+n!xk%-$jySHJ>J=e*wE;ISxJFli&(qshfZ-&a7e-jwREyDGuGZ4?wI!A6(A=M z6u~o0hs$b|4}$RjR#cB^=@)H6Xl)(km^#08fNb+KK=#O_Z=xSekcR9a=PeIJ*RU-} zN8m)p()|c^jzTg+PJ6|v!XdHF``HaZ1 zWVL2{@Zm%bH8s1#ycylE!tWr7eGEt?Ky^26`Cz*U0$2(w)R@LBKrVO%_a|(_jUM8e zYY+*dVexM7n;U=a11>i^FV>^@U?xGisn}QQ3}X%z;~_Cb)5NC0+%asXSpd*ej8?g@ zaNb<$%+qq=5D<^O{6x#H1xy@4KOUYbaG=ZJvM%TkLx1CCTtBN2rh$M*=Oi#j2kmH1 zQeFU^uqzxI``yA_4OycV$f=K$IY8Xi!`c1$iLw;MVFI18MJtgv;E4# zV$~Y0KKR+0BhlFDKS?V0M>*KXvq{qThYOZcq?<(`@oYpBQ<183i~CfLUKiJgmgM^_t!$RcUtO z);=`<|Cl<3X@26Rd zPb`?duj@RI_(9*F&z5ZPmu9kp=_9fEm{T`Q*k5jDqThT?D#N|50c;D8BA{)ORQBcd zh*eJmGV{Q-vpzpe3k*L!4%B*NhiE;p(Cu)C4~h!hAx(M4b-Q+5N7#B@X?3adK5*K9 z8O|wb@pEg6z`;j9wOh)S6dVu zb_VihOJ~31X##6a)><-hgT0q(2hviNe&2O-}8YZwD zb~}T&?1qMZOJgq{$OW5pv_1e3C;2^l`GFliA~sF-Ft}88ZTXhYarf8Tye4jUb%|ON z&Fk#~cIsTY2eQ>%ZUg<@z5Bx@h1Gm}L?xHkJh7u0(vfIUFhlw1z2S4C9y45AXJ@DJ zImgXv*Klflp+frJcU;6^%Uhz^M7G{K=gV%)q9v^?&%lMguzP`4^L-dYix7Y4@KEoES*~)gNNU0E4=!IO#$>$nX zAxluFaLinMjw25EJ7=p>%cRYxu1Eht z_#2D$ymK0xTdb3lN9IQ*(&P1tj{uLTxAsMB0_F7ihQi>!!MYP|quTcsUU>j1QCELD zQ=qLx@7cB0d-Tx37uE+OTq%?H`+TKV-bpH=_cPJFojg!l#9sHB-;-~*QkjVHb&=6} z4C$ZxX2hM3ZMvE&ZX2B)yEAQJ`Z{BGd&7x&f*sEYD^2*&so(QLXBusE#*AEAm2-M$ z3!L!dV3U9?PyvUv=1k++${|2vsdwnZDv7#JLGbUiM*EuXC zYOkGV2G+6+g>c=y)C(y?OKau{b6cNR>CTDy0$qN4T)TjAcs9p`&u2m|nO;FG6vb`n zCbQbVLZz&=A2-oTy-G&F^Ttu@uMf`4h2P&iJ)yIF7i*0fz;O$D&acpM1x{0pD^8X> zB%!lqIiN`zNtrBD11;8z#bMihxznVrOU-^Ap}}@0kXf(w!^&&3V?z*z+eqUWyZ559 zmPV7l!0~ELS=VdV1QI}pE{upDIx(N~VEzZnc6Y}6&{nv~$K}_A%HWCt)X5g zsbDoz*wH2GSl0M+=zo(0MbNvULuA6A9ytFuSVEy=Hhb=seo51;QVAhiYqyk(D-lP!R(k9;_Uw?TKnoShSUw^e2|SH*YDMxn zSkzG%x*t7d`$Ey@qXm>!iakH+w`ae>JQdHpBMS<|Wz4Xtfnar#si%XwW1cWNJG_P3jp#y`$&{R5}Tr3tZDSWj9QdgU0U;sM>`(l$F8Ms0} zm9*Iq?dd3iO;iB7db3QmATsj+C;$MR{J`CMC1!Q5W~NupCdXyg70;q_P-$)p*$!j? zsued^HvEJQ-t#(>Z@pIM0^N1WQ5p(NA>PG*pD()9TqwS@0tMf_zg2sU? 
zMl_eLY8K!s-q*$1b)vOPpwpZ#_=XO1y4(&nTkk5WjHM0ABnw3&5(n@%cu0791>ZhZJLz;hNv+}BTRo54Jgu%oY&Ij@ueVI2 z9_kvjyH}?3KQS0~v1zsl+g9G{wmKa_id z(AwSFK>_1uE7nyG>KzHhs~YX||E^}jhvNQBPof7tTU!76;boSjA&}O<`KUGBkd3ohjfrT%?qh0~8l2ueZByWqB}Nf)&1ci)4{?mj6|Ep7jUQXnts$gD@`FdcKPJ?Yb2P61eE7uT z&a=U$xzo?N?ot=)Aw)Rz>a$l^qYYPsV?@px&Ex#;hxl|jB0&x&Lz8FeTqRy^ z_wzKjMC{oYonv`J{*FBj)|!@Mt#hqaz&k~y@pP%=%OBNNQbss(eyASpsUmZ`me`(w zE5JAx>~Ee8Wb!PCS--rCA>mVL+UpOC5gtO*vsgG^`|&60na}FxU%ZU0s5E*;wVY_E z-n!G7k)a&4V5gY(3vTY~Cv(=q24*A7_fSBo9>N(0G(0F;q}nltlnSrG#-s^DTrk*= zgtvZb!n!azwOA?3cY2R{9T|jZQ~)D+Ta77ZIez+RQHWUiO{dYSu&cpzIH@3MOh@f> zvBq*Epkubl&Q8nqYL`yC+9H#2CMGDQd)3F&ZNbFv>8NN{4HDiX!L@pBgL^p4`(X`kNm%Kf`%IE*{2u{2IMo|}(~8j*tAk1q~; zvco^=u=g+`J)m-&j4M>E!9e7JxEu;*T1olj?rcHo@1U>&>M>U~kV-{tswd3P?X5BC zn#M5_so*cEx7kS2sD$iYuE>q$xMk@(U*flIwz*dDCJ6aKZB27y8&%mqT#CFGGAh^2 zsz$$9Dr4~ZG{5jyv=}Y~itImotAM{q(2R%ARjV?cX#aXQ3zEYE*NCH`hIs(|%)kgL zo#_ceD3*|0LB9hY7{t&Kv>sTudb9@&W?bf5L(Cdg7Hr{o@t+?UM99kVw&&yN`LtfzoOZ;)ms*IDdbzO0(g9ZV6L7a`{^=Nclcn8hfYE$&c0#pIx8gk*N3=tJ zC?+o|5ld4OZ(6DHv3yEFy)!pk#)FA0soN9In>2p}jZO^mKysEWF1H-9jcR{! z-3l57qTjq;cW;bv{$``SLgUM@=cVYIb!yE_>e1dyEbsn`G?$dS4Qo1|CE@B(nk{Tn zz?fc`$|mEUliFN6a}al~JRqY1{IAUO`A3ggIA*L4pC^?NS<^jFxK)68FKM%~oN342 z(==JH$fTWL9L@%rl(3^nf4$nkM-%O2x13gPsoe{CH`p-*(yUu$?kpQmGIY1IU2i|H zN|(~~@%(Wy=9Qi4)8AOG*OvLQZ-iIb^E0jizb%0*eed3}lF-?FT`IRxZl6=+!xF%e zq=0B@R%xYXvxK<$y!E!s0uH=2G{l!dR=5@Wwy)Y>@~R?n8RRI8m#zwS*_^if?dr85 zFgsjC*#DctgPCD_Lo1IizyA9I{}a=Ho?}(P22X=>1vU>!gJANVn}pl>dXg90E2+Pl!?Vn!amsvs1jWwb=U9k zoGVDL(WqFKHbQ+0452&^U>nP1Q)AOH%u%il9!rD2R=*bcN(I!3Wgl;XX!t-2u?cf(N`YT66Kvx@twMiZ)W-FW*)!0xJ&_{3Sc% zF!qIuQ36fz=2bA}Idzdic+y(64?fA;{jm%tFTZ8Ch59F;Q%{mx;D3wm2qly>Ala0I z?yrG%=X%Nh+}`DwhUL^z-_@9~&RO^7dlBlJoF}zwQPR$w6Rt|^B#bb|gD8h8-4@D| z?m&=rUD0lZxkilnI)iSM3{W?U70DUmGT$xoD&xcU=NuJ7!#T2g{vRJ|#xT_#g-~TJ zLj(#T|EkEZB6_jxxmFjQ9F%w|Q$(n3=nTp2+0^)__L6Ywdtlpe==6b{w#;CH*m7e#yhbTp`ck@HHt0Bo&oCdSXr)epkD8dbgE}=`hGI=c>Dqg}c{&_XnR4 zb8(-K>ORCUt#+B*>y_VvxdSVMr#MYxd_8H^xQkiw3C_EwlIeEUT%y&W}q&>rS+)uVnQ1{^RpR?I2CdYGiZD#YOg5C0IjJn1&N}0NE55#P) z`D7LL7NcUdkPCyCa{;`T$wpp&Yv_*)8AAno4|D< zp=R*gXVc*i^t#pIu5L~j>m&%);o})?R%HbnenLzpm&Is3i$GaHKJaUfxQG7gW2^B! zgSS+Gl8mZF@#mw0tdp<*G+r(f>>T|Acz4;Ov5@89-C&!HOKl1wD;sXi$1@e3_Pwj% zl1r8N?ec<&SRuBsNrp8!hk$jF{^$TiL8aRj(1~LU8bP9ntj)hvTcX@~ZmDy5DWXwG z!%9i>Yxja@Dt-Qag~M3l6tx^QXaV@s#*I=YlRkT*W;X z7G{-%WK%RIvjtGBi$I2+?!WRiA)oTVn(YlIP@9@IX|Xi@T!5Lo@AUDy;R|fo8TsT? zn){CAWWk1fb&)yYf(F*P3yA_UE_?&GkY}k{G{wyO_vO0%rbANr0n)_GDu zaO@Ni_->YE!4u=uWu*R{$A(HYbj^K>LJX|+eQnvZW6=?k;^3b+QRaoN$N*IP{e zpp3lu6&w+}L4&ic!|gf>OZT(a?VUsBY+C7HpXX}Vg|O`RUYb*t#~u&ZnHHIu(5Ll2 zSE884&7mvEHGkL>4Fvs#{g* zgO3Rz8u6(e9@pawwjO_Lolq+}4eb0>t5p?KjY13{F~43sD;|~)s-00R_O1^_4kuNR zuO;T54mU7pfWNFOY-zNLd9~d(l!kXo#S$xQc)wN*{}@_vvpu=KArzdmJP=QizH>u7 zFH_L2w@>wWpU)0kUEK|rh?OL?-q>5AThQ*^S@Vgw#2DsX$gKVs+Q zbmH@GtGQAYAUZ~}PIWCxjsZN42|;b*;I}1ntzxI5%3#O@jZwu)!_G%GhRGu!jdT3! 
z-1yZ%S?>>1Dw|!b_t4gQy=m0EXVx)4s2z=%&sW#%V$DgodGX$%npq6$oUh!=TD|OO zRX5WM*eNpWVk1G>3KxS<_C3d*AB%icWxDmA4SacoZB{0hlSTg11+au`t-dDMaTd6l zClxz#|7+E6p;$jY^X)o;v)rI64P?Cz_eJ|Xg7Zc+ht4aWXi}3SUp?r*vd1T{CAHZ;7`$w=L6LjW&JM&4#OBkjOCEB$%@RZ`4e$@-7q&6o4H08 z(>J)mc5Zqij%gObq}?NGklkhw=ZI9PrR(#vs6GQG1BwO$Im=r&zveFe+Y)n5Ln(;) z2wH3(eW++6bC~(*jwHJ+Fr5(Mp298#vY2)0#wxDJsX?dEV{_=x5EZ=n>@?`iw9EFa zZpo65K{h1&D~y+*Nnu5!{>yLUIdQU+1?yraAOE(aTD!r~qgGOa9vW<%(Z9oq^hZ}Y zeek@HQ{i-v7x(Aauq4ly2j7O3@PAcFP65h6)y}jp6vd$l73bSn!a4E=# z9Far4uE?@n5$tUdbX(%OrVT5pA+5qg@kO~_4aDtVh1&y{o6FjJt8Ga4Bb-mYEjHtK zlgUbBw<|T%rTR|w)>lveL$c-92R68~QqtJw!T8X3Gz!PFwcG~lFUzv_7jGR&kZh)5 zVMFNsk$8i`KV;>Uyv|nEfJuNSSR}DjeG-p_S}sAd@dJbjqNN}_nKv)OA{a!!8y`d{ zcL40U7=aa17!YvbZNjgKY1LsBQ9(Fyh)PmmsyI?Pw}F1MS#Fp`u`^_~-X|97MQlb3 z#fr=Yh%~Y%d5H0+&l;D#T$SET)}18(To@3!svc*j# zsttSUU|B2j42^Q{5FB<|T|<73V!T_&+HldFtyY0SpeQO0;}N@Tv+dJPq>d8^FJnY*XIdk>#_G17QceC{7yER?ju}j2=a0{Zw|ZTZn(J#u$^3r z`-bi`XqT|n7b+_uT4sGYD9~))ZzcaBL6y8cbbt%zgV7#3MZ(A3jW>LscFe!fVk1eP zTrjS9?lDt**o<%K1C3KcFWBQNUIdinxcaBkGIVV9$1$A*KaKmLl6K*^?_Du zzG|ZzeeeO5-E-99HAA8+@=`wfy+CBE>sfK_F6N1T)486w@5i{eq=*f6`Tt4Fo9_WN zv1Kq+?q4&DyB$b)rO9W=)ziCqPOHrfroy0Usz+}HKo;+!YwJx>2E021cBVySz7T2x zYyoy`6!|K`8JoaDK$t;_*$=5VyVW*o;FM)so?J_ya~%nb!d@Ii=a@|Bcz)J)#F~^w<#!P=^#4s>lyu_^cn9ZmOFEX`UnZL~Glzpc_FIrp#CCmZ{_=S?$}a zy9{d<3y1FnbWInW$~!bOvhO%?v*UfV?$Bu61;btkY{YS+^a zP3hg*>ZP0>rY`?_IN=BMt4(oW$c;r7A{*slMK@WkrC~G;s6p2iRoQ2h6n9}TN`+&H zWOzE?z%m@fZZ3mH(IsNub2^s5RUNyZ5a6`8b0C!YV)zyMgmJbOf5Tu;4R$HitRm$t z(`|9|jOo)ZStsnzCuDX0R*Aog2;xmB(`0EdTsx9ad3NoyG50VM!NHNunzto!ehszD zZf&K7rIjs?L<5*Fdn!WBc7RoCa{(Xgee)Np%;xj_yhrO<^nT7MvI zbsW(xPz763RO~}hQ?6=aBv7UyiDcof;o(h1UZUOy%ey6D@Y2d=u6AZJ7{$@E#mTH| z74NJRb#g&Q9AdK5d((d7Qp+1U;WM6Sfh6e%UId(cz3#=eTa!lUliEp%-NKOI}Q4SWp#@KDGb zUwhbhty${)xm3I1hx@@Y(v}$ZumL%GViiQP71i0+JhKS*Sr&525U4ziQ`=iRkNf)~ zo_0?TTh!i zVY(l*0znkEnTVn7W3pJm}=mW~Fi12lGzy?Uh96&RY0W{KC`bibfrP@xk6cK}cS%2M5d zIT$>68z9&0k5!+MI0>==q``{wT(dFWdYfmBqe#phV1F;gVx5L8`P)zrLBqaqg%QF8 zbaKlj_h}D1e}rXdov8JusaSWWrV6mut+&q^g;KM6oZdrAasqU*T#Ko^sney#&zpC@ zmIlmtwgerwa^ZfYq7DAi-g3NNL?jBrB{3P-n8)a@vg?AHX2re*Rv{8_5{wyuU5^KX zk4C@(Y2y+TFn8=U>%aEb>Ev-b_*GgYaY_X_gG}){2t?9r)awwG@;Cb*uQbK+;zF2k zZKuAX=vn-YB@onR(bktSqD*NNs?ji_&`KaI{oqX&v5pX|^!d;NBy0=g@ z_c8$78B^SPyV?9sW-luCOVf6upq4ndJOHQMiiTAb1JN<0@8uP1LFs?5nJZ35uesi6 z*1KmYjirk~ANpsXiRiUmQ|Pr`O$ou;ZaQD2ZdoR2bp7qlrp-%OAY2HMHqrSr`{@?e z%t?L?hUsi?Tt6mJ5SIU6ubzDwV4JKq*Ol*@sK*J_F zCr}WXFwgQSgew5nU-u?^QNGo_DS`anLbI#2zL{V_m_;258shU9whWfe5r7CJbYSfB zk91h0l$wVd$!hvbe36X9_G{fuAAwN0F0(N_=d8^tH3lEnhMNTHF^95D6&WU<9O4~A zI`0Pq%)#Xh@`bl`qpPlc3+*PcjB7wFm3MIhXF?HvLy#M*&g4N)X=jird#~ z6I?WE3hHPF+bFjpAFsA==%P=6tbS`}(SITz$`o94acN(mKZRkno-b#D>mAc!k9W~{wfIZ&uBk_sN6u}N22-e0kFZaH zgW32$tVTx zH_H33=HWrVmum|$9P*cosaCyvj1i;lSbS*|-b;P-t(z#ZPOj9hE?iELVmVjnDWo6C z_*jJ(9bkD8nZO{+A0-R=I={xqIE};cb#c;>nMvZ5KIF`(=b<)j;`JGwSZ}J+gQ1+% z4ynn%%d!w>FaTqGX^%9;&gc2D*mCln(G=mg6fr}zIQ$w{VMAvZyqW-Lu{Wc=)uKnz zT$D;UGSFZYL)m*E5hH+3ufN|+KOUcIUdV>Tz@Ql@IID&yVMde~YS`g?RO_1(JS;0U z=LG=9f^Lyh>TR@}a-`x&)d;rs!4(EUVJP<1x!|)oO!?nmmtI-_BA~rz+`d?mRUgr6qJ;To@PWxQ6zex+nk(E=p#_VyexeCM?KVKpSP&Co6A;eNvp zxoI?+m_xR!Oy|(^dkBO2*XE6%*rb2&eluaixull9B(6GK?ILoV9Sb!|U=YW9`q6r!v@Gq|q!%_Kn7oS`m6R2- zUjb=UM}B%)NfyqY)o3;n^5|_Gio&4CJzvm?0GIjA>Lq?o0M#gQ?2Otj*Y{j;1=})W z&e z@~62RQ$1nU^-mKOoLk@$wZ!hC+DyB9dA#3GJTB<8(76AlJG3WV-2G)}tB)k)A{ey+pu+8J91r-s;unpkk5vlLvXFt&7AD z>LmZthyDeFpgvMWrSYp4(_zrF;_VJ0(75L$a@gfx{oYBUa794Vg}k1K&#e7tl@@~b zggrLc++z=m5rs2j$rkW=OZ$8v z%8ypN{?P2ZB2^;^0cLoHg(^^I^=eU`z9;Q=HXU?_hzt*6#B7sPnJ>>mrV+JqvGRx} 
z8QQNLZDH}%g5j84;EjS!E&nC+Nk$CA28*CYvr0elU%OVPz&mMa#q|VM&GMP? zZd;wNwJ9m(G~=&IrHLxBSm|MwHzmL&gGpAy-d^F`a9O3WWpN?NfA5bQ%v|$6m0P}w ze;p-V^C6x$7zHXkRHrH5U);aFKG#p)1K;mMx5@-V&U7a(6HT8dOZX8EY7f@7+tf%c z94XqkZh8&9kE?Ff5GR=uwlnzA2Y84yC|&|t9({bdK9)O?Z|)I=a*%n3FWuog?LgdBLX zyww{0&ymepR+Sd<-3q5=1fR>ht{<=VH2bD*iMU*r!J;kE8uPpM1xux19nHC*pMPH- zot7V%B`pAh17nv9;J8Ja<+j`K>{K*<*IuQ8^C{EE?rNgWkGOPmEr7@qQb3I*i#DDK zDn*i8ghxG*-XmWfS#mN{O;&W>st_~Z?yuhzC&XC++*mu))61(_eTgkCwy8eJ`1%M5 z*%!6S#md1Ql&jppA1=06!mn>Irl4B&Om9n8tz}02tlcFzFf6hvMgR<$kce9)+dBa_ z(QkebY()ax7`(%__D0{spn&&NlOBs{BlyUF9cHdF10%e5XK3l#Px&m(CAfR4v$8i3 zvXkn|%NgK{nsbGhXykRpm=kwis=VPL>qZ3WJ=_|sG%lQ0(yB{O2-b=(> z1z)xFPeCW`1JRlR+}pDm16r5$+$Q5O32z0Of4kIxk4r)5d0mBzxITe@yWiczyMa<^ zH+Fl%l2yr8Ycx$}sHu0Fqg*eOZW2a)%YiEEEXIizN7ww}(g7-_!^u8g4UIs3SD$8BS8D(xPF<&CD?W+;w`X~76W z4BXu>DEC+C5k@*410Bjcjl%0n)~hOd;p%d0d-n<{L1GH@s=j>A0*)AoQK}&&SGaigH81 zR7cIYq^*4iQ-BCM6=}K%ch@hO(Vk~xv)gE?P|6#sN1o08%IBmLQUCP=i93!?3RvOJt z6P-dzX0${pF5d>%!2$^Gc-8u)uV2H^^n#x!=C3;ZvvtRCy#j#r4ANe(YEmW@;PV`P z{KtgG2Dzq#6HG+ZP5ePtUOAp6WPL=bIfs7yndM$MCZYju1N&A%SV1=nLsqcW8U0gsXnXKUHI`-6^uDJ;Q9%_~KgE4iZ_wCs~a{gMP8 zE%{0rx|lMxv)fv=U$vZ!=Rmm?>ila{r135Ss|*l@sXz>p5)Xs@Y>dlb!B}Lx@w`Xm z{SiiQze2^#^UcQD@{@|2v^h?ZPr=HS!R>&8aLKm%Z<{%5SMehH&FR)ok)p+K+7H_V z46Tf-9@^hu@a``1qR89Tnb#>s_3O+UqVbCv+T~uZZ&Jr231>QtB}NHqZtCJ7@ff0k zmnh6Stv@7VNOSbksK~5w-T=I$+*~Ob8@fBw7uKN|A4j{*v%%I^+#UUhoDnJGXuYG2 z*E&%dMVR<_GrM~oM(H5X2e$kqG5g{wks(KN5h6U{Ad zul3BbRwA`rrabk)(^t*v$y8FB^R*_1mzLUDmpRk654>l7${%XY12zDJrE$Wb9c7IdmT&0SHhIcKOoEHn@qst7>$?B`E?P}oX=OP z$LD%9AEEKLS&0 z6mL5L26rb1u0T9uIg`{+DZ zZ#jKj?}ymTV@DFrue3zuA@>X#1fHvMU7IFEV=P ztVCz--Zc{dgdWTlcx`;vOm|O-!MW2B5|Tk_4wUzUa$5h4lrP_9wUPN)u(36mB7>C1hHEDsh5cARy?BVUE`#UDA8N4PiXDl$?3QS^_1)&?!_BmmX97A9pa{8Y zD9XFs?)8P!;H}rPPt)TGBI$wo?dsZlXXPyfgC-qEjrP5kO5_(hm&U3ppS8JEya)in zJ5}eG8F5QhZhtSJ>%lP&jD&hvK1Wvg!8Bx=7Lx&m++!j%oNg*$L>{Fy?TRd)Qb&`#++Fzn8sW) z^Nlj_6Ylm2GL20eMc#`d#)qPiif~wIeAjq~5$Sw3uYy!7f3K~O&XtRBPxi~~`hW>b zH2%;GpVjAY0EZX@?E-SUXd0z>HJiBsf0NEW?*z=r_gZgHiWFhBLWNx52KmcsB#Ai( zZ%jLcTaD7iZ=gsk%gvUV0i4cG1_lOUDH#ROwEX~_4UI{=R=RSn zx><*69w6S99p_Ng>ohzec*H`aLy)wPftrIR9dn^Z!>dGkJ#C#bO2LF9V;os3nw~2a zEwMlubo_Ftfzt9rHN>Yz+a=1?k8euoD(&uc8IqK=7x7|JWWQt#9)g6A;{wa}UQU^k8};i4@t4;T_-}al->US_!hIXFUB=1VR!YhGPp( z<>^FeHip?IEbG8{Vp*`|7^lEon2%ONK}V>LMYYxLSiVyj;>Hz6X+i&<#H`1-<`RZ| zUu6Ft-bxyzJ`}1w=)zsm%Nef3)4~saHU|*mOIptRxV#jXmrKm-kVm1 z!I!A!PoFUxbmq^DBSNssc)GP`61hO$U0ch9ekoiD4y|q%^ld@Lza(-tp?ey+q_v!K zKk=t4H`_~8ANP5McdZFsTs>WU%U*>UB@{iO-d=$lrfUBJX*rWxdJSvrz?zQ0ZfSIv z@{m?lz8gzrGn?O#<~Lb!{aWxtaf3stidrHPrN^IM>%h!|a$dRcZWL z{HC?l_6(=p1TvYdiHv=I>9Bn@E@iGsqfK4GUIGlVTeIr`yxPqfm}Zmk(6fA!B>MAf zKR;AFAzP3dzdStLOpfuo9LwW>(o2xnW-$U{G@^ZBF|&K?(tu1XW8J1%sytwmjB!q< z{kVga94M%Z@P=a|-0``;H02>hUWF&S2=)F z@Hr*Jp6hOi%>2vLbO8f#IrF7=gY`_ne1%>fVPZABGwzJc?Zp`tV{pQVY(G?+!`k=@ zy=JHAx3Io8c2P(MZJ^w+KWlIRcqFN!zO0vSrVteBL>QPzdiZ#4PN}htlp4w>0W6?5 z%3SYrGy+qD5wWH;d7NaU1~US`lTyp7FMTvt6_GxXua;L&k|N@&C~`Ed_|tp;3Gjya z5xbs+WsL2k@le&HX|ieh$7PdtY)pUSv+j>vEN^E7#2kc=Z*5^JU>At(6&4 zgZ`dXIU^EMG0-4q1p0kETf)G2gg@*EQbIz~^l09Qyz)&}YEumb zelLIc8n{-e(TKUFArGAml zcm~^s;~5wiqiwOM$!4R*YA#M6{|0W4)G&6dPik^LIk`&Tf$os+?L@^z##o^G0_?Ky zB@1i1fzsph8>_%_sm=WB(@lwbz5ON*y2i zLFAGs+js$nOk76O+;R#-GQE4+Ih2nDe0`gl@!-D$uwuQb+0;LoR+7h;^!dUDU%N$I zbBxh3)tJ;m2C87iY zDdyxXwirQUYa5z@M^&@{wA@**A2Z-Y%FRY4NxL6kO)21H-O+fuBtzWspe^Jnb(a1y*oYYmEgM<1c&4D3TUS)275%_Qtq;oWMDn z1tpjrtt3v|Qe#3RO+j4P@wXcaI({K_FqWj`0-rIy)vbwvB-^$u$tK?gW4gHsC@B?I z#SR`E3z?Kx*%0gq=^gWQDUE|P@w!@7RUCZQ*a=l|X=#JqRzM@+lx~N)>kx_Bzx%w_ 
zAVFBJZn#v!h+Y#1WYBY0%+^}nX!`Zo5FJpuvP?D%5^yy-X#PpN)KhzXk1nfO{i1mz(fE~~E-pQ!?cC@2 z?x?-f=ZTM&Or#iLp)v9aotS*byVU7)A}9M@@4fl(Y(%LvS&h_B^YXm?x&(ku_p}0c zB7X3g7oXC5o13d7h*`T1h+nOtz<&_#MF}39ONwH{jZ?wdMW!;3K; zwJdA-$>S7RxzTJ1{&20`^tEgR$n;CGnJFwBm~C_YdDZ+}_uWn#v-e5s)i%VKSQndb zMkS}Y;2a?VmHf+>{|dDPP#0928Ebj>2Lu1NCI!yhcbZB*-cKs=g)txC4PCSN+>(F^ zQ4*-9z_!KuG^y0)dQ2v-R-{l!84~qWV+FE(hlv0kaIO#)0oanBYCxC!{PM{e8;CZl zd-L@Ud4;Mvgn=py_?cs{GgUeZ%#QRxo;+Ry#UrFG%g>-42hTvt@_Ju^xhPe=zt5i< z{~U%OmBR{*KRPy_pu*DA7mCWT-TG2Y7<`!TZ264to}A$cc^AbrWBM^g)zFw~9eKO( z=4?gkZdU==alYu+xi*~QyS=99ml-^LgT(LZ@M@@$cRziouk4#Tnknj! zdsA3OLal^GxFOnYR66KY74`CXYj2sBpgr%A?+HJgZb^4)zz{;G)yP!DIKPM`drsWk z$Jm`ID*B=db@HC$%FmpTv61Dghz! z92q|snC&i`(o-m7P3DH@US^$a|Aq#SG(4jp-3uvNN2#(0rW-cftE)qr5i+^_GWj!j ztV4^vMEh6irGa{kY_)BV73S3}x}NtbN+my6pG$o0*>}{peC}uxF)Lv()5r6#pz8N^ z+TEcfqVQtg?w@c2a>8!^0*V@Y+Xo0-Dl|iwsGnkWX1(=-N>v;(-e{f`xKwV0i)Gtt zzPN)hTik+FIwLTjgdxz3~ z`)WbQ?dWz#Hi{^_pNp{$rhlSuu8#6{@?RVKrByi^BJ*QuyNwUcMGdYMDVQtM z9A>^$pjDq_{&iKqQEha_7*uW4>%KgCavKRkeo193lA-4I{`;P@l!58GMybU|)BA7d zU;PcG)e)MQ^b;Oo3d810|7_47UZ=g`3dP$Rb96G26I@wDlcAX0bz`N<|Hs}}hDEu3 zZz~odCEX|z(jna-A`K#4A}QTN4I@ek0#ec?N;5-uNlK%{&?6lK3_axVKA!Wd|M8sP z^?rD-_xgW0pB#soXZEw7z1LcMt^2+gHcEC&tZL~ePF_K^$Nt%4Hu6Ui3&7tw?QVt){|Izr>(B^EMbFf=Fx3tPYvTA_ zQz#wobS8J!1dzQR!Vw+fgb|hbF*#pF8g-$*o>ol6qhJBfk%1~2MDK};R0>eL&=UEZ`OsbNQ?dsmWPg`rNjft_Sy$YLY}yE0I*{Zn{$3@pNCj+ zZ#etSR4jR9kxq#+0-UIbeuLbeDXXXkK)TEQ>m%jrpPmq}XAyN2BCu@_?sCwqMXX%^ z=lOt0nYa`+CBJ^%$MCmWu>U*k*6;a1FN}vyX=(ygP{huvYAHdokfU`N-pQy$>sL3T zgB!e$<2SnFa@Na94#C&mwTO$GPmkysudAzBeyG6HsI4k^Ru&t@brqi3dy%)%K3fK4 zdt&cmtGV#n0^V#fPb@%214#VaQJ+1y{pe=U6R9>aEv?VJ3BfEU6dTVBu^3lyjc0sD zf>!~pkB0)~A4`vRgHo_V`gFc{=)5jS;Ly8+Nh>fu*;|l01PvacY1_h%F z@MT!S^*p{ZMd$|G;KlV7+I|~bXsSDhLAl4P`lncGMUPuOJHq(5XO|7D%6i|lbu{+7 ze`36ew%iz1l4BE=0T^ioMbFbNp|cO)_}KAl>Oe|yX{AQ1@XWD;ejI6)5c}+}#Ngr; zHtoCawBD6V&W^L3vQQ@pzR^H-h!k!GklBad^Mcr41 z0!GRb(Jesr?S^mZvorHf_AiZ5ssLAKT>F^6JMnvpfeqzbfbgrSHE*qr!xln7_XvTD zXWR*Jha>^?`^IoV$y|3UZg&D1ICb3dyvl>dW*5IpK*X*?KeX1b>!Igf__1y>?!&?> z;ao080EF%m8$}<~uZv>6F8bysfhQ4phWKUZ8%7eW`^=wQgvWsj6@GcD8a5SZYmO_l zKoj~=MQ>+&C5BbQG~3_psqXHP*t(PPDt|H4Vi2?4?5XFH>E=?8_Gq1o;;P-#q;WGg z(&P`bfSk^VAI&Bl0EN%z(eal$f!2nVuy2I+x@}thh?N2~H1;GSjxh)8vwDaY6S-Xi z$Y9VRR6cZ&U~-};K-!+5$`PS!s4w^O8YGwKks#onB0l@+>uj$Rzjho6C&u;3_{qtw ze2?v?=8S#X=t;;6>4=l`T1d*sDRL$;7J?TUA>vey^3}g!1s^yIuptiCx0t54rVis6 z6tF>bH$4&Ws>J<+b_|tn_1?;*xV~CeMgcgVpk>eOi3BxGcPZp`enuqn;n!t8#@jhlz}n>#PETj;l<5he0z9p^z(Ol91%e$ z{z1YU#fBGeibMGJS9zWbxh98nehHh;dAXGTZ3-N1K;?=zU4xXlc1Gw`A8_XWe6ZW0 z=(~@Cgw(!Ck}w3y?&f#fUc!F8+%uS zyx;6T*KGeu{zFPWfXp^Z4r52e>Qpv?Sud*(lqkGENLzsvD)r!X)~ z(9PG3Ss5^5TZ975iOjk5D}0+;zoJgpjVV?KQH?ruOUb&Bc}abyJ}T}jcx0Swy0xFX z9%d3Jgfc-%>>+@P%zdF$At!f^B|HnM223r_*6>_&bMYP3SwPI{Yc{im?~#AcM2Jg! 
za;bVHfS=bbF<#v>`ZN{KrESWrV9e-eXhV%tjKZVfju~$ov=?%@GU5hQ=eUD4)^L&K z_w=0PwCLl}J;pWttHzNLIdx7+$hQXFs(yQ03Sf;)19wyN_Q@7}yZ&te$1!6RP09C; zoX3={B==c*0QhF%r{yv~wCUc-qr0RUxdTZXnuLhqm)h3KNbN?%wrx-OXOE5k2Ou3w zs*G#AyU6h(Mfp&IyabN%V~!Sr6j!dYl`@?`F0UFGBU3m7!XL zagU1o0m~V){^DuWOZ^Vf8dngs%R{G|Qy9)tftfnAKY00BIl)6L3rE5^L&_a`N?`9V zZ?Xkxqw&-8g!N+>I}oK#74NvLSBf5TM(C9ob1eu`JT++BI1|1#gqPmn;oe}X12cS6 z>a6w9a_xCSydrjTz_EEkd&VQ3)|81#B>`#)+azXFB2uV+^;6vLbhyyL)#VRVBR1j3 ziC0O~lZRfX;6;fC36|XR*Gsr!x?Cq#sGr9cs!6lUEW08yZ!7^{k$i@p1k@wMSJ}wsdqCxJjP+tV~095b}O4 zWssjVuV;EPr_gih&C8}>9ZjuQMCYf6_V=Xv%)Z}Ye)-*!;9YIj(Pefenyd`S+|+86 zb&BJ|4T&`Qd>8^-f@xs(3q}WzJ<=b#K8-HUY~;i1FmKrKi~NWesMQ-^AbBcPL%e%- zv^C9|{GkD~eOJHjMDbkeB=kL~m)PbIXsv#*x2$q9XiDnz{AUj;6Z#jVydx!?fm$IG z)-3Y$fUZbZwVp*bgFZH(ByNm7KjL%nuAus^XYsSTMvwGJN-K^Jmz3x2;TLLf-GQ~_ zu!duO{Sa~gt5{xYDHF1d_2Wr_bs-3ucmEuSI*rWkuSNz2xM>xw1`vh(SmeB!?`CIOFCQBqrYt9;|OUimdvd zf*yXsy}@lH`8ib>W)Jc_30|&a1$dPMsJJ}rTP5@}`CVZ#2B27eD`oTL_zW=*hrGqQ zpG_A(J_^6LFxNb7v^Eu`tMItGyC(}9XFztCFdinyA)A%LXfAi6S0?wPkaPmb_E+9w_yNcvz# z&&$Y}PfgCf8c)=}`XgD0F<2*Oywynxn=SMtCL$cybN48sw@kcF!5f8_r60lU#!3ga zkEjIhb^b=x|6Q~A?yD0kfU;|qMnxZ{N;_z1aNomiSP!QGD{CNk_rNqEZ$ z51q@t?NF@<|FQjiG*GVZe93a|0<_T@K<&A$acX)!)huRRJDi5OkUQY?Ty)E<+Bj1$ zf+TegS4H6#Zt(G~?sGTXzOFRw^kR9sn+qhtS1dli|G~sWR8MrAS-L_g<`9_;oI~koMcujhT0X8=SHx3E}xMCN=Obb-k0hF2m59FNuxfuFi7X-4Vh^!fJpkO5cCb};6cKX zGHU7bYv#LbYuQBV+mrQgUO?`|EMRkuP{?KZn$-ryx!MG&h;$qriv{@my35;sp(Zul z>5eak4yEo3aHq!o9+~lCbLxvk6KojpQ#+J_yfF8jH=Ig6BPqI|Lo`J|ppkv;>Eo?FVwblrZ_t$pQEA zJ#Qs&&x*zAIN{4+?qccON-yx6H8JdeZ?9ST-J$4Ga6;@fdK#kJ_s{GesTKg@W z25fDSMezrNOyR^*oU;QdZ$BRHWdD?<_t$5sw}m;kLYj=t*jRs?+hZ3Z5#IxBlUf&9 zL+jPP#OTLSlqx;3t}|VPs|0(h(w5hw=%gPTIUzn;&bMnGT{dNR$l-9>t-oOCz-TUF z%n*p(!{8}|-X7IC48d^m(e<}2`BNzUScJ%%tI?Qt$Apnb$SmdYUAJ%MbGW!MG2TSK z7v@B<-hftO1U7&3`&zZVw{z0)gZJL zSqBQ8^ky2hF7prU$9XI-;4rfhAmz2bH7#tp(tzBYZx8i2+^{lWR{H)ao>%ss$?T?v zP57fEUh@aK#YypOT6e;M#+(LA2?s@bur)vcW65te@eeY!9kpe~QLg(do?4etfaLIN z07;|0^fkun8$iQe+n$})9b1djkjnm~6xTp|2`E&p9U}EUI9~L z;kDfy#rTH?{K|3cf}e@evgX!fgY2kG2qxQo74u$UB`po?{qP4omsX!^TA@{Njcn4VAFboj9sT#fATX?fBv=*j_mmPjeXm13&96F zU#|7l3j$p`fMbpJXuj4({7{nM8vxO<$cixOni1V+0}?Iez3-B>U(VKhZWn5Q`WnNQ zN>p5JH>QiH|1Aq3^*K$~ahz`;VYcJ%M9X`F-Y(h#%CM;Kacqz|toj00VEO_Q^9_o~ zJyBEI8IF_H$pBc5eFQ)+)tb>#Qc| zrn@&pmO0{c0f|qX1I^N7pg6~@M%u;*C>3}#drUrM*QuwRttaC(x6NW*8QlSNZ9R5| z?>?BI5%LeyUJr%%F84AQ>NKRN++~0gQOZ1@sC+5^`tUPrrS+_x$=26-Ce#*YUCHHN zPT)_?_s*58)oc4#?nB#tH-FC~uW;@ybiB1NIp!Aj_lA?}0C4yq#!VVN|H^5AR06Ex zn~~I_y1KzGtK~%4=|Fuc0CU3HCUR^)*Eo4O*+3}55q@^{hkV>Ug;K__=U*m1zQ%qV z-jwx9|07G;Bj%Io9Ic$PJQ|D0F8wkVmdBC$vXS|3O-LQegiAlZ!XOet>S)6;s<^6|fGIPWcW!MwK9F>&p*nrwY8v9nkvje|HD z1QjK$8z?g9iQlbYnq>%%T0J^M);#4lTD7@dP4BUDX_YH|!Du8DH6wmL+}<8S;}oB4 z=rN}1vOUXdz7XVcvYcoYt&uDyc)r}WzyCOpy#7fVM!@ESc))&%aIidq7-vngKY4qZ zof!*7QEb7wLH^TzmaACTv56REUS0X;3mJ|(L~Hg6>07!uM2y3uSn?qbq#w)zPC5&Z zpbgu&Kmr*z)wy}>#n?!RUk$Ir1SnQaXwq`TK?=GHTIlI!m95v-kQ8-zpFG9iG;Zeh z6hGMy)xOe=I#(k?F{$(Mmz3Bk+qlQHupD_;{S1`fGN9i zU3J#rN+Qp!`dS-Zc4}7Aokax^b@jB#&ehcfA0pOqt}FPo%$g!NdUN}%sbdDifa2>81k#0nttYn;e4<+)(n zEW%~So$mepf#6M)(5#2|gfG(3O`-h_q15}OBJ1Ykg$Nh83CJ4M{bN2U9W=PIC>9^b zDs5U->-Z<*W@cs_)_Xde&G&QMl31>G-IV%oA3%LTN0TVg-=z;5fP4 zJEN=vy_H;1gMFVbjwBfqxsL0WKNzu|u}beg(l5{K?EsGvr@u@n0`waImot5@c9yv4CZ2nA7@Qg@-O+ABkYD2^cv}ZrnnDI;w})5{Mdh5*2OMYnjvXwGvc|g65KQuN*37v597Z??lrq3;No&6K4FSQ~ zR?X!doP$oUfmC6hm!{KR%hKBDc-BnSZJSg_jyyFrFWnvoDgOuuhK_&<|A^i!vxFD( zEB)??Di-|O{=hN(K8_5+XSL&F`KkX0t`TA@%ur-#y7$~&I(nLgCFg&?zmQS zzQ__@tBujkY|n86*4vc4jHwKgcB-}K;jWYC9dB#kwSzCR#kQFKWBdPO;KONHSm1$z zEyJG+#2hOx%}p+Bu1(nm>VmgAa!yy9RRV~VYFJPc`o>n2WB 
z#>m8*3*C*xG%<^=Go|M0{{H^a^t#bJP^Ph0nKZ2aw9sWp@)BB>L7>7#hVec%5Tu8# zw{pOm?*s%mEQPbkqBN8t;R)V6uE~dD;v?Z`VR69$mQX4sZ!2E$41w(p8_Qu)riN>exI@Pkk^zWwsr#jl{VUqRa*5+N4}MrV;J+NT{BD^2`^WRh=XGA(pk!TCas;qpsCLLh)@8_8wSyFZHoCXKOb1HqD&Ud|5RB8hsgE@RvgPL33vT5&kdA^ zB?}QBdPHZW$%lS%yHXK%Zy&e5a%(XV4ifi9=G*Kp9 z;I;iq=Vnlpif3`PbdANK4w8a)(ypkaJ&c&C%>*=?AbA*XmO<*&7~sD)oGlM6WH~vv z3$QB9cT5$+hq-_I7?P>oPSxZGBl11S7j>B^&>n!6^@`x~V@Bf_o-~_X?19WDtrANL zo=k3=jb6XlSzC;@RE&8XG~Zqn?fZ`D{6!W|?rw4nL|Qs`2c=%!`T{fZ-p1UunJFOc zgl;hMS@G1;ssF9f!wZtUg@j)wqeSgFdKNS^w}&r0Q~JosB+L12X#*QDNw|3aBFOd~ zfbc~waXcvQ94wl=PGm18b+#DY*2mLCy6~h>Zhb`ceQB=yD)>Z!QV1z7NFNR7j_)$@ zwbBP#Y2eY>?KG1_NsCIHt}#s>Y52Jt)*dh5wQE2O`Fder62@HG#FGUl-3s)EOzRv< zG9^E(C(~voq;v^PPJZo=Q9S2-M_Z@OGz1mlikj&A>|@s7XC;5T6?1YP5xd14vz~y% zn*o~3v~N0~wl6k7?jC%hFVOM_fFkM)cF!;?sZU3|taCStQx>pWq%&N7z0i&|v$0fE zJ|4D+W|7O9g1bX)3;v4!ycH{`#NQ`Kdu&+fj}|HGGv3l`f0aF{i2W`*rMVb;5=NP< zcQ?jO-M30tGyKRK-$aHG;O#db-#ggUq4HLyFb0o}mD|4^NIz5Z1qmT?)uxB@hLi=z zo7(m`g6g3rCEr=*h~Y8fn4q{eobJneM((sem9t(qcZvyhP z2_0}TepsZ}8DX$iiOBpw!Imbl8drKjFm0U?<8$|>GK25hm(umzc8Y!WNa-d3%4go> ztKMgjS##)%m*pq>Y>sYWIR;hN;yIu#l+db4j={65%Z4HfZFmrm|OM6|kRb7BVDt$=PuQ z2@5z$1%MIDUmK)$De)qcL4as0(opi^V?R+6HwzrXeVCvpGri z1m7Axx`;E2y$nv-MqmJ`hg;OB=M zg$mSF;Ip9VHb9{*L$XFOdfPlFGGweM1K?bQy5{?k<{-%x(=m7)PKfeBJ!PN~4# zpV^7v=VzU+toH{}`4->YM@;l@Mq3BtHB+sVBPTo)XE!9jm#8Fin;)nr*sq%a{HQ7+ zi?$%~*u_+ba-~@3C{6?g>e7~V9#9dX20QElI>y- zZVw0uA<8$Ip3!#DNnv)I?-i{!YK|iaGVASLyBo4Ah}mpZS;n5K)y;hL>$rVyD06+K z@RQ4e9ZE%Srvc+;<{-M}Djzx~)%hGu13`e%AtoR?61S`E0UR(=0_&GbRR${zpd6ak-1^v*{6YYujysKyK@$*e2iYpV^z0UQX9aLO{PHA zsXazoTuk($awLF)-uKgsaJc2FaB>l>j$aixGc=GXh7uq=_CGyIdo7UoIGQ>x*|bW{ zsp7|~y10%yo!@@RBHo9xxPQ#~zxPqrGG6?P_dJD0Ik07EzwP(H2NXzZkEZP9;7}7} zF$C6hgzsu;EdL!QU_ZT8eFin3quAf@oa#@m;FqmdDYudaq*-^S>|r5#reNmAvu(eg z6W88BfguTDkL?-JNxe+fOet{N#I>#v4rnoUiiyX(S=(rVcH54c5WcJ3kjBa&ezfF~ zZS6(%bnM2_j79W&dZiDv$1LtcpOliTn~Xd`Ra+%BlS0MS8yjOs@Jc3V52%A!F?ccR z!l(MHAl#VjXy)R~90+NP*5k`3rC&7AwN)X_NHTm@CO6$(C$d9>_{34PHwmYBIY%s# zdEJltfqpA==Dl&+v3{qAumBq0!;OSl3=odB1Isy$cKU(fQ-MhXW&z8c_nTi^DA_WO zRw{R%u_TG-M|nOhdZx8%U6v6n##8*uT)vWNx~?qvm4b$6SP#biEbQk8aB+-14|RgQ z^rnT`1>6d=1Dl({XTeMamj)t>{Ub$VdbWzUu}yap@o?#Sx2Iipe(ZEhXV5K-W<@IT zc~`3rWz46!4-RHv)|jG$j{}aO4E; z^N=K;<7=XLKs&zhD6EZz+iGKw<)#iDFad`3GNGS8*zIstJL>UCzL>5~fS7%+HkrM| zIh`j&;{vwG)UD~@#MNeO>(g@<5=B(!^Fg%NG}rl|GlWpj*i|j(jenglq!NXhFXVvr zsB=)~1pz+v5aaemA^GqM53UNCOL)!}b;?v_iHGv)bQMa|UPoHI+%2UK7&PgESa9&u z&}XSrd8c`rleIixLRYZ?4`sAsgV#y-_hq}@Jfu@x2p-y(h^7>mo_efW_@hR0WGt&`(}F9s&=P?4X9=gPQ%LlsRt2@`X%Mm zUZ2idLhgA1F8L4U;$gN7cbG6Bw!`aYd1&S9)}6W&iw`YSTd~qPtkJ=TcHsbX$_5EU zAR2sDZeZG64suE4SEEdj#tlX1s0lhwXe;nrbW+DZ+DkXf`$C$*dcAnBxtwMPVVSM3 zZ{W2dog3+!zpBEg@-z^g1S{jHLS^D#Mp5YzXiNlyGMa6B->w1gF6Ygsg?jZj8adIZ zPo1_5$c^qu*EoN$uEPeNTZ&nh&8n37(JCs4P#VH@fU1?<7j7l%Q)g7?p@w{7W+hU* z3gvT&WQJeSWBKFl64|<9;p6WuUs-5aPFiSKSy4{d4-|xxnf&z2%YZzBOQ|n%ASOUhG zdmJU&Z0`geWg)RL9yO+LrvsdI{F~M)4LYj1q(jT;@3Kf3wfjx2enJW*ml(e%xS^$?C*by2Cl9tusi| zNu4e<^{0UGjQX4Ei2oR7{C+XpMwwJ0NBN;au-v7U&{t>=JZ;1?*NAI4ar`~&FU#QH zD^QaK%dziMddjc<|3AJYqb!rf6MR?Y&*$U!nK!KK5z)_V%inR!-|^UAoD}|!H~u%p8(Cm> zv-r)`%t3DDCib88ygxs95;1CqW0}++*?q94`UdCh=a&2HPs(JyyfMYu`zg2NspDTb zkKeycYSbFm`~t5i^DloCuq4F5@h1FLq4Hna8sN*FytoElbgp~3n~5bz#P<4?O~s$>DbCzh0hES*KXuxi z;SK7&O>f}|!^}JArcc)m+fl~ASR@5Kdt-f_j1y%_urdY=eQ19%?VQAcFDo()@4-F4 z?NQi%tyr9WeuAzMXX6PR-w}rcqY$v}XM^Rb*&9UygBbIuqWPl-?0xclkOGycWiMD2 zfBm;X=qpR+ah1qs37!vDO~@dCRI^dLzy^JvW^08(NP2G1A3UB+1ciR7t!zY#qOd+~ zV2FhH96os-;iz&J%Rk2;=vnwD^@D$3#_;tk88ekN1@wP$T*DiHmozjrZ*lYJmGY4GA|XmFaYR$H5{ z(O4^e7Fura2mPY5LBHUcDt_~QgH7h$%1}*-z!7zENr#r%u-6pF=letL@2F70G?$(h 
z$E(t?J`U{>AE01#JJWwp$@^?;9Vm?s;SNci8PEBUu+ESHDA(XdaIjE@_rmVZGTba! zO_)AI^1PRWCzN6P@Ip>OzTs%AO`cGKda4HI{cKPI89jM~K}!<`bDPv#4owDxI<{{b zRO}6!<*xG2A1|ukHgaxK)3+=59BaV#=CQX8SZ%^7jIUXx$>G7w6kxo_FC&s4=ZAH4woj`C2rH-H1i~0PC z-C};gTY2eZe$0Lr{8^kcwbJ>^#}o6eJXp}~P&&2L>3NeSq@T8IvSqJ#g@D_zlF?u4 za&bp?rBU2eM7%#8ZN9B30A2ECbn?O3 zkH%}skpRq&zdub>VCE8W)^>v;jiDK{f&BFKCln0@0r{4j7+ELIJ`Sxx_~upLyL&UA zKHSz$*P`4v~aegA${=>7Bw|RG<3GpPT0;AElE2lb=LoBOB%C# z87$8afB8ggn?jlpJtzLa*sbTY`PUHnio6U=HzJhAxseZ+(lsN0DjEi^IHPY^8B3l% zM$e){G9>-1g^PT9%pj#LEO z^G9aHu6`b<82tC0j0R(wz#a${9M=~}b2(R-do`t5uPUy8V)g!!nAcw6gI8J&C&eXC_(qvd*@3Be>haY?vWF7Gv zZ~}f8Xa|@FT%KtLy!xrMoj`o$#>tChs}BY&w5}am^C3v*+jQc(H?K@rI;M_NAi3ig zD0HE8bmCt99B!Ls-NAi44eZ5@ZkPAJQs6zRSo8mq$%A`#VcXK^?(iobkxej=>lQub z<6>8js#X&qMwCZ5*@w0ZHA@vYER6dMY)?S#!@7$wmCn>Cz~cA`}+rvg* zl;PUMcay}yjA{4ejGj6bJ*o$j6Mj#mZ(X}Z(0}{-6bFjlou-V(%@H|~y@&P#peF;j zdwqB}cbg>R7|kT|)U0(|tP)LxXlRnWyJznbV8@2@X!5BR9m8K9>V;`->Y4gHKn~Zr zCzcM1A*~^@3sK97C_3&YOXCT9)P`B%`u-`J;XT@l>e@>}>AmN*hvP;?CqGS%`FL?} z%dirK_wl#T_N^KOxw_M2FmRkZ zfL$k9$nQ*QKATmGjR{qxJKWy^aN8k@41SysIOP8r`~!i~v+Lu?3NU2`^;O}sh#H`T zj-k+9tM}D6Ump_~c&D;hiDNM?DyVvSoZOGylsuLqh1?fz&odhujJKol1<_HyAS%6A zg)?n~6$Tqhp|yq_c~$M_g84sBHP1w?Ge{=%v1Ar;2|v^ZDKLI~VUx=1(3%0jOcQci z=F%huM~(t}MEOQ)kW<<1!rCzbM~B6{`};3_x^4Qw%+BIY4=WwV*h_OB2a51o06TB6 zu^pUkm@KmM0)T}TR7*E4zPd($%><%N*bEn_S4$UT_CS7nGKw?CVvbi+oX{I zGT_8>U?3)U5Zj-yh_)0K_u8Nx@42noGjNAO8rHn$Y>srSenK&{vba`Ppq=>>fXmcu z1miOCpJ$#7WUAu_bkgv68V}_3JzLo)zcBIHl3fEq8dmE2`#n=bgOfqCI@4^PlaqZ=qF?_Wr6-h zLN~3aLoE}DK$3usQvwI+T;uUaSEW}xT~|!O0OI^WTe~epUI@1cT5ec9w3zkT{?gYZ z7Xe~3e%qwR0z}J}@l|>c*PH$L<-;MVq~Y2x=slOT%rn94kCQuF&CdH?70SOY%AjES zAw27?vIa3zqb{ zby72j3nx8ZXVqKnLqZK)9(D*Wab+ju7~$=jb5!M$H@7>QV>J|Rt$R-FKU4FfMzBpI z#3ZJtbJ#4k)VzgnJ|mI4pX_qQUF#T0qWEkQP(;|#IfCgvf6`*IhXv*V@5qEnuOH+@ zV(y%x^C_1P&Tqx-ZS(1I8$QbosVUhrt~wRpl z$P!B5dt3Qr-8_`#YMICeG8ZySy|C%UXG^Ylu~mVZQ#vtbxfu?4#nu_?BX?BO5l%P# zS%iaBGak@uYpmTK*7QNo8t9WaHrU6Z+1E8}F5iBW=2`QZ)MD{SI|9}uB+5kJa@^Ke ze$DCyug7LI=ynS*udS#pOBqInhQ@qN|=o zwNWz7Z(Fp^4tY5F3{AvCuDF%%zI`&!DlL^YXb;-{F502>s?K9DbFjBx##_xv;P$L- zj?1K#o^{P)Yo^G?PDH<-Ux7DP?lxzJV~HS7QVV06-(20@o9%8d7g3`aIB}UVtrqA4 zAHJ-Pb8&j8v4=kZfaR8Z9$eF7(07da##J*VMzu@P;^5r48}i9tEt#0tbA7n|5TCcEB96dKcUV%L;vYsJJ44J?jljJ@46<`hka#C71nnJ8n zNEJ7&#@UQ^463e7oCoi>zi~EAkfgt)8n%BD3**`kunsadp5{qB^)fm~TWn2xVPE(b zXO{3o>5i9QhYR*EJj`zTu$|ZtKg3TwVHr12Za=2uxE&xp)v^I?+)958bXa8oXmapm z=2QZW*ZyTc0L)|?Pk>gP(_&3?g7il&SUEcl;BQ0>xf4U2NFmZG+!x2MGVdoeo$p2X z$ToXF-&>3WFo`qq@ss`)T(SNp$%^#ghmDxWR+?^K-=AJU$90^?7S+InG>)}e6CQE( z7NqI?Iou;JzkgC%AK1L|$)LiT8MPGZT>f&-l}3#R*x?7chfuzoXA2MG>PAtiS~hiC zDN+Cv!qm-nF464%Nr*K-x`|opLy{b%_xneB+Om5F*^@;`g`1wQ z1K!Q}^P|ocK}Zw(q_l%==e>_TQI9P#`>hu}1^}c_HVncSqh2%P?@rk`e>#~-rx^8w zA@)!3L7I24inxuaZhP;xpW#)n*P&53&i)7bM|r~@DK|8FH{X4!HBLdRX>!-hCJmWi zfPk3u-D*m&=~0((@gM@ffp#Pw;bq{rxP(++vGCewlty0Fr>bx7Nay|339b| zQO@?j+(=-u(;K=S|ME%7k@a$XWgl0KoVs#Gr;f+JF$`iix@mo%b# z2hp_1@b-_3`CkC2AouxxJ!EtusM?%3&;GcGD4-AAr&*U z^Cb@!Dhp=EQn2T)QE)4M$&YVuG-6!gIm$mjDBc!uww>8D$*|=oGj~Tzn#xFYVXJ!D z8Q+5N*i|rot`Dt#)U?xf8S6jR|Bi4@<8?@?*V`5D4#`fls)gpI~Duuy5WAb1+hvH{R^o+m_WLO1~OTUy++|G6VbK+oSo<-A~ zU^jDF$xBwAD_5Ajlpe`G6Pm5uoAu8&Si?0f4HQq^px>@tJSv^_nhy1jb{%NdRzI~m z9)oF=R0-D1df6Xi(2)0BB#2$<e9ZX;|t5rR1Sad6o;Y3 zkD9hHVzQi`dJr<>!ah{C;1IPy(FMWyL8_EPoXZW42b>{^vn}H_VxwA9u&KjLvml(8zDbPZMgIfg)j4ZBF_X;}#n*Nl_do0Qo@O9q(zW(F6g4)=n zNtrXka+TFxN_|g9U?i_4VRHjG%EUT5tN>ma$8WRFs1}oXsZ9Pd()|)}?tJrM?GSQ$ z-?f(c{)(a7&Yz*v6ZWFfzb3)i(i`)rM-D4)onr!i@aZE0%&nFu48tdVR(f~=6ieOYXvh_`yKGRLkOro2)i2_ zr#fTc2{}*Qb-J^jWnZzBpr1A#Ir|}!#9}ItD0Q6m!qUP?`~slgnpwXwb45%ci)nJz 
zlVhdQOd!1IxfbvW_>|npfoehgLd2IZ5#X{h)H%d~g_b4#n|>f&HeD5i@h;t7R)ZTh z%k7Wtt#zB>rjVwtBh35(B9z;tA?WAH^26J*AP3cZB#B{6GTkNtFT=_rDDB^e12$RD ze{qxSrWN_oJ88ksi_y~6KZaMNz^6fcIUuN)q46}Tb}5Oarp&6c@2t| z#)x#>P?)Z=r>LIwI&%RSQ^5=vm&{K~@`PlVXvotasjAb}hp9MT^KBdQwMFA&!&|}# ze*uT80qgl;c>+Sjv}5!VOmacevH3)IGE+Oi=V@5Z<^4ZNOMGT|PawgmK7HzGCVhZ3 zZ3-D$h1`Mq>N{bK2HvEJ?Bc7of{8X3kVgTVznxien?sx$e~pz0yjNQR%kG|d`~ z(584@`&|NesK)8e8!DwTClkE$mbcDPN5~qtr`%1C&C4rdl82-&B+`6VsOC2-25(PW zhft#`(9PyehvOO;vBjDe4ET5{C%O&7;GGqt=R*BU`uIxV0=9n)PKT!hT+qk|t)8HL zPiSn6+s~ne7c|#TU1&tA-r`*Jx|{qN&JTb46RtQn9)FPs9mD~AO(KiZN?9=HnyR*^ zTlJQYujxV;6VgV1s9sEu|0py%!BGKieW3F3G!Qv%m9Ic)f(G9$LXwK*byr;+O}8vGd1f!Vp5&&3 z=uFl$zYxEwdkqlOb;3|9?t#6g$;6Qr0!%b`pAQa_fNRNR-@Abx7%a8`btUJ8ilfr> zca9gfIlClj!TmAw&3+7Dp_7!i7y%kE##6}sXT(GpP^-8$>ldv0+}VLr&@S)(lNkUM z@6vv_*sgBD4tFcAkh?(*5UAm7)ghS&z&S3+HTfmKMR-J5vmn?v-|>mdHm>kF@^t46 zaUs;;gYj9lQOxa(d_y2{|9n3az8tP|c79p}v%lb1$#hvxNMsFcmVI+G^ezp)QV97_ zy11|eK#5sj5L`i|2}z2Qw_I8Lm>x|2oUXQuTB%OY)Q`C*Phgkl4!L!+!^i{48PTQ| zJE;r`Mdq%|WB4^7X7b~2JXJi4CR;1Qwf@P5`k!To=Wp>E_j~rulgV^@t8?@c!-onZ zHzv&2cV;n&p>50y%*@VlB~2kY4zL;4cBcBfD**kxPP%OEGy2K%wWqhNsc45(x@d(9 zW0jOAhjxw3#fo8Z13mKwQ4;3J=Wma)9|0C%+HzZ8FVHTZ ztmUGBm(M=i2Ei?wza~H}vXzJz(J6xz%P}^seN1y~XSc|9bjmz~NBO%0{0P ztuNOs&!m&8b=*}y_*tcF6Q~ema3k@ZOsW^ze4uLq)hu`Y&iDJZp4}bcSB%c6bGI8& z7$v24#di~Tfu(A1{o2-bGCyfSeD=7xx|$TBgYa{{6p0pjD@d)4DNTT{Ls(PtxMEvb{~zHUC9tg#DVXn%6Q` zOb-FoAGY7mYy&gn&R+kmI9-peT4T!5no|!@YVp&PTL&YnQVH>}2m1%&pb(9g;xwpx zqNhelfo=L(q8(hcc=Tdgsjde#LqwnEr8YOzJ^~%2g;8E5MVSXfi&hH zrxPEuvXYp7xL;=?Qu` z@pN78o2^{WX10yJ%<21IV&*9<*&y-^ry5#c9U#gp6mGVg-> z-9$hrQO^2q-+rk)At9tWYmdj>Z44!S)NP*HFXkEior9-|65~0d<`W8pNtyFDinQtd zDzt>nK>yFl%*NNA4rzOR+HF6+$d}7kFFs4HdDp@C{T)=(d#IO)hk2h6AB#EZQXgVL(RS4dq=U`@}Sq{W$D$H#N4(J~#S{pZt&^5b_( z)mI^LCVi2{P5j9aO2TZY%4br?oV_k7kuD$Ps4t32V$RpNNlrpn)mVBEo@vqJzL>ki#wI z*5&yM)v8iA!PkS)6v}UV@3bYCM^2U;ev;dBkSob=W?F~-#svUy4|@SS{aP*z+P%i{ zoLi(&N}+29hkVUOi#=RDE58_KK690?R7YZiKe;FpYAX|%#7hYwdV&gpB#&uW+c|ZW z)y1oWy`&UwsRhV6kIE6H#%m>lpvuqTv#_rC7PrEw!O2oWqx9kV$CL<>2{VelNXWOe zGoh<1xrTf8tN|-)cL7t+>z9#JD0zzAdu%*0{`N_ZLvvB@lU<5F9Kfo0 zmn}h#g}B!jLtH`qZ>8#8zOt)ebqxr7L%%f1`C{oMqQKV~*F=GxpKK$O*r6t=8lDRi&p=eCg43&} z1^|4xXIf7hIb`&iKX8GE0r(E^Yo9nsf_XOpBPp-tjFhNaB%Z(@T$_6j8 zbv5x!ne3-z1{^JV0Z-&{1f$a$(*)FVgO!mJD`E zC`oX=m*&;erG%)W>2}*rJ=K$K;-aQ&M|B08vommo!EkJn2#@;2J&(dJb34^KRQ70# z!kVCc;AnbwIrNhn{egNU0}$c4`3H;cqqY_=fu1nESAx#8cPZ-nPx9VIv=hQ5p1Sh@)^z2jln=qKTt5-E2{mdjqAhLDVU?{ zKoxvpK*|=FwjG<-m_R*L5_(Q=jBAA3r~F}8SDj~Sizvb6Rr&X%2ouVFU#$te`oW*j zo4_h}wN54DtoD@S-CyEcU$r++We%d={?9#)+Jm)=nRw(+|565JEMS_U4@W#9_QR29 zT$=d0?kv`yu21w97SVbnY)ladZYPuXx2cjRoHK9I%7BFm02B@KJ3C#sf?c@>k@g`< zU{xe40MMk-Iurx@(ieJza8B+4!M))+(|g-Ps2}H3f5XSR8lTIu8t%R#VpXLrO{Z5+ z4`e*X2A{H>8`c<{c2|CR!)+!GQG7cp8@pUi$2INJLATK_uGx>@)#Q?D&$?purZr&l z{Ou_{8Pr7Rw};P(hcCmIOA087^To{|_MVAKD_?ufjG3}1&SJ%Q_y6_#_GJ6+PgG=o zNgR>2Hd@N9>mBfg7U6ERD--3_vK&`ySaquKq4| zDL0*bKf1U4MVTmyE9$^~S@O&b4f6FTsR_ttdl17-A;XVIYMl+7OUhq`JK8nUKH_Rx z7HW+fOhMjK{l>in0sG)%9?DKRl#gE?M*=JE(iSI{Rw%WH21yOV{HrUZ8qv_0aCw8{ zc(Km3hoQWQFF9~o&iK!B_e~0|51+v2!^`oZBdd-HDXO*Qv-MV;#lGkWz1eR==P|0CI3+0o%Hyji|Jc1w?^4lysqHH%R1bR*;UJ_9L;|A z5-s$Z+B4&wqfx5z`LR=Z>2F4cJtccAb0-Pbw4iZe)xbzIbE}heOd@FPsLTDKKd_?{=cS3DsDFopb{09n)~j3vRhHVx~TWe z>GVzDeJj8dHYor4_5rjQ{15-Kq?)Z-CaxF^T}*}+zApg2UxTF9ru*bs02%_93CTYf zD@d2ZSI*u~?>{Re<^GR`ct4Blwp<6UabsaJN_b^}e=I|J;-@F00chBYU@2qZ07h%b zOvZmQRN(k^xOdvCAmKdDg*{jlFBC$u_DJ;x=*zQ5`NVaY+Ff-5qO^Xdk>%Ms(D}7X ze3?p#mj9TbWZruVBy?(B{u|!WaX8cX;rK1L{o+r-!D3&EBz{%`tnrtkpTvVOi%)=41_dMFzVvbN|>~ZeaFi>-rnLK116=to 
z7#bu(9MYza2tQ}Ak^nZbgo>iS?la&!ewGCej#JTw%m%XS1o3?WvtFMG^+XUL8Xxh)QK zF9M_9i82k*jc5E82fym4(*M+q5(>Dpqps(csRoHk2DVq5(fROZtmchP&e@pkA2k|d zR^-0Sw<{Mz0@1FQ_(cBPUH`I^9XPmNvV?_)EF~YqBgu|Y+h3V_i4{bjxsc(xnz5;f z)Av&8r=xjjaUsyC&Wnhup|{y_-=F-^NvVG^=c#@?g|`@H)1UkC)DOG}9N)FpcWQt( zRg3F8QIvNhx`ihAzCrKYZ9ch$r`Lzx`%-wJ2si-Cz%xEv{-L2CY{2U?=g9!p_2}=7 zZ(f((LhS)asv_$$u<>b41;eNixy2<6X|Mukal8Wx3{g-smDSv#Srulvk3AhMA;6?< zRvN>(y~b+Zeb*Y9fRbS+DPYWr?XLsRau(Nx0WHs}{g$t_eai(PR_iO(XgaT-pZm7A z0raNcrPMJR+_SIP>iDE(Qd9{9s5w29M7G^Y<_%}DrwuJe-<*48#49tFB}|Jt64yrFr^)@;+W?7N^BLHO-6!lTuk;#@#9Ntxko`hgLS z@u^@8D?Ogdj_pA0^XE|0mo?bIEzTe({OKc$Hu8Cy^z2c0Rp@D}q!$cS&rE9F+RZTz z=3|cytoGK~4MfKC&rBw)J6!6+s(bEp>C5oHWAf{cF&W96E<~~6Wn_a`aDnO0ag_cx z1Oj7Y7(lXu@o(Qf2(g3WxcBhZLyY7ZV$=2CE{1{IY?U&){$lyL6eBSz+7og%4_q6m zazG>XgVqAh0aC~kG8qL&9H+)eZW{9cZA#aa@r2VWb{TyS_w1KllJk~3XdxXluV>na z+d<4a7HkmFa-V-9oBOl?)uw|9>%_>$>*Mf-9U|cQrBtI`1(@*A^nDa5tpe}H#}*z> zwSPPLuQ{89N5u9;KrpDU*Os5!H=vCQU_Mhesop-+q#Y{0;7>4P!XPGWwVaBw$~9N3fDwbmKDu6E?i#3%F(I{pHtM5O=!l-$g_> zS@^f{skxvI4UBDV;_-WNIZ}3^Cf$iSp*pN9&yNFLh8)-23PD)cWNK|vft(54kaUVpgv>bek>RV2rQQ5SejW5AIJ4k zaX#F@SUU|$Vh*|eNO7d9o)EfRpYk#Gp@qN>hTr1)HNez*D!)@Mr@oti{MrNt6JSbz z@UH60kzTh;kS@Y&gRttgQg1gYo=ETk@F;$5tu^8dzYvGDYzzB2M6_m($K%HW+XX1X z5yAT!7xeb6q~&PQ6&uz1Wm1;%G6+rdFp|Spt!(DKs=Ll?sAMP5Mn{S7{<@E8? zw;aPp?;Ud3lX-u49?1LsRce}f_eycMyCW)<+$piN1s`IgnQ|~Fpor;{KK#Q(UyBYP z;N6oowgowMz2l+}-rFUs)_EJtr#;@ZVuv;Xka@JQvp|yC4tYc3sIZ4FG^c2M^0yo1 z*0RgUXlZR}mbRBK=~8v;c5lVlMAWYb;8tp!L1WD>QpYSKZ@3J2oI!F=|9PXt8%ka~ zGM}fJlb3vGivq&y^P5F$1aG)1v1s4PXHmcB_gwY!LPK0Dc+M&)io&deur?8*QO|bB zUJ9JKH_@jZ?D4e(;%uET*YqiQHt?18yi(CCdm>?R??K3YahATPTDyRsj^^b{`4=6+ F{|}5kfO`M{ literal 0 HcmV?d00001 diff --git a/docs/guide/letsencrypt_flow.png b/docs/guide/letsencrypt_flow.png new file mode 100644 index 0000000000000000000000000000000000000000..f35a141039a1204aa5680bbb907491dd55ad708d GIT binary patch literal 90792 zcmeFZcRbbq`#;VxONGcPBa$+cBJ&&(GDBuogpj>5kCVy@DP&~y$|@^+ud=d3WQ*** zInMY#o_aOkpZ8zCe|~?wZ?{*k8^`lJ#;e04>KJmARst;T|riZfPe%? 
 
 Also included in the configurations above are several best practices when it comes to deploying TLS. Things like enabling
@@ -318,7 +318,7 @@ Periodic Tasks
 ==============
 Lemur contains a few tasks that are run and scheduled basis, currently the recommend way to run these tasks is to create
-a cron job that runs the commands.
+celery tasks or cron jobs that run these commands.
There are currently three commands that could/should be run on a periodic basis:
@@ -326,11 +326,124 @@ There are currently three commands that could/should be run on a periodic basis:
 
 - `notify`
 - `check_revoked`
 - `sync`
 
+If you are using LetsEncrypt, you must also run the following:
+
+- `fetch_all_pending_acme_certs`
+- `remove_old_acme_certs`
+
 How often you run these commands is largely up to the user. `notify` and `check_revoked` are typically run at least once a day.
-`sync` is typically run every 15 minutes.
+`sync` is typically run every 15 minutes. `fetch_all_pending_acme_certs` should be run frequently (every minute is fine).
+`remove_old_acme_certs` can be run more rarely, such as once every week.
 
 Example cron entries::
 
     0 22 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur notify expirations
     */15 * * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur source sync -s all
     0 22 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur certificate check_revoked
+
+
+Example Celery configuration (to be placed in your configuration file)::
+
+    CELERYBEAT_SCHEDULE = {
+        'fetch_all_pending_acme_certs': {
+            'task': 'lemur.common.celery.fetch_all_pending_acme_certs',
+            'options': {
+                'expires': 180
+            },
+            'schedule': crontab(minute="*"),
+        },
+        'remove_old_acme_certs': {
+            'task': 'lemur.common.celery.remove_old_acme_certs',
+            'options': {
+                'expires': 180
+            },
+            'schedule': crontab(hour=7, minute=30, day_of_week=1),
+        },
+        'clean_all_sources': {
+            'task': 'lemur.common.celery.clean_all_sources',
+            'options': {
+                'expires': 180
+            },
+            'schedule': crontab(hour=1, minute=0, day_of_week=1),
+        },
+        'sync_all_sources': {
+            'task': 'lemur.common.celery.sync_all_sources',
+            'options': {
+                'expires': 180
+            },
+            'schedule': crontab(hour="*/3", minute=5),
+        },
+        'sync_source_destination': {
+            'task': 'lemur.common.celery.sync_source_destination',
+            'options': {
+                'expires': 180
+            },
+            'schedule': crontab(hour="*"),
+        }
+    }
+
+To enable celery support, you must also have configuration values that tell Celery which broker and backend to use.
+Here are the Celery configuration variables that should be set::
+
+    CELERY_RESULT_BACKEND = 'redis://your_redis_url:6379'
+    CELERY_BROKER_URL = 'redis://your_redis_url:6379'
+    CELERY_IMPORTS = ('lemur.common.celery')
+    CELERY_TIMEZONE = 'UTC'
+
+You must start a single Celery scheduler instance and one or more worker instances in order to handle incoming tasks.
+The scheduler can be started with::
+
+    LEMUR_CONF='/location/to/conf.py' /location/to/lemur/bin/celery -A lemur.common.celery beat
+
+And the worker can be started with desired options such as the following::
+
+    LEMUR_CONF='/location/to/conf.py' /location/to/lemur/bin/celery -A lemur.common.celery worker --concurrency 10 -E -n lemurworker1@%%h
+
+Supervisor or systemd configurations should be created for these in production environments as appropriate.
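+
+As an illustration only, a minimal systemd unit for the worker might look like the sketch below (the install paths,
+the `lemuruser` account and the unit layout are assumptions and must be adapted to your environment; a similar unit
+can be written for the beat scheduler)::
+
+    [Unit]
+    Description=Lemur Celery worker
+    After=network.target
+
+    [Service]
+    User=lemuruser
+    Environment=LEMUR_CONF=/location/to/conf.py
+    WorkingDirectory=/location/to/lemur
+    # %% stops systemd from expanding the specifier, so celery still receives the literal %h (hostname) placeholder
+    ExecStart=/location/to/lemur/bin/celery -A lemur.common.celery worker --concurrency 10 -E -n lemurworker1@%%h
+    Restart=on-failure
+
+    [Install]
+    WantedBy=multi-user.target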
+
+Add support for LetsEncrypt
+===========================
+
+LetsEncrypt is a free, limited-feature certificate authority that offers publicly trusted certificates that are valid
+for 90 days. LetsEncrypt does not use organizational validation (OV), and instead relies on domain validation (DV).
+LetsEncrypt requires that we prove ownership of a domain before we're able to issue a certificate for that domain, each
+time we want a certificate.
+
+The most common methods to prove ownership are HTTP validation and DNS validation. Lemur supports DNS validation
+through the creation of DNS TXT records.
+
+In a nutshell, when we send a certificate request to LetsEncrypt, they generate a random token and ask us to put that
+token in a DNS TXT record to prove ownership of a domain. If a certificate request has multiple domains, we must
+prove ownership of all of these domains through this method. The token is typically written to a TXT record at
+_acme-challenge.domain.com. Once we create the appropriate TXT record(s), Lemur will try to validate propagation
+before requesting that LetsEncrypt finalize the certificate request and send us the certificate.
+
+.. figure:: letsencrypt_flow.png
+
+To start issuing certificates through LetsEncrypt, you must enable Celery support within Lemur first. After doing so,
+you need to create a LetsEncrypt authority. To do this, visit
+Authorities -> Create. Set the applicable attributes and click "More Options".
+
+.. figure:: letsencrypt_authority_1.png
+
+You will need to set "Certificate" to LetsEncrypt's active chain of trust for the authority you want to use. To find
+the active chain of trust at the time of writing, please visit `LetsEncrypt
+`_.
+
+Under Acme_url, enter the appropriate endpoint URL. Lemur supports LetsEncrypt's V2 API, and we recommend you use
+this. At the time of writing, the staging and production URLs for LetsEncrypt V2 are
+https://acme-staging-v02.api.letsencrypt.org/directory and https://acme-v02.api.letsencrypt.org/directory.
+
+.. figure:: letsencrypt_authority_2.png
+
+After creating the authorities, we will need to create a DNS provider. Visit `Admin` -> `DNS Providers` and click
+`Create`. Lemur comes with a few provider plugins built in, with different options. Create a DNS provider with the
+appropriate choices.
+
+.. figure:: create_dns_provider.png
+
+By default, users will need to select the DNS provider that is authoritative over their domain in order for the
+LetsEncrypt flow to function. However, Lemur will attempt to automatically determine the appropriate provider if
+possible. To enable this functionality, periodically (or through Cron/Celery) run `lemur dns_providers get_all_zones`.
+This command will traverse all DNS providers, determine which zones they control, and upload this list of zones to
+Lemur's database (in the dns_providers table). Alternatively, you can manually input this data.
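+
+For example, a cron entry in the same style as the earlier examples could refresh the zone list once a day (the
+schedule and the paths below are placeholders borrowed from the cron examples above, not requirements)::
+
+    0 6 * * * lemuruser export LEMUR_CONF=/Users/me/.lemur/lemur.conf.py; /www/lemur/bin/lemur dns_providers get_all_zones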
moduleauthor:: Hossein Shafagh """ import time From 424b517914de6b749955378f23bfad8e9fb20fc4 Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Wed, 10 Jul 2019 06:53:19 -0700 Subject: [PATCH 260/357] Fix doc images --- docs/{guide => production}/create_dns_provider.png | Bin .../letsencrypt_authority_1.png | Bin .../letsencrypt_authority_2.png | Bin docs/{guide => production}/letsencrypt_flow.png | Bin 4 files changed, 0 insertions(+), 0 deletions(-) rename docs/{guide => production}/create_dns_provider.png (100%) rename docs/{guide => production}/letsencrypt_authority_1.png (100%) rename docs/{guide => production}/letsencrypt_authority_2.png (100%) rename docs/{guide => production}/letsencrypt_flow.png (100%) diff --git a/docs/guide/create_dns_provider.png b/docs/production/create_dns_provider.png similarity index 100% rename from docs/guide/create_dns_provider.png rename to docs/production/create_dns_provider.png diff --git a/docs/guide/letsencrypt_authority_1.png b/docs/production/letsencrypt_authority_1.png similarity index 100% rename from docs/guide/letsencrypt_authority_1.png rename to docs/production/letsencrypt_authority_1.png diff --git a/docs/guide/letsencrypt_authority_2.png b/docs/production/letsencrypt_authority_2.png similarity index 100% rename from docs/guide/letsencrypt_authority_2.png rename to docs/production/letsencrypt_authority_2.png diff --git a/docs/guide/letsencrypt_flow.png b/docs/production/letsencrypt_flow.png similarity index 100% rename from docs/guide/letsencrypt_flow.png rename to docs/production/letsencrypt_flow.png From 41c781318cdec7a1ac1b793ee790481141c6b63b Mon Sep 17 00:00:00 2001 From: arnydo Date: Wed, 10 Jul 2019 10:08:14 -0400 Subject: [PATCH 261/357] Add ADCS in Docs Add info regarding the ADCS plugin created by "https://github.com/sirferl/lemur". "lemur_adcs" plugin is part of Lemur by default so I added to main plugins section within Docs. --- docs/administration.rst | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docs/administration.rst b/docs/administration.rst index 491edcf1..ac8fd542 100644 --- a/docs/administration.rst +++ b/docs/administration.rst @@ -1086,6 +1086,18 @@ progress and the API is not frozen. Lemur includes several plugins by default. Including extensive support for AWS, VeriSign/Symantec. +Active Directory Certificate Services +-------- + +:Authors: + sirferl +:Type: + Issuer +:Description: + Enables the creation of certificates by ADCS (Active Directory Certificate Services) +:Links: + https://github.com/sirferl/lemur + Verisign/Symantec ----------------- From 66bff57c04345facf7d43064f91331f498a25e20 Mon Sep 17 00:00:00 2001 From: arnydo Date: Wed, 10 Jul 2019 12:10:47 -0400 Subject: [PATCH 262/357] Add ADCS Plugin Configuration to Docs Add configuration options based on https://github.com/Netflix/lemur/pull/2255#issue-240136873 --- docs/administration.rst | 74 +++++++++++++++++++++++++++++++---------- 1 file changed, 57 insertions(+), 17 deletions(-) diff --git a/docs/administration.rst b/docs/administration.rst index ac8fd542..e292ae03 100644 --- a/docs/administration.rst +++ b/docs/administration.rst @@ -593,8 +593,60 @@ If you are not using a metric provider you do not need to configure any of these Plugin Specific Options ----------------------- +Active Directory Certificate Services Plugin +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + +.. data:: ADCS_SERVER + :noindex: + + FQDN of your ADCS Server + + +.. data:: ADCS_AUTH_METHOD + :noindex: + + The chosen authentication method. 
Either ‘basic’ (the default), ‘ntlm’ or ‘cert’ (SSL client certificate). The next 2 variables are interpreted differently for different methods. + + +.. data:: ADCS_USER + :noindex: + + The username (basic) or the path to the public cert (cert) of the user accessing PKI + + +.. data:: ADCS_PWD + :noindex: + + The passwd (basic) or the path to the private key (cert) of the user accessing PKI + + +.. data:: ADCS_TEMPLATE + :noindex: + + Template to be used for certificate issuing. Usually display name w/o spaces + + +.. data:: ADCS_START + :noindex: + +.. data:: ADCS_STOP + :noindex: + +.. data:: ADCS_ISSUING + :noindex: + + Contains the issuing cert of the CA + + +.. data:: ADCS_ROOT + :noindex: + + Contains the root cert of the CA + + Verisign Issuer Plugin -^^^^^^^^^^^^^^^^^^^^^^ +~~~~~~~~~~~~~~~~~~~~~~ Authorities will each have their own configuration options. There is currently just one plugin bundled with Lemur, Verisign/Symantec. Additional plugins may define additional options. Refer to the plugin's own documentation @@ -642,7 +694,7 @@ for those plugins. Digicert Issuer Plugin -^^^^^^^^^^^^^^^^^^^^^^ +~~~~~~~~~~~~~~~~~~~~~~ The following configuration properties are required to use the Digicert issuer plugin. @@ -690,7 +742,7 @@ The following configuration properties are required to use the Digicert issuer p CFSSL Issuer Plugin -^^^^^^^^^^^^^^^^^^^ +~~~~~~~~~~~~~~~~~~~ The following configuration properties are required to use the CFSSL issuer plugin. @@ -716,7 +768,7 @@ The following configuration properties are required to use the CFSSL issuer plug Hashicorp Vault Source/Destination Plugin -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Lemur can import and export certificate data to and from a Hashicorp Vault secrets store. Lemur can connect to a different Vault service per source/destination. @@ -738,7 +790,7 @@ Vault Destination supports a regex filter to prevent certificates with SAN that AWS Source/Destination Plugin -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In order for Lemur to manage its own account and other accounts we must ensure it has the correct AWS permissions. @@ -1086,18 +1138,6 @@ progress and the API is not frozen. Lemur includes several plugins by default. Including extensive support for AWS, VeriSign/Symantec. 
-Active Directory Certificate Services --------- - -:Authors: - sirferl -:Type: - Issuer -:Description: - Enables the creation of certificates by ADCS (Active Directory Certificate Services) -:Links: - https://github.com/sirferl/lemur - Verisign/Symantec ----------------- From 2628ed1a8272e100c2c36e852cf502349c9b7e96 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 11 Jul 2019 23:00:35 -0700 Subject: [PATCH 263/357] better alerting --- lemur/common/celery.py | 65 ++++++++++++++++++++++++++++++++++++++++-- lemur/common/redis.py | 33 +++++++++++++++++++++ 2 files changed, 95 insertions(+), 3 deletions(-) create mode 100644 lemur/common/redis.py diff --git a/lemur/common/celery.py b/lemur/common/celery.py index d3cc7621..b775396a 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -9,6 +9,7 @@ command: celery -A lemur.common.celery worker --loglevel=info -l DEBUG -B """ import copy import sys +import time from datetime import datetime, timezone, timedelta from celery import Celery @@ -16,6 +17,7 @@ from celery.exceptions import SoftTimeLimitExceeded from flask import current_app from lemur.authorities.service import get as get_authority +from lemur.common.redis import RedisHandler from lemur.destinations import service as destinations_service from lemur.extensions import metrics, sentry from lemur.factory import create_app @@ -30,6 +32,9 @@ if current_app: else: flask_app = create_app() +red = RedisHandler(host=current_app.config.get('REDIS_HOST', 'localhost'), + port=current_app.config.get('REDIS_PORT', 6379), + db=current_app.config.get('REDIS_DB', 0)).redis() def make_celery(app): celery = Celery( @@ -68,6 +73,30 @@ def is_task_active(fun, task_id, args): return False +@celery.task() +def report_celery_last_success_metrics(): + """ + For each celery task, this will determine the number of seconds since it has last been successful. + + Celery tasks should be emitting redis stats with a deterministic key (In our case, `f"{task}.last_success"`. + report_celery_last_success_metrics should be ran periodically to emit metrics on when a task was last successful. + Admins can then alert when tasks are not ran when intended. Admins should also alert when no metrics are emitted + from this function. 
+ + """ + function = f"{__name__}.{sys._getframe().f_code.co_name}" + current_time = int(time.time()) + schedule = current_app.config.get('CELERYBEAT_SCHEDULE') + for _, t in schedule.items(): + task = t.get("task") + last_success = int(red.get(f"{task}.last_success") or 0) + metrics.send(f"{task}.time_since_last_success", 'gauge', current_time - last_success) + red.set( + f"{function}.last_success", int(time.time()) + ) # Alert if this metric is not seen + metrics.send(f"{function}.success", 'counter', 1) + + @celery.task(soft_time_limit=600) def fetch_acme_cert(id): """ @@ -80,8 +109,9 @@ def fetch_acme_cert(id): if celery.current_task: task_id = celery.current_task.request.id + function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) log_data = { - "function": "{}.{}".format(__name__, sys._getframe().f_code.co_name), + "function": function, "message": "Resolving pending certificate {}".format(id), "task_id": task_id, "id": id, @@ -165,11 +195,15 @@ def fetch_acme_cert(id): log_data["failed"] = failed log_data["wrong_issuer"] = wrong_issuer current_app.logger.debug(log_data) + metrics.send(f"{function}.resolved", 'gauge', new) + metrics.send(f"{function}.failed", 'gauge', failed) + metrics.send(f"{function}.wrong_issuer", 'gauge', wrong_issuer) print( "[+] Certificates: New: {new} Failed: {failed} Not using ACME: {wrong_issuer}".format( new=new, failed=failed, wrong_issuer=wrong_issuer ) ) + red.set(f'{function}.last_success', int(time.time())) @celery.task() @@ -177,8 +211,9 @@ def fetch_all_pending_acme_certs(): """Instantiate celery workers to resolve all pending Acme certificates""" pending_certs = pending_certificate_service.get_unresolved_pending_certs() + function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) log_data = { - "function": "{}.{}".format(__name__, sys._getframe().f_code.co_name), + "function": function, "message": "Starting job.", } @@ -195,11 +230,18 @@ def fetch_all_pending_acme_certs(): current_app.logger.debug(log_data) fetch_acme_cert.delay(cert.id) + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) + @celery.task() def remove_old_acme_certs(): """Prune old pending acme certificates from the database""" - log_data = {"function": "{}.{}".format(__name__, sys._getframe().f_code.co_name)} + function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) + log_data = { + "function": function, + "message": "Starting job.", + } pending_certs = pending_certificate_service.get_pending_certs("all") # Delete pending certs more than a week old @@ -211,6 +253,9 @@ def remove_old_acme_certs(): current_app.logger.debug(log_data) pending_certificate_service.delete(cert) + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) + @celery.task() def clean_all_sources(): @@ -218,6 +263,7 @@ def clean_all_sources(): This function will clean unused certificates from sources. This is a destructive operation and should only be ran periodically. This function triggers one celery task per source. 
""" + function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) sources = validate_sources("all") for source in sources: current_app.logger.debug( @@ -225,6 +271,9 @@ def clean_all_sources(): ) clean_source.delay(source.label) + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) + @celery.task() def clean_source(source): @@ -244,6 +293,7 @@ def sync_all_sources(): """ This function will sync certificates from all sources. This function triggers one celery task per source. """ + function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) sources = validate_sources("all") for source in sources: current_app.logger.debug( @@ -251,6 +301,9 @@ def sync_all_sources(): ) sync_source.delay(source.label) + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) + @celery.task(soft_time_limit=7200) def sync_source(source): @@ -279,6 +332,7 @@ def sync_source(source): return try: sync([source]) + metrics.send(f"{function}.success", 'counter', '1', metric_tags={"source": source}) except SoftTimeLimitExceeded: log_data["message"] = "Error syncing source: Time limit exceeded." current_app.logger.error(log_data) @@ -290,6 +344,8 @@ def sync_source(source): log_data["message"] = "Done syncing source" current_app.logger.debug(log_data) + metrics.send(f"{function}.success", 'counter', 1, metric_tags=source) + red.set(f'{function}.last_success', int(time.time())) @celery.task() @@ -302,9 +358,12 @@ def sync_source_destination(): We rely on account numbers to avoid duplicates. """ current_app.logger.debug("Syncing AWS destinations and sources") + function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) for dst in destinations_service.get_all(): if add_aws_destination_to_sources(dst): current_app.logger.debug("Source: %s added", dst.label) current_app.logger.debug("Completed Syncing AWS destinations and sources") + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) diff --git a/lemur/common/redis.py b/lemur/common/redis.py new file mode 100644 index 00000000..a996ad67 --- /dev/null +++ b/lemur/common/redis.py @@ -0,0 +1,33 @@ +""" +Helper Class for Redis + +""" +import redis +#from flask import current_app + + +class RedisHandler: + #def __init__(self, host=current_app.config.get('REDIS_HOST', 'localhost'), + # port=current_app.config.get('REDIS_PORT', 6379), + # db=current_app.config.get('REDIS_DB', 0)): + def __init__(self, host, port, db): + self.host = host + self.port = port + self.db = db + + def redis(self, db=0): + # The decode_responses flag here directs the client to convert the responses from Redis into Python strings + # using the default encoding utf-8. This is client specific. + red = redis.StrictRedis(host=self.host, port=self.port, db=self.db, charset="utf-8", decode_responses=True) + return red + + +def redis_get(key, default=None): + red = RedisHandler().redis() + try: + v = red.get(key) + except redis.exceptions.ConnectionError: + v = None + if not v: + return default + return v From 97d74bfa1d4a946e8002042eb2c20032353dc1e7 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 12 Jul 2019 08:47:39 -0700 Subject: [PATCH 264/357] fixing the app context issue. 
we will create an app if no current_app available --- lemur/common/celery.py | 5 ++--- lemur/common/redis.py | 14 +++++++++----- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index b775396a..05e66926 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -32,9 +32,8 @@ if current_app: else: flask_app = create_app() -red = RedisHandler(host=current_app.config.get('REDIS_HOST', 'localhost'), - port=current_app.config.get('REDIS_PORT', 6379), - db=current_app.config.get('REDIS_DB', 0)).redis() +red = RedisHandler().redis() + def make_celery(app): celery = Celery( diff --git a/lemur/common/redis.py b/lemur/common/redis.py index a996ad67..4af39aef 100644 --- a/lemur/common/redis.py +++ b/lemur/common/redis.py @@ -3,14 +3,18 @@ Helper Class for Redis """ import redis -#from flask import current_app +from flask import current_app +from lemur.factory import create_app +if current_app: + flask_app = current_app +else: + flask_app = create_app() class RedisHandler: - #def __init__(self, host=current_app.config.get('REDIS_HOST', 'localhost'), - # port=current_app.config.get('REDIS_PORT', 6379), - # db=current_app.config.get('REDIS_DB', 0)): - def __init__(self, host, port, db): + def __init__(self, host=flask_app.config.get('REDIS_HOST', 'localhost'), + port=flask_app.config.get('REDIS_PORT', 6379), + db=flask_app.config.get('REDIS_DB', 0)): self.host = host self.port = port self.db = db From 1b1bdbb261c3f6b03bf317cf91a154c3b19d06c9 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 12 Jul 2019 10:25:37 -0700 Subject: [PATCH 265/357] spacing --- lemur/common/redis.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lemur/common/redis.py b/lemur/common/redis.py index 4af39aef..0bddf9b4 100644 --- a/lemur/common/redis.py +++ b/lemur/common/redis.py @@ -11,6 +11,7 @@ if current_app: else: flask_app = create_app() + class RedisHandler: def __init__(self, host=flask_app.config.get('REDIS_HOST', 'localhost'), port=flask_app.config.get('REDIS_PORT', 6379), From ae1633b0f2afc29a1ff618c2d7b2944bb2b6cf66 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 12 Jul 2019 10:38:47 -0700 Subject: [PATCH 266/357] updating requirements has been a while since last update, more testing in deployment needed --- requirements-dev.txt | 19 ++++++++------- requirements-docs.txt | 47 +++++++++++++++++++------------------ requirements-tests.txt | 53 ++++++++++++++++++++++++------------------ requirements.txt | 45 ++++++++++++++++++----------------- 4 files changed, 90 insertions(+), 74 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 030c3f93..5e7b36f2 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,31 +6,34 @@ # aspy.yaml==1.3.0 # via pre-commit bleach==3.1.0 # via readme-renderer -certifi==2019.3.9 # via requests +certifi==2019.6.16 # via requests cfgv==2.0.0 # via pre-commit chardet==3.0.4 # via requests docutils==0.14 # via readme-renderer flake8==3.5.0 -identify==1.4.3 # via pre-commit +identify==1.4.5 # via pre-commit idna==2.8 # via requests -importlib-metadata==0.17 # via pre-commit +importlib-metadata==0.18 # via pre-commit invoke==1.2.0 mccabe==0.6.1 # via flake8 nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.16.1 +pre-commit==1.17.0 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.4.2 # via readme-renderer -pyyaml==5.1 +pyyaml==5.1.1 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine requests==2.22.0 # via requests-toolbelt, 
twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit -tqdm==4.32.1 # via twine +tqdm==4.32.2 # via twine twine==1.13.0 urllib3==1.25.3 # via requests -virtualenv==16.6.0 # via pre-commit +virtualenv==16.6.1 # via pre-commit webencodings==0.5.1 # via bleach -zipp==0.5.1 # via importlib-metadata +zipp==0.5.2 # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.0.1 # via twine diff --git a/requirements-docs.txt b/requirements-docs.txt index c0fe427e..6222687e 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,23 +4,23 @@ # # pip-compile --no-index --output-file=requirements-docs.txt requirements-docs.in # -acme==0.34.2 +acme==0.36.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 -alembic==1.0.10 +alembic==1.0.11 amqp==2.5.0 -aniso8601==6.0.0 +aniso8601==7.0.0 arrow==0.14.2 asn1crypto==0.24.0 asyncpool==1.0 babel==2.7.0 # via sphinx -bcrypt==3.1.6 +bcrypt==3.1.7 billiard==3.6.0.0 blinker==1.4 -boto3==1.9.160 -botocore==1.12.160 +boto3==1.9.187 +botocore==1.12.187 celery[redis]==4.3.0 -certifi==2019.3.9 +certifi==2019.6.16 certsrv==2.1.1 cffi==1.12.3 chardet==3.0.4 @@ -32,7 +32,7 @@ dnspython==1.15.0 docutils==0.14 dyn==1.8.1 flask-bcrypt==0.7.1 -flask-cors==3.0.7 +flask-cors==3.0.8 flask-mail==0.9.1 flask-migrate==2.5.2 flask-principal==0.4.0 @@ -40,10 +40,10 @@ flask-replicated==1.3 flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.0 -flask==1.0.3 +flask==1.1.1 future==0.17.1 gunicorn==19.9.0 -hvac==0.9.1 +hvac==0.9.3 idna==2.8 imagesize==1.1.0 # via sphinx inflection==0.3.1 @@ -51,21 +51,21 @@ itsdangerous==1.1.0 javaobj-py3==0.3.0 jinja2==2.10.1 jmespath==0.9.4 -josepy==1.1.0 +josepy==1.2.0 jsonlines==1.2.0 kombu==4.5.0 lockfile==0.12.2 logmatic-python==0.1.7 -mako==1.0.11 +mako==1.0.13 markupsafe==1.1.1 -marshmallow-sqlalchemy==0.16.3 -marshmallow==2.19.2 +marshmallow-sqlalchemy==0.17.0 +marshmallow==2.19.5 mock==3.0.5 ndg-httpsclient==0.5.1 packaging==19.0 # via sphinx -paramiko==2.4.2 +paramiko==2.6.0 pem==19.1.0 -psycopg2==2.8.2 +psycopg2==2.8.3 pyasn1-modules==0.2.5 pyasn1==0.4.5 pycparser==2.19 @@ -81,17 +81,17 @@ python-dateutil==2.8.0 python-editor==1.0.4 python-json-logger==0.1.11 pytz==2019.1 -pyyaml==5.1 +pyyaml==5.1.1 raven[flask]==6.10.0 redis==3.2.1 requests-toolbelt==0.9.1 requests[security]==2.22.0 retrying==1.3.3 -s3transfer==0.2.0 +s3transfer==0.2.1 six==1.12.0 -snowballstemmer==1.2.1 # via sphinx +snowballstemmer==1.9.0 # via sphinx sphinx-rtd-theme==0.4.3 -sphinx==2.1.0 +sphinx==2.1.2 sphinxcontrib-applehelp==1.0.1 # via sphinx sphinxcontrib-devhelp==1.0.1 # via sphinx sphinxcontrib-htmlhelp==1.0.2 # via sphinx @@ -99,11 +99,14 @@ sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.2 # via sphinx sphinxcontrib-serializinghtml==1.1.3 # via sphinx -sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.4 +sqlalchemy-utils==0.34.0 +sqlalchemy==1.3.5 tabulate==0.8.3 twofish==0.3.0 urllib3==1.25.3 vine==1.3.0 werkzeug==0.15.4 xmltodict==0.12.0 + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.0.1 # via acme, josepy, sphinx diff --git a/requirements-tests.txt b/requirements-tests.txt index 77bc92af..f88d44cf 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -7,33 +7,34 @@ appdirs==1.4.3 # via black asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest -attrs==19.1.0 # via black, pytest 
-aws-sam-translator==1.11.0 # via cfn-lint +attrs==19.1.0 # via black, jsonschema, pytest +aws-sam-translator==1.12.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto -bandit==1.6.0 +bandit==1.6.2 black==19.3b0 -boto3==1.9.160 # via aws-sam-translator, moto +boto3==1.9.187 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.160 # via aws-xray-sdk, boto3, moto, s3transfer -certifi==2019.3.9 # via requests +botocore==1.12.187 # via aws-xray-sdk, boto3, moto, s3transfer +certifi==2019.6.16 # via requests cffi==1.12.3 # via cryptography -cfn-lint==0.21.4 # via moto +cfn-lint==0.22.2 # via moto chardet==3.0.4 # via requests click==7.0 # via black, flask coverage==4.5.3 -cryptography==2.7 # via moto -docker==4.0.1 # via moto +cryptography==2.7 # via moto, sshpubkeys +datetime==4.3 # via moto +docker==4.0.2 # via moto docutils==0.14 # via botocore -ecdsa==0.13.2 # via python-jose +ecdsa==0.13.2 # via python-jose, sshpubkeys factory-boy==2.12.0 faker==1.0.7 -flask==1.0.3 # via pytest-flask +flask==1.1.1 # via pytest-flask freezegun==0.3.12 future==0.17.1 # via aws-xray-sdk, python-jose gitdb2==2.0.5 # via gitpython gitpython==2.1.11 # via bandit idna==2.8 # via moto, requests -importlib-metadata==0.17 # via pluggy, pytest +importlib-metadata==0.18 # via pluggy, pytest itsdangerous==1.1.0 # via flask jinja2==2.10.1 # via flask, moto jmespath==0.9.4 # via boto3, botocore @@ -41,34 +42,36 @@ jsondiff==1.1.2 # via moto jsonpatch==1.23 # via cfn-lint jsonpickle==1.2 # via aws-xray-sdk jsonpointer==2.0 # via jsonpatch -jsonschema==2.6.0 # via aws-sam-translator, cfn-lint +jsonschema==3.0.1 # via aws-sam-translator, cfn-lint markupsafe==1.1.1 # via jinja2 mock==3.0.5 # via moto -more-itertools==7.0.0 # via pytest -moto==1.3.8 +more-itertools==7.1.0 # via pytest +moto==1.3.11 nose==1.3.7 packaging==19.0 # via pytest -pbr==5.2.1 # via stevedore +pbr==5.4.0 # via stevedore pluggy==0.12.0 # via pytest py==1.8.0 # via pytest pyasn1==0.4.5 # via rsa pycparser==2.19 # via cffi pyflakes==2.1.1 pyparsing==2.4.0 # via packaging +pyrsistent==0.15.3 # via jsonschema pytest-flask==0.15.0 pytest-mock==1.10.4 -pytest==4.6.2 +pytest==5.0.1 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==3.0.1 # via moto -pytz==2019.1 # via moto -pyyaml==5.1 +pytz==2019.1 # via datetime, moto +pyyaml==5.1.1 requests-mock==1.6.0 requests==2.22.0 # via cfn-lint, docker, moto, requests-mock, responses responses==0.10.6 # via moto rsa==4.0 # via python-jose -s3transfer==0.2.0 # via boto3 -six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, freezegun, mock, moto, packaging, pytest, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client +s3transfer==0.2.1 # via boto3 +six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, freezegun, jsonschema, mock, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client smmap2==2.0.5 # via gitdb2 +sshpubkeys==3.1.0 # via moto stevedore==1.30.1 # via bandit text-unidecode==1.2 # via faker toml==0.10.0 # via black @@ -76,6 +79,10 @@ urllib3==1.25.3 # via botocore, requests wcwidth==0.1.7 # via pytest websocket-client==0.56.0 # via docker werkzeug==0.15.4 # via flask, moto, pytest-flask -wrapt==1.11.1 # via aws-xray-sdk +wrapt==1.11.2 # via aws-xray-sdk xmltodict==0.12.0 # via moto -zipp==0.5.1 # via importlib-metadata +zipp==0.5.2 # via importlib-metadata +zope.interface==4.6.0 # via datetime + +# The following packages 
are considered to be unsafe in a requirements file: +# setuptools==41.0.1 # via cfn-lint, jsonschema, zope.interface diff --git a/requirements.txt b/requirements.txt index c19c7b6e..7635c29d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,21 +4,21 @@ # # pip-compile --no-index --output-file=requirements.txt requirements.in # -acme==0.34.2 +acme==0.36.0 alembic-autogenerate-enums==0.0.2 -alembic==1.0.10 # via flask-migrate +alembic==1.0.11 # via flask-migrate amqp==2.5.0 # via kombu -aniso8601==6.0.0 # via flask-restful +aniso8601==7.0.0 # via flask-restful arrow==0.14.2 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 -bcrypt==3.1.6 # via flask-bcrypt, paramiko +bcrypt==3.1.7 # via flask-bcrypt, paramiko billiard==3.6.0.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.160 -botocore==1.12.160 +boto3==1.9.187 +botocore==1.12.187 celery[redis]==4.3.0 -certifi==2019.3.9 +certifi==2019.6.16 certsrv==2.1.1 cffi==1.12.3 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests @@ -30,7 +30,7 @@ dnspython==1.15.0 # via dnspython3 docutils==0.14 # via botocore dyn==1.8.1 flask-bcrypt==0.7.1 -flask-cors==3.0.7 +flask-cors==3.0.8 flask-mail==0.9.1 flask-migrate==2.5.2 flask-principal==0.4.0 @@ -38,32 +38,32 @@ flask-replicated==1.3 flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.0 -flask==1.0.3 +flask==1.1.1 future==0.17.1 gunicorn==19.9.0 -hvac==0.9.1 +hvac==0.9.3 idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask javaobj-py3==0.3.0 # via pyjks jinja2==2.10.1 jmespath==0.9.4 # via boto3, botocore -josepy==1.1.0 # via acme +josepy==1.2.0 # via acme jsonlines==1.2.0 # via cloudflare kombu==4.5.0 lockfile==0.12.2 logmatic-python==0.1.7 -mako==1.0.11 # via alembic +mako==1.0.13 # via alembic markupsafe==1.1.1 # via jinja2, mako -marshmallow-sqlalchemy==0.16.3 -marshmallow==2.19.2 +marshmallow-sqlalchemy==0.17.0 +marshmallow==2.19.5 mock==3.0.5 # via acme ndg-httpsclient==0.5.1 -paramiko==2.4.2 +paramiko==2.6.0 pem==19.1.0 -psycopg2==2.8.2 +psycopg2==2.8.3 pyasn1-modules==0.2.5 # via pyjks, python-ldap -pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, pyjks, python-ldap +pyasn1==0.4.5 # via ndg-httpsclient, pyasn1-modules, pyjks, python-ldap pycparser==2.19 # via cffi pycryptodomex==3.8.2 # via pyjks pyjks==19.0.0 @@ -76,19 +76,22 @@ python-editor==1.0.4 # via alembic python-json-logger==0.1.11 # via logmatic-python python-ldap==3.2.0 pytz==2019.1 # via acme, celery, flask-restful, pyrfc3339 -pyyaml==5.1 +pyyaml==5.1.1 raven[flask]==6.10.0 redis==3.2.1 requests-toolbelt==0.9.1 # via acme requests[security]==2.22.0 retrying==1.3.3 -s3transfer==0.2.0 # via boto3 +s3transfer==0.2.1 # via boto3 six==1.12.0 -sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.4 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy-utils==0.34.0 +sqlalchemy==1.3.5 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 twofish==0.3.0 # via pyjks urllib3==1.25.3 # via botocore, requests vine==1.3.0 # via amqp, celery werkzeug==0.15.4 # via flask xmltodict==0.12.0 + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.0.1 # via acme, josepy From cd1aeb15f179061ab02adb3b02c01909f6f4b19b Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 12 Jul 2019 11:50:12 -0700 Subject: [PATCH 267/357] adding testing for redis --- lemur/common/redis.py | 2 +- lemur/tests/test_redis.py | 13 +++++++++++++ 2 files changed, 14 
insertions(+), 1 deletion(-) create mode 100644 lemur/tests/test_redis.py diff --git a/lemur/common/redis.py b/lemur/common/redis.py index 0bddf9b4..34a8778f 100644 --- a/lemur/common/redis.py +++ b/lemur/common/redis.py @@ -23,7 +23,7 @@ class RedisHandler: def redis(self, db=0): # The decode_responses flag here directs the client to convert the responses from Redis into Python strings # using the default encoding utf-8. This is client specific. - red = redis.StrictRedis(host=self.host, port=self.port, db=self.db, charset="utf-8", decode_responses=True) + red = redis.StrictRedis(host=self.host, port=self.port, db=self.db, encoding="utf-8", decode_responses=True) return red diff --git a/lemur/tests/test_redis.py b/lemur/tests/test_redis.py new file mode 100644 index 00000000..aab2e397 --- /dev/null +++ b/lemur/tests/test_redis.py @@ -0,0 +1,13 @@ +import fakeredis +import time +import sys + + +def test_write_and_read_from_redis(): + function = f"{__name__}.{sys._getframe().f_code.co_name}" + + red = fakeredis.FakeStrictRedis() + key = f"{function}.last_success" + value = int(time.time()) + assert red.set(key, value) is True + assert (int(red.get(key)) == value) is True From 0ed00c5011665b316ff66180a3cbd58dd9f29a8b Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 16 Jul 2019 09:01:04 -0700 Subject: [PATCH 268/357] updating test requirement --- requirements-tests.in | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-tests.in b/requirements-tests.in index d315cf7a..5d152fce 100644 --- a/requirements-tests.in +++ b/requirements-tests.in @@ -5,6 +5,7 @@ black coverage factory-boy Faker +fakeredis-1.0.3 freezegun moto nose From 54ecda4e1a129abbe03440da8ab051e7cca679b4 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 16 Jul 2019 09:09:12 -0700 Subject: [PATCH 269/357] updating fakeredis --- requirements-tests.in | 2 +- requirements-tests.txt | 8 +++++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/requirements-tests.in b/requirements-tests.in index 5d152fce..610f26f9 100644 --- a/requirements-tests.in +++ b/requirements-tests.in @@ -5,7 +5,7 @@ black coverage factory-boy Faker -fakeredis-1.0.3 +fakeredis freezegun moto nose diff --git a/requirements-tests.txt b/requirements-tests.txt index 77bc92af..1c4b276e 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -27,6 +27,7 @@ docutils==0.14 # via botocore ecdsa==0.13.2 # via python-jose factory-boy==2.12.0 faker==1.0.7 +fakeredis==1.0.3 flask==1.0.3 # via pytest-flask freezegun==0.3.12 future==0.17.1 # via aws-xray-sdk, python-jose @@ -62,13 +63,15 @@ python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==3.0.1 # via moto pytz==2019.1 # via moto pyyaml==5.1 +redis==3.2.1 # via fakeredis requests-mock==1.6.0 requests==2.22.0 # via cfn-lint, docker, moto, requests-mock, responses responses==0.10.6 # via moto rsa==4.0 # via python-jose s3transfer==0.2.0 # via boto3 -six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, freezegun, mock, moto, packaging, pytest, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client +six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, fakeredis, freezegun, mock, moto, packaging, pytest, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client smmap2==2.0.5 # via gitdb2 +sortedcontainers==2.1.0 # via fakeredis stevedore==1.30.1 # via bandit text-unidecode==1.2 # via faker toml==0.10.0 # via black @@ -79,3 +82,6 @@ 
werkzeug==0.15.4 # via flask, moto, pytest-flask wrapt==1.11.1 # via aws-xray-sdk xmltodict==0.12.0 # via moto zipp==0.5.1 # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.0.1 # via cfn-lint From 09c0fa0f940a9d4706f704362b7f3e2ee2692d7d Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 16 Jul 2019 17:21:01 -0700 Subject: [PATCH 270/357] updating the function declaration --- lemur/common/celery.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 05e66926..67780957 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -108,7 +108,7 @@ def fetch_acme_cert(id): if celery.current_task: task_id = celery.current_task.request.id - function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) + function = f"{__name__}.{sys._getframe().f_code.co_name}" log_data = { "function": function, "message": "Resolving pending certificate {}".format(id), @@ -210,7 +210,7 @@ def fetch_all_pending_acme_certs(): """Instantiate celery workers to resolve all pending Acme certificates""" pending_certs = pending_certificate_service.get_unresolved_pending_certs() - function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) + function = f"{__name__}.{sys._getframe().f_code.co_name}" log_data = { "function": function, "message": "Starting job.", @@ -236,7 +236,7 @@ def fetch_all_pending_acme_certs(): @celery.task() def remove_old_acme_certs(): """Prune old pending acme certificates from the database""" - function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) + function = f"{__name__}.{sys._getframe().f_code.co_name}" log_data = { "function": function, "message": "Starting job.", @@ -262,7 +262,7 @@ def clean_all_sources(): This function will clean unused certificates from sources. This is a destructive operation and should only be ran periodically. This function triggers one celery task per source. """ - function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) + function = f"{__name__}.{sys._getframe().f_code.co_name}" sources = validate_sources("all") for source in sources: current_app.logger.debug( @@ -292,7 +292,7 @@ def sync_all_sources(): """ This function will sync certificates from all sources. This function triggers one celery task per source. """ - function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) + function = f"{__name__}.{sys._getframe().f_code.co_name}" sources = validate_sources("all") for source in sources: current_app.logger.debug( @@ -313,7 +313,7 @@ def sync_source(source): :return: """ - function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) + function = f"{__name__}.{sys._getframe().f_code.co_name}" task_id = None if celery.current_task: task_id = celery.current_task.request.id @@ -357,7 +357,7 @@ def sync_source_destination(): We rely on account numbers to avoid duplicates. 
""" current_app.logger.debug("Syncing AWS destinations and sources") - function = "{}.{}".format(__name__, sys._getframe().f_code.co_name) + function = f"{__name__}.{sys._getframe().f_code.co_name}" for dst in destinations_service.get_all(): if add_aws_destination_to_sources(dst): From e37a7c775ea78730d8f5c68bd82d9fbef9e1aa9b Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Thu, 18 Jul 2019 14:29:54 -0700 Subject: [PATCH 271/357] Initial commit for the UltraDNS plugin to support Lets Encrypt --- lemur/dns_providers/service.py | 1 + lemur/plugins/lemur_acme/plugin.py | 6 +- lemur/plugins/lemur_acme/ultradns.py | 221 +++++++++++++++++++++++++++ 3 files changed, 225 insertions(+), 3 deletions(-) create mode 100644 lemur/plugins/lemur_acme/ultradns.py diff --git a/lemur/dns_providers/service.py b/lemur/dns_providers/service.py index ec9fa0de..29f98a5b 100644 --- a/lemur/dns_providers/service.py +++ b/lemur/dns_providers/service.py @@ -98,6 +98,7 @@ def get_types(): ], }, {"name": "dyn"}, + {"name": "ultradns"}, ] }, ) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index c734923a..b0774cbe 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -31,7 +31,7 @@ from lemur.exceptions import InvalidAuthority, InvalidConfiguration, UnknownProv from lemur.extensions import metrics, sentry from lemur.plugins import lemur_acme as acme from lemur.plugins.bases import IssuerPlugin -from lemur.plugins.lemur_acme import cloudflare, dyn, route53 +from lemur.plugins.lemur_acme import cloudflare, dyn, route53, ultradns class AuthorizationRecord(object): @@ -370,7 +370,7 @@ class AcmeHandler(object): pass def get_dns_provider(self, type): - provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53} + provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53, "ultradns": ultradns} provider = provider_types.get(type) if not provider: raise UnknownProvider("No such DNS provider: {}".format(type)) @@ -424,7 +424,7 @@ class ACMEIssuerPlugin(IssuerPlugin): def get_dns_provider(self, type): self.acme = AcmeHandler() - provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53} + provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53, "ultradns": ultradns} provider = provider_types.get(type) if not provider: raise UnknownProvider("No such DNS provider: {}".format(type)) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py new file mode 100644 index 00000000..de65b47f --- /dev/null +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -0,0 +1,221 @@ +import time +import requests +import json + +import dns +import dns.exception +import dns.name +import dns.query +import dns.resolver + +from flask import current_app +from lemur.extensions import metrics, sentry + +use_http = False + + +def get_ultradns_token(): + path = "/v2/authorization/token" + data = { + "grant_type": "password", + "username": current_app.config.get("ACME_ULTRADNS_USERNAME", ""), + "password": current_app.config.get("ACME_ULTRADNS_PASSWORD", ""), + } + base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") + resp = requests.post("{0}{1}".format(base_uri, path), data=data, verify=True) + return resp.json()["access_token"] + + +def _generate_header(): + access_token = get_ultradns_token() + return {"Authorization": "Bearer {}".format(access_token), "Content-Type": "application/json"} + + +def _paginate(path, key): + limit = 100 + params = {"offset": 0, "limit": 1} + # 
params["offset"] = 0 + # params["limit"] = 1 + resp = _get(path, params) + for index in range(0, resp["resultInfo"]["totalCount"], limit): + params["offset"] = index + params["limit"] = limit + resp = _get(path, params) + yield resp[key] + + +def _get(path, params=None): + base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") + resp = requests.get( + "{0}{1}".format(base_uri, path), + headers=_generate_header(), + params=params, + verify=True, + ) + resp.raise_for_status() + return resp.json() + + +def _delete(path): + base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") + resp = requests.delete( + "{0}{1}".format(base_uri, path), + headers=_generate_header(), + verify=True, + ) + resp.raise_for_status() + + +def _post(path, params): + base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") + resp = requests.post( + "{0}{1}".format(base_uri, path), + headers=_generate_header(), + data=json.dumps(params), + verify=True, + ) + resp.raise_for_status() + + +def _has_dns_propagated(name, token): + txt_records = [] + try: + dns_resolver = dns.resolver.Resolver() + dns_resolver.nameservers = [get_authoritative_nameserver(name)] + dns_response = dns_resolver.query(name, "TXT") + for rdata in dns_response: + for txt_record in rdata.strings: + txt_records.append(txt_record.decode("utf-8")) + except dns.exception.DNSException: + metrics.send("has_dns_propagated_fail", "counter", 1) + return False + + for txt_record in txt_records: + if txt_record == token: + metrics.send("has_dns_propagated_success", "counter", 1) + return True + + return False + + +def wait_for_dns_change(change_id, account_number=None): + fqdn, token = change_id + number_of_attempts = 20 + for attempts in range(0, number_of_attempts): + status = _has_dns_propagated(fqdn, token) + current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) + if status: + metrics.send("wait_for_dns_change_success", "counter", 1) + break + time.sleep(10) + if not status: + # TODO: Delete associated DNS text record here + metrics.send("wait_for_dns_change_fail", "counter", 1) + sentry.captureException(extra={"fqdn": str(fqdn), "txt_record": str(token)}) + metrics.send( + "wait_for_dns_change_error", + "counter", + 1, + metric_tags={"fqdn": fqdn, "txt_record": token}, + ) + return + + +def get_zones(account_number): + path = "/v2/zones/" + zones = [] + for page in _paginate(path, "zones"): + for elem in page: + zones.append(elem["properties"]["name"][:-1]) + + return zones + + +def get_zone_name(domain, account_number): + zones = get_zones(account_number) + + zone_name = "" + + for z in zones: + if domain.endswith(z): + # Find the most specific zone possible for the domain + # Ex: If fqdn is a.b.c.com, there is a zone for c.com, + # and a zone for b.c.com, we want to use b.c.com. 
+ if z.count(".") > zone_name.count("."): + zone_name = z + if not zone_name: + metrics.send("ultradns_no_zone_name", "counter", 1) + raise Exception("No UltraDNS zone found for domain: {}".format(domain)) + return zone_name + + +def create_txt_record(domain, token, account_number): + zone_name = get_zone_name(domain, account_number) + zone_parts = len(zone_name.split(".")) + node_name = ".".join(domain.split(".")[:-zone_parts]) + fqdn = "{0}.{1}".format(node_name, zone_name) + path = "/v2/zones/{0}/rrsets/TXT/{1}".format(zone_name, node_name) + # zone = Zone(zone_name) + params = { + "ttl": 300, + "rdata": [ + "{}".format(token) + ], + } + + try: + _post(path, params) + current_app.logger.debug( + "TXT record created: {0}, token: {1}".format(fqdn, token) + ) + except Exception as e: + current_app.logger.debug( + "Unable to add record. Domain: {}. Token: {}. " + "Record already exists: {}".format(domain, token, e), + exc_info=True, + ) + + change_id = (fqdn, token) + return change_id + + +def delete_txt_record(change_id, account_number, domain, token): + # client = get_ultradns_client() + if not domain: + current_app.logger.debug("delete_txt_record: No domain passed") + return + + zone_name = get_zone_name(domain, account_number) + zone_parts = len(zone_name.split(".")) + node_name = ".".join(domain.split(".")[:-zone_parts]) + fqdn = "{0}.{1}".format(node_name, zone_name) + path = "/v2/zones/{}/rrsets/16/{}".format(zone_name, node_name) + + try: + # rrsets = client.get_rrsets_by_type_owner(zone_name, "TXT", node_name) + rrsets = _get(path) + except Exception as e: + metrics.send("delete_txt_record_geterror", "counter", 1) + # No Text Records remain or host is not in the zone anymore because all records have been deleted. + return + try: + rrsets["rrSets"][0]["rdata"].remove("{}".format(token)) + except ValueError: + current_app.logger.debug("Token not found") + return + + #client.delete_rrset(zone_name, "TXT", node_name) + _delete(path) + + if len(rrsets["rrSets"][0]["rdata"]) > 0: + #client.create_rrset(zone_name, "TXT", node_name, 300, rrsets["rrSets"][0]["rdata"]) + params = { + "ttl": 300, + "rdata": rrsets["rrSets"][0]["rdata"], + } + _post(path, params) + + +def get_authoritative_nameserver(domain): + # return "8.8.8.8" + return "156.154.64.154" From 36ebba64916e87391bf5f1ad73f51b88bed799b1 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 18 Jul 2019 15:16:01 -0700 Subject: [PATCH 272/357] source is not dict --- lemur/common/celery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 67780957..b701a9eb 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -343,7 +343,7 @@ def sync_source(source): log_data["message"] = "Done syncing source" current_app.logger.debug(log_data) - metrics.send(f"{function}.success", 'counter', 1, metric_tags=source) + metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source}) red.set(f'{function}.last_success', int(time.time())) From 0b52aa8c59984f0e9579a2b5285e4d276a0bc53c Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Mon, 22 Jul 2019 11:47:48 -0700 Subject: [PATCH 273/357] Added Zone class to handle ultradns zones --- lemur/plugins/lemur_acme/ultradns.py | 96 +++++++++++++++++++---- lemur/plugins/lemur_acme/ultradns_zone.py | 33 ++++++++ 2 files changed, 114 insertions(+), 15 deletions(-) create mode 100644 lemur/plugins/lemur_acme/ultradns_zone.py diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py 
index de65b47f..eb595789 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -1,6 +1,7 @@ import time import requests import json +from .ultradns_zone import Zone import dns import dns.exception @@ -11,10 +12,11 @@ import dns.resolver from flask import current_app from lemur.extensions import metrics, sentry -use_http = False - def get_ultradns_token(): + # Function to call the UltraDNS Authorization API. Returns the Authorization access_token + # which is valid for 1 hour. Each request calls this function and we generate a new token + # every time. path = "/v2/authorization/token" data = { "grant_type": "password", @@ -27,6 +29,8 @@ def get_ultradns_token(): def _generate_header(): + # Function to generate the header for a request. Contains the Authorization access_key + # obtained from the get_ultradns_token() function. access_token = get_ultradns_token() return {"Authorization": "Bearer {}".format(access_token), "Content-Type": "application/json"} @@ -34,8 +38,6 @@ def _generate_header(): def _paginate(path, key): limit = 100 params = {"offset": 0, "limit": 1} - # params["offset"] = 0 - # params["limit"] = 1 resp = _get(path, params) for index in range(0, resp["resultInfo"]["totalCount"], limit): params["offset"] = index @@ -45,6 +47,7 @@ def _paginate(path, key): def _get(path, params=None): + # Function to execute a GET request on the given URL (base_uri + path) with given params base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") resp = requests.get( "{0}{1}".format(base_uri, path), @@ -57,6 +60,7 @@ def _get(path, params=None): def _delete(path): + # Function to execute a DELETE request on the given URL base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") resp = requests.delete( "{0}{1}".format(base_uri, path), @@ -67,6 +71,7 @@ def _delete(path): def _post(path, params): + # Executes a POST request on given URL. Body is sent in JSON format base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") resp = requests.post( "{0}{1}".format(base_uri, path), @@ -78,6 +83,8 @@ def _post(path, params): def _has_dns_propagated(name, token): + # Check whether the DNS change made by Lemur have propagated to the public DNS or not. + # Invoked by wait_for_dns_change() function txt_records = [] try: dns_resolver = dns.resolver.Resolver() @@ -99,6 +106,7 @@ def _has_dns_propagated(name, token): def wait_for_dns_change(change_id, account_number=None): + # Waits and checks if the DNS changes have propagated or not. fqdn, token = change_id number_of_attempts = 20 for attempts in range(0, number_of_attempts): @@ -122,20 +130,26 @@ def wait_for_dns_change(change_id, account_number=None): def get_zones(account_number): + # Get zones from the UltraDNS path = "/v2/zones/" zones = [] for page in _paginate(path, "zones"): for elem in page: - zones.append(elem["properties"]["name"][:-1]) + # UltraDNS zone names end with a "." - Example - lemur.example.com. + # We pick out the names minus the "." 
at the end while returning the list + zone = Zone(elem) + # TODO : Check for active & Primary + # if elem["properties"]["type"] == "PRIMARY" and elem["properties"]["status"] == "ACTIVE": + if zone.authoritative_type == "PRIMARY" and zone.status == "ACTIVE": + zones.append(zone.name) return zones def get_zone_name(domain, account_number): + # Get the matching zone for the given domain zones = get_zones(account_number) - zone_name = "" - for z in zones: if domain.endswith(z): # Find the most specific zone possible for the domain @@ -150,12 +164,20 @@ def get_zone_name(domain, account_number): def create_txt_record(domain, token, account_number): + # Create a TXT record for the given domain. + # The part of the domain that matches with the zone becomes the zone name. + # The remainder becomes the owner name (referred to as node name here) + # Example: Let's say we have a zone named "exmaple.com" in UltraDNS and we + # get a request to create a cert for lemur.example.com + # Domain - _acme-challenge.lemur.example.com + # Matching zone - example.com + # Owner name - _acme-challenge.lemur + zone_name = get_zone_name(domain, account_number) zone_parts = len(zone_name.split(".")) node_name = ".".join(domain.split(".")[:-zone_parts]) fqdn = "{0}.{1}".format(node_name, zone_name) path = "/v2/zones/{0}/rrsets/TXT/{1}".format(zone_name, node_name) - # zone = Zone(zone_name) params = { "ttl": 300, "rdata": [ @@ -180,7 +202,16 @@ def create_txt_record(domain, token, account_number): def delete_txt_record(change_id, account_number, domain, token): - # client = get_ultradns_client() + # Delete the TXT record that was created in the create_txt_record() function. + # UltraDNS handles records differently compared to Dyn. It creates an RRSet + # which is a set of records of the same type and owner. This means + # that while deleting the record, we cannot delete any individual record from + # the RRSet. Instead, we have to delete the entire RRSet. If multiple certs are + # being created for the same domain at the same time, the challenge TXT records + # that are created will be added under the same RRSet. If the RRSet had more + # than 1 record, then we create a new RRSet on UltraDNS minus the record that + # has to be deleted. + if not domain: current_app.logger.debug("delete_txt_record: No domain passed") return @@ -188,27 +219,26 @@ def delete_txt_record(change_id, account_number, domain, token): zone_name = get_zone_name(domain, account_number) zone_parts = len(zone_name.split(".")) node_name = ".".join(domain.split(".")[:-zone_parts]) - fqdn = "{0}.{1}".format(node_name, zone_name) path = "/v2/zones/{}/rrsets/16/{}".format(zone_name, node_name) try: - # rrsets = client.get_rrsets_by_type_owner(zone_name, "TXT", node_name) rrsets = _get(path) except Exception as e: metrics.send("delete_txt_record_geterror", "counter", 1) # No Text Records remain or host is not in the zone anymore because all records have been deleted. return try: + # Remove the record from the RRSet locally rrsets["rrSets"][0]["rdata"].remove("{}".format(token)) except ValueError: current_app.logger.debug("Token not found") return - #client.delete_rrset(zone_name, "TXT", node_name) + # Delete the RRSet from UltraDNS _delete(path) + # Check if the RRSet has more records. 
If yes, add the modified RRSet back to UltraDNS if len(rrsets["rrSets"][0]["rdata"]) > 0: - #client.create_rrset(zone_name, "TXT", node_name, 300, rrsets["rrSets"][0]["rdata"]) params = { "ttl": 300, "rdata": rrsets["rrSets"][0]["rdata"], @@ -216,6 +246,42 @@ def delete_txt_record(change_id, account_number, domain, token): _post(path, params) +def delete_acme_txt_records(domain): + + if not domain: + current_app.logger.debug("delete_acme_txt_records: No domain passed") + return + acme_challenge_string = "_acme-challenge" + if not domain.startswith(acme_challenge_string): + current_app.logger.debug( + "delete_acme_txt_records: Domain {} doesn't start with string {}. " + "Cowardly refusing to delete TXT records".format( + domain, acme_challenge_string + ) + ) + return + + zone_name = get_zone_name(domain) + zone_parts = len(zone_name.split(".")) + node_name = ".".join(domain.split(".")[:-zone_parts]) + path = "/v2/zones/{}/rrsets/16/{}".format(zone_name, node_name) + + _delete(path) + + def get_authoritative_nameserver(domain): - # return "8.8.8.8" - return "156.154.64.154" + """ + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + REMEMBER TO CHANGE THE RETURN VALUE + """ + return "8.8.8.8" + # return "156.154.64.154" diff --git a/lemur/plugins/lemur_acme/ultradns_zone.py b/lemur/plugins/lemur_acme/ultradns_zone.py new file mode 100644 index 00000000..c6d90422 --- /dev/null +++ b/lemur/plugins/lemur_acme/ultradns_zone.py @@ -0,0 +1,33 @@ +class Zone: + """ + This class implements an Ultra DNS zone. + """ + + def __init__(self, _data, _client="Client"): + self._data = _data + self._client = _client + + @property + def name(self): + """ + Zone name, has a trailing "." at the end, which we manually remove. 
+ """ + return self._data["properties"]["name"][:-1] + + @property + def authoritative_type(self): + """ + Indicates whether the zone is setup as a PRIMARY or SECONDARY + """ + return self._data["properties"]["type"] + + @property + def record_count(self): + return self._data["properties"]["resourceRecordCount"] + + @property + def status(self): + """ + Returns the status of the zone - ACTIVE, SUSPENDED, etc + """ + return self._data["properties"]["status"] From 51f3b7dde0ff14eebc4d06d8eb09d6fcccd53a2d Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Mon, 22 Jul 2019 14:23:40 -0700 Subject: [PATCH 274/357] Added the Record class for UltraDNS --- lemur/plugins/lemur_acme/ultradns.py | 10 +++++--- lemur/plugins/lemur_acme/ultradns_record.py | 26 +++++++++++++++++++++ 2 files changed, 33 insertions(+), 3 deletions(-) create mode 100644 lemur/plugins/lemur_acme/ultradns_record.py diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index eb595789..95adc77a 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -2,6 +2,7 @@ import time import requests import json from .ultradns_zone import Zone +from .ultradns_record import Record import dns import dns.exception @@ -223,13 +224,15 @@ def delete_txt_record(change_id, account_number, domain, token): try: rrsets = _get(path) + record = Record(rrsets) except Exception as e: metrics.send("delete_txt_record_geterror", "counter", 1) # No Text Records remain or host is not in the zone anymore because all records have been deleted. return try: # Remove the record from the RRSet locally - rrsets["rrSets"][0]["rdata"].remove("{}".format(token)) + # rrsets["rrSets"][0]["rdata"].remove("{}".format(token)) + record.rdata.remove("{}".format(token)) except ValueError: current_app.logger.debug("Token not found") return @@ -238,10 +241,11 @@ def delete_txt_record(change_id, account_number, domain, token): _delete(path) # Check if the RRSet has more records. If yes, add the modified RRSet back to UltraDNS - if len(rrsets["rrSets"][0]["rdata"]) > 0: + # if len(rrsets["rrSets"][0]["rdata"]) > 0: + if len(record.rdata) > 0: params = { "ttl": 300, - "rdata": rrsets["rrSets"][0]["rdata"], + "rdata": record.rdata, } _post(path, params) diff --git a/lemur/plugins/lemur_acme/ultradns_record.py b/lemur/plugins/lemur_acme/ultradns_record.py new file mode 100644 index 00000000..9ec8d4d8 --- /dev/null +++ b/lemur/plugins/lemur_acme/ultradns_record.py @@ -0,0 +1,26 @@ +class Record: + """ + This class implements an Ultra DNS record. + Accepts the response from the API call as the argument. + """ + + def __init__(self, _data): + # Since we are dealing with only TXT records for Lemur, we expect only 1 RRSet in the response. + # Thus we default to picking up the first entry (_data["rrsets"][0]) from the response. 
+ self._data = _data["rrSets"][0] + + @property + def name(self): + return self._data["ownerName"] + + @property + def rrtype(self): + return self._data["rrtype"] + + @property + def rdata(self): + return self._data["rdata"] + + @property + def ttl(self): + return self._data["ttl"] From 252410c6e9529a10926a0f6b23768a1f322c163e Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Mon, 22 Jul 2019 16:00:20 -0700 Subject: [PATCH 275/357] Updated TTL from 300 to 5 --- lemur/plugins/lemur_acme/ultradns.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index 95adc77a..d6ad64af 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -180,7 +180,7 @@ def create_txt_record(domain, token, account_number): fqdn = "{0}.{1}".format(node_name, zone_name) path = "/v2/zones/{0}/rrsets/TXT/{1}".format(zone_name, node_name) params = { - "ttl": 300, + "ttl": 5, "rdata": [ "{}".format(token) ], @@ -244,7 +244,7 @@ def delete_txt_record(change_id, account_number, domain, token): # if len(rrsets["rrSets"][0]["rdata"]) > 0: if len(record.rdata) > 0: params = { - "ttl": 300, + "ttl": 5, "rdata": record.rdata, } _post(path, params) From 429e6a967c4133e2da0b0e576abb9537b810f33d Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 25 Jul 2019 18:49:19 -0700 Subject: [PATCH 276/357] better error handling for redis --- lemur/common/redis.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/lemur/common/redis.py b/lemur/common/redis.py index 34a8778f..ca15734f 100644 --- a/lemur/common/redis.py +++ b/lemur/common/redis.py @@ -3,7 +3,9 @@ Helper Class for Redis """ import redis +import sys from flask import current_app +from lemur.extensions import sentry from lemur.factory import create_app if current_app: @@ -23,7 +25,19 @@ class RedisHandler: def redis(self, db=0): # The decode_responses flag here directs the client to convert the responses from Redis into Python strings # using the default encoding utf-8. This is client specific. 
- red = redis.StrictRedis(host=self.host, port=self.port, db=self.db, encoding="utf-8", decode_responses=True) + function = f"{__name__}.{sys._getframe().f_code.co_name}" + try: + red = redis.StrictRedis(host=self.host, port=self.port, db=self.db, encoding="utf-8", decode_responses=True) + red.set("test", 0) + except redis.ConnectionError: + log_data = { + "function": function, + "message": "Redis Connection error", + "host": self.host, + "port": self.port + } + current_app.logger.error(log_data) + sentry.captureException() return red From adabe18c905cb4a9fe4a44f3a9e710e07cdf63d4 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 25 Jul 2019 18:56:28 -0700 Subject: [PATCH 277/357] metric tags, to be able to track which domains where failing during the LetsEncrypt domain validation --- lemur/plugins/lemur_acme/dyn.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/lemur/plugins/lemur_acme/dyn.py b/lemur/plugins/lemur_acme/dyn.py index 00a48eb6..fff2e632 100644 --- a/lemur/plugins/lemur_acme/dyn.py +++ b/lemur/plugins/lemur_acme/dyn.py @@ -33,22 +33,22 @@ def get_dynect_session(): return dynect_session -def _has_dns_propagated(name, token): +def _has_dns_propagated(fqdn, token): txt_records = [] try: dns_resolver = dns.resolver.Resolver() - dns_resolver.nameservers = [get_authoritative_nameserver(name)] - dns_response = dns_resolver.query(name, "TXT") + dns_resolver.nameservers = [get_authoritative_nameserver(fqdn)] + dns_response = dns_resolver.query(fqdn, "TXT") for rdata in dns_response: for txt_record in rdata.strings: txt_records.append(txt_record.decode("utf-8")) except dns.exception.DNSException: - metrics.send("has_dns_propagated_fail", "counter", 1) + metrics.send("has_dns_propagated_fail", "counter", 1, metric_tags={"dns": fqdn}) return False for txt_record in txt_records: if txt_record == token: - metrics.send("has_dns_propagated_success", "counter", 1) + metrics.send("has_dns_propagated_success", "counter", 1, metric_tags={"dns": fqdn}) return True return False @@ -61,12 +61,12 @@ def wait_for_dns_change(change_id, account_number=None): status = _has_dns_propagated(fqdn, token) current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) if status: - metrics.send("wait_for_dns_change_success", "counter", 1) + metrics.send("wait_for_dns_change_success", "counter", 1, metric_tags={"dns": fqdn}) break time.sleep(10) if not status: # TODO: Delete associated DNS text record here - metrics.send("wait_for_dns_change_fail", "counter", 1) + metrics.send("wait_for_dns_change_fail", "counter", 1, metric_tags={"dns": fqdn}) sentry.captureException(extra={"fqdn": str(fqdn), "txt_record": str(token)}) metrics.send( "wait_for_dns_change_error", From e993194b4f30262a86880a86097d968f7cf9fc2e Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Mon, 29 Jul 2019 14:59:28 -0700 Subject: [PATCH 278/357] Check ultraDNS authoritative server first. Upon success, check Googles DNS server. 
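Editor's note: the diff below implements this two-stage check inside the plugin. As a minimal,
standalone sketch of the same idea (assuming dnspython and the resolver API the plugin already
uses; the helper name and example values are ours, not Lemur's):

    import dns.exception
    import dns.resolver

    def txt_record_visible(fqdn, token, nameserver):
        # Ask one specific nameserver whether the challenge TXT record is being served yet.
        resolver = dns.resolver.Resolver()
        resolver.nameservers = [nameserver]
        try:
            answer = resolver.query(fqdn, "TXT")
        except dns.exception.DNSException:
            return False
        return any(s.decode("utf-8") == token for rdata in answer for s in rdata.strings)

    # First poll the UltraDNS authoritative server, then confirm against a public resolver:
    #   txt_record_visible("_acme-challenge.lemur.example.com", token, "156.154.64.154")
    #   txt_record_visible("_acme-challenge.lemur.example.com", token, "8.8.8.8")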
--- lemur/plugins/lemur_acme/ultradns.py | 25 ++++++++++++++++++------- 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index d6ad64af..1a520f2e 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -83,13 +83,15 @@ def _post(path, params): resp.raise_for_status() -def _has_dns_propagated(name, token): +def _has_dns_propagated(name, token, domain="8.8.8.8"): # Check whether the DNS change made by Lemur have propagated to the public DNS or not. # Invoked by wait_for_dns_change() function txt_records = [] try: dns_resolver = dns.resolver.Resolver() - dns_resolver.nameservers = [get_authoritative_nameserver(name)] + # dns_resolver.nameservers = [get_authoritative_nameserver(name)] + # dns_resolver.nameservers = ["156.154.64.154"] + dns_resolver.nameservers = [domain] dns_response = dns_resolver.query(name, "TXT") for rdata in dns_response: for txt_record in rdata.strings: @@ -111,12 +113,21 @@ def wait_for_dns_change(change_id, account_number=None): fqdn, token = change_id number_of_attempts = 20 for attempts in range(0, number_of_attempts): - status = _has_dns_propagated(fqdn, token) + status = _has_dns_propagated(fqdn, token, "156.154.64.154") current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) if status: - metrics.send("wait_for_dns_change_success", "counter", 1) + # metrics.send("wait_for_dns_change_success", "counter", 1) + time.sleep(10) break time.sleep(10) + if status: + for attempts in range(0, number_of_attempts): + status = _has_dns_propagated(fqdn, token, "8.8.8.8") + current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) + if status: + metrics.send("wait_for_dns_change_success", "counter", 1) + break + time.sleep(10) if not status: # TODO: Delete associated DNS text record here metrics.send("wait_for_dns_change_fail", "counter", 1) @@ -132,7 +143,7 @@ def wait_for_dns_change(change_id, account_number=None): def get_zones(account_number): # Get zones from the UltraDNS - path = "/v2/zones/" + path = "/v2/zones" zones = [] for page in _paginate(path, "zones"): for elem in page: @@ -287,5 +298,5 @@ def get_authoritative_nameserver(domain): REMEMBER TO CHANGE THE RETURN VALUE REMEMBER TO CHANGE THE RETURN VALUE """ - return "8.8.8.8" - # return "156.154.64.154" + # return "8.8.8.8" + return "156.154.64.154" From 3ad791e1ec634646edc78aaa1b4e4d40bbb67936 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Mon, 29 Jul 2019 18:01:28 -0700 Subject: [PATCH 279/357] Dynamically obtain the authoritative nameserver for the domain --- lemur/plugins/lemur_acme/ultradns.py | 72 ++++++++++++++++++---------- 1 file changed, 47 insertions(+), 25 deletions(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index 1a520f2e..c7f853d0 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -83,14 +83,12 @@ def _post(path, params): resp.raise_for_status() -def _has_dns_propagated(name, token, domain="8.8.8.8"): +def _has_dns_propagated(name, token, domain): # Check whether the DNS change made by Lemur have propagated to the public DNS or not. 
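    # Editor's note (clarification, not part of this patch): despite its name, the new "domain"
    # parameter is the nameserver address to query, e.g. the UltraDNS authoritative server
    # ("156.154.64.154") on the first pass and Google's resolver ("8.8.8.8") afterwards, as the
    # wait_for_dns_change() changes below show.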
# Invoked by wait_for_dns_change() function txt_records = [] try: dns_resolver = dns.resolver.Resolver() - # dns_resolver.nameservers = [get_authoritative_nameserver(name)] - # dns_resolver.nameservers = ["156.154.64.154"] dns_resolver.nameservers = [domain] dns_response = dns_resolver.query(name, "TXT") for rdata in dns_response: @@ -110,19 +108,21 @@ def _has_dns_propagated(name, token, domain="8.8.8.8"): def wait_for_dns_change(change_id, account_number=None): # Waits and checks if the DNS changes have propagated or not. + # First check the domains authoritative server. Once this succeeds, + # we ask a public DNS server (Google <8.8.8.8> in our case). fqdn, token = change_id number_of_attempts = 20 + nameserver = get_authoritative_nameserver(fqdn) for attempts in range(0, number_of_attempts): - status = _has_dns_propagated(fqdn, token, "156.154.64.154") + status = _has_dns_propagated(fqdn, token, nameserver) current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) if status: - # metrics.send("wait_for_dns_change_success", "counter", 1) time.sleep(10) break time.sleep(10) if status: for attempts in range(0, number_of_attempts): - status = _has_dns_propagated(fqdn, token, "8.8.8.8") + status = _has_dns_propagated(fqdn, token, get_public_authoritative_nameserver()) current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) if status: metrics.send("wait_for_dns_change_success", "counter", 1) @@ -150,8 +150,6 @@ def get_zones(account_number): # UltraDNS zone names end with a "." - Example - lemur.example.com. # We pick out the names minus the "." at the end while returning the list zone = Zone(elem) - # TODO : Check for active & Primary - # if elem["properties"]["type"] == "PRIMARY" and elem["properties"]["status"] == "ACTIVE": if zone.authoritative_type == "PRIMARY" and zone.status == "ACTIVE": zones.append(zone.name) @@ -242,7 +240,6 @@ def delete_txt_record(change_id, account_number, domain, token): return try: # Remove the record from the RRSet locally - # rrsets["rrSets"][0]["rdata"].remove("{}".format(token)) record.rdata.remove("{}".format(token)) except ValueError: current_app.logger.debug("Token not found") @@ -252,7 +249,6 @@ def delete_txt_record(change_id, account_number, domain, token): _delete(path) # Check if the RRSet has more records. 
If yes, add the modified RRSet back to UltraDNS - # if len(rrsets["rrSets"][0]["rdata"]) > 0: if len(record.rdata) > 0: params = { "ttl": 5, @@ -285,18 +281,44 @@ def delete_acme_txt_records(domain): def get_authoritative_nameserver(domain): - """ - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - REMEMBER TO CHANGE THE RETURN VALUE - """ - # return "8.8.8.8" - return "156.154.64.154" + n = dns.name.from_text(domain) + + depth = 2 + default = dns.resolver.get_default_resolver() + nameserver = default.nameservers[0] + + last = False + while not last: + s = n.split(depth) + + last = s[0].to_unicode() == u"@" + sub = s[1] + + query = dns.message.make_query(sub, dns.rdatatype.NS) + response = dns.query.udp(query, nameserver) + + rcode = response.rcode() + if rcode != dns.rcode.NOERROR: + metrics.send("get_authoritative_nameserver_error", "counter", 1) + if rcode == dns.rcode.NXDOMAIN: + raise Exception("%s does not exist." % sub) + else: + raise Exception("Error %s" % dns.rcode.to_text(rcode)) + + if len(response.authority) > 0: + rrset = response.authority[0] + else: + rrset = response.answer[0] + + rr = rrset[0] + if rr.rdtype != dns.rdatatype.SOA: + authority = rr.target + nameserver = default.query(authority).rrset[0].to_text() + + depth += 1 + + return nameserver + + +def get_public_authoritative_nameserver(): + return "8.8.8.8" From a89cbe933224575fd8c3354b644f60750b2894b2 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 30 Jul 2019 09:57:15 -0700 Subject: [PATCH 280/357] moving all cron jobs to become celery jobs --- lemur/common/celery.py | 85 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 85 insertions(+) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index b701a9eb..b889a80a 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -26,6 +26,11 @@ from lemur.pending_certificates import service as pending_certificate_service from lemur.plugins.base import plugins from lemur.sources.cli import clean, sync, validate_sources from lemur.sources.service import add_aws_destination_to_sources +from lemur.certificates import cli as cli_certificate +from lemur.dns_providers import cli as cli_dns_providers +from lemur.notifications import cli as cli_notification +from lemur.endpoints import cli as cli_endpoints + if current_app: flask_app = current_app @@ -366,3 +371,83 @@ def sync_source_destination(): current_app.logger.debug("Completed Syncing AWS destinations and sources") red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) + + +@celery.task() +def certificate_reissue(): + """ + This celery task reissues certificates which are pending reissue + :return: + """ + function = f"{__name__}.{sys._getframe().f_code.co_name}" + current_app.logger.debug(f"{function}: reissuing certificates") + cli_certificate.reissue(None, True) + current_app.logger.debug(f"{function}: reissuance completed") + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) + + +@celery.task() +def certificate_rotate(): + """ + This celery task rotates certificates which are reissued but having endpoints attached to the replaced cert + :return: + """ + 
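    # Editor's note (illustrative, not part of this patch): periodic tasks like the ones added
    # in this commit are normally driven by celery beat. A schedule entry in the Lemur config
    # might look roughly like the following; the entry name and cadence are assumptions, not
    # taken from this series:
    #
    #   from celery.schedules import crontab
    #
    #   CELERYBEAT_SCHEDULE = {
    #       "certificate_rotate": {
    #           "task": "lemur.common.celery.certificate_rotate",
    #           "schedule": crontab(minute=0, hour="*"),
    #       },
    #   }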
function = f"{__name__}.{sys._getframe().f_code.co_name}" + current_app.logger.debug(f"{function}: rotating certificates") + cli_certificate.rotate(None, None, None, None, True) + current_app.logger.debug(f"{function}: rotation completed") + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) + + +@celery.task() +def endpoints_expire(): + """ + This celery task removes all endpoints that have not been recently updated + :return: + """ + function = f"{__name__}.{sys._getframe().f_code.co_name}" + current_app.logger.debug(f"{function}: endpoints expire") + cli_endpoints.expire(2) + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) + + +@celery.task() +def get_all_zones(): + """ + This celery syncs all zones from the available dns providers + :return: + """ + function = f"{__name__}.{sys._getframe().f_code.co_name}" + current_app.logger.debug(f"{function}: get_all_zones") + cli_dns_providers.get_all_zones() + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) + + +@celery.task() +def check_revoked(): + """ + This celery task attempts to check if any certs are expired + :return: + """ + function = f"{__name__}.{sys._getframe().f_code.co_name}" + current_app.logger.debug(f"{function}: check if any certificates are revoked revoked") + cli_certificate.check_revoked() + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) + + +@celery.task() +def notify_expirations(): + """ + This celery task notifies about expiring certs + :return: + """ + function = f"{__name__}.{sys._getframe().f_code.co_name}" + current_app.logger.debug(f"{function}: Cert Expiration Notifcation") + cli_notification.expirations(["MetatronUserCertfor", "Metatron-User-Cert-for"]) + red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", 'counter', 1) From 244aa069f04f9b6787bfc41162ff370a9110fbfb Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 30 Jul 2019 10:32:09 -0700 Subject: [PATCH 281/357] lemur is one level deep than makefile --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 1ca94e42..069eb29b 100644 --- a/Makefile +++ b/Makefile @@ -46,7 +46,7 @@ reset-db: @echo "--> Enabling pg_trgm extension" psql lemur -c "create extension IF NOT EXISTS pg_trgm;" @echo "--> Applying migrations" - lemur db upgrade + cd lemur && lemur db upgrade setup-git: @echo "--> Installing git hooks" From 3d48b422b5ee3a23e6df83b5ca9208ffebb4621f Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Tue, 30 Jul 2019 11:39:35 -0700 Subject: [PATCH 282/357] Removed TODO --- lemur/plugins/lemur_acme/ultradns.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index c7f853d0..c43840e4 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -129,7 +129,6 @@ def wait_for_dns_change(change_id, account_number=None): break time.sleep(10) if not status: - # TODO: Delete associated DNS text record here metrics.send("wait_for_dns_change_fail", "counter", 1) sentry.captureException(extra={"fqdn": str(fqdn), "txt_record": str(token)}) metrics.send( From 44bc562e8b2e2339d420af0a42647fa4efe91e0c Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Tue, 30 Jul 2019 13:08:16 -0700 Subject: [PATCH 283/357] Update ultradns.py Minor logging changes in 
wait_for_dns_change --- lemur/plugins/lemur_acme/ultradns.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index c43840e4..24b98e66 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -115,7 +115,7 @@ def wait_for_dns_change(change_id, account_number=None): nameserver = get_authoritative_nameserver(fqdn) for attempts in range(0, number_of_attempts): status = _has_dns_propagated(fqdn, token, nameserver) - current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) + current_app.logger.debug("Record status on ultraDNS authoritative server for fqdn: {}: {}".format(fqdn, status)) if status: time.sleep(10) break @@ -123,7 +123,7 @@ def wait_for_dns_change(change_id, account_number=None): if status: for attempts in range(0, number_of_attempts): status = _has_dns_propagated(fqdn, token, get_public_authoritative_nameserver()) - current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status)) + current_app.logger.debug("Record status on Google DNS for fqdn: {}: {}".format(fqdn, status)) if status: metrics.send("wait_for_dns_change_success", "counter", 1) break From 3ba7fdbd494401331e9da084c1e8b4f0b147e559 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 31 Jul 2019 11:11:39 -0700 Subject: [PATCH 284/357] Updated logger to log a dictionary instead of a string --- lemur/plugins/lemur_acme/ultradns.py | 80 +++++++++++++++++++++------- 1 file changed, 61 insertions(+), 19 deletions(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index 24b98e66..d3b68afc 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -1,6 +1,7 @@ import time import requests import json +import sys from .ultradns_zone import Zone from .ultradns_record import Record @@ -115,7 +116,14 @@ def wait_for_dns_change(change_id, account_number=None): nameserver = get_authoritative_nameserver(fqdn) for attempts in range(0, number_of_attempts): status = _has_dns_propagated(fqdn, token, nameserver) - current_app.logger.debug("Record status on ultraDNS authoritative server for fqdn: {}: {}".format(fqdn, status)) + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "fqdn": fqdn, + "status": status, + "message": "Record status on ultraDNS authoritative server" + } + current_app.logger.debug(log_data) if status: time.sleep(10) break @@ -123,7 +131,14 @@ def wait_for_dns_change(change_id, account_number=None): if status: for attempts in range(0, number_of_attempts): status = _has_dns_propagated(fqdn, token, get_public_authoritative_nameserver()) - current_app.logger.debug("Record status on Google DNS for fqdn: {}: {}".format(fqdn, status)) + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "fqdn": fqdn, + "status": status, + "message": "Record status on Public DNS" + } + current_app.logger.debug(log_data) if status: metrics.send("wait_for_dns_change_success", "counter", 1) break @@ -196,15 +211,24 @@ def create_txt_record(domain, token, account_number): try: _post(path, params) - current_app.logger.debug( - "TXT record created: {0}, token: {1}".format(fqdn, token) - ) + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "fqdn": fqdn, + "token": token, + "message": "TXT record created" + } + current_app.logger.debug(log_data) except Exception as e: - current_app.logger.debug( - "Unable 
to add record. Domain: {}. Token: {}. " - "Record already exists: {}".format(domain, token, e), - exc_info=True, - ) + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "domain": domain, + "token": token, + "Exception": e, + "message": "Unable to add record. Record already exists." + } + current_app.logger.debug(log_data) change_id = (fqdn, token) return change_id @@ -222,7 +246,12 @@ def delete_txt_record(change_id, account_number, domain, token): # has to be deleted. if not domain: - current_app.logger.debug("delete_txt_record: No domain passed") + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "message": "No domain passed" + } + current_app.logger.debug(log_data) return zone_name = get_zone_name(domain, account_number) @@ -241,7 +270,13 @@ def delete_txt_record(change_id, account_number, domain, token): # Remove the record from the RRSet locally record.rdata.remove("{}".format(token)) except ValueError: - current_app.logger.debug("Token not found") + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "token": token, + "message": "Token not found" + } + current_app.logger.debug(log_data) return # Delete the RRSet from UltraDNS @@ -259,16 +294,23 @@ def delete_txt_record(change_id, account_number, domain, token): def delete_acme_txt_records(domain): if not domain: - current_app.logger.debug("delete_acme_txt_records: No domain passed") + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "message": "No domain passed" + } + current_app.logger.debug(log_data) return acme_challenge_string = "_acme-challenge" if not domain.startswith(acme_challenge_string): - current_app.logger.debug( - "delete_acme_txt_records: Domain {} doesn't start with string {}. 
" - "Cowardly refusing to delete TXT records".format( - domain, acme_challenge_string - ) - ) + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "domain": domain, + "acme_challenge_string": acme_challenge_string, + "message": "Domain does not start with the acme challenge string" + } + current_app.logger.debug(log_data) return zone_name = get_zone_name(domain) From 11cd09513154a2d505755ccc266d33b2e6d4127a Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 31 Jul 2019 11:12:28 -0700 Subject: [PATCH 285/357] Reduced the number of calls to get_public_authoritative_nameserver by using a variable --- lemur/plugins/lemur_acme/ultradns.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index d3b68afc..d516be1a 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -129,8 +129,9 @@ def wait_for_dns_change(change_id, account_number=None): break time.sleep(10) if status: + nameserver = get_public_authoritative_nameserver() for attempts in range(0, number_of_attempts): - status = _has_dns_propagated(fqdn, token, get_public_authoritative_nameserver()) + status = _has_dns_propagated(fqdn, token, nameserver) function = sys._getframe().f_code.co_name log_data = { "function": function, From 503df999fa1d569475898290f50bc9ce3d1ac2ba Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 31 Jul 2019 11:32:04 -0700 Subject: [PATCH 286/357] Updated metrics.send to send function named, followed by status, separated by a period --- lemur/plugins/lemur_acme/ultradns.py | 26 ++++++++++++-------------- 1 file changed, 12 insertions(+), 14 deletions(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index d516be1a..0229a59e 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -96,12 +96,14 @@ def _has_dns_propagated(name, token, domain): for txt_record in rdata.strings: txt_records.append(txt_record.decode("utf-8")) except dns.exception.DNSException: - metrics.send("has_dns_propagated_fail", "counter", 1) + function = sys._getframe().f_code.co_name + metrics.send("{}.fail".format(function), "counter", 1) return False for txt_record in txt_records: if txt_record == token: - metrics.send("has_dns_propagated_success", "counter", 1) + function = sys._getframe().f_code.co_name + metrics.send("{}.success".format(function), "counter", 1) return True return False @@ -132,7 +134,6 @@ def wait_for_dns_change(change_id, account_number=None): nameserver = get_public_authoritative_nameserver() for attempts in range(0, number_of_attempts): status = _has_dns_propagated(fqdn, token, nameserver) - function = sys._getframe().f_code.co_name log_data = { "function": function, "fqdn": fqdn, @@ -141,18 +142,12 @@ def wait_for_dns_change(change_id, account_number=None): } current_app.logger.debug(log_data) if status: - metrics.send("wait_for_dns_change_success", "counter", 1) + metrics.send("{}.success".format(function), "counter", 1) break time.sleep(10) if not status: - metrics.send("wait_for_dns_change_fail", "counter", 1) + metrics.send("{}.fail".format, "counter", 1, metric_tags={"fqdn": fqdn, "txt_record": token}) sentry.captureException(extra={"fqdn": str(fqdn), "txt_record": str(token)}) - metrics.send( - "wait_for_dns_change_error", - "counter", - 1, - metric_tags={"fqdn": fqdn, "txt_record": token}, - ) return @@ -183,7 +178,8 @@ def get_zone_name(domain, account_number): if 
z.count(".") > zone_name.count("."): zone_name = z if not zone_name: - metrics.send("ultradns_no_zone_name", "counter", 1) + function = sys._getframe().f_code.co_name + metrics.send("{}.fail".format(function), "counter", 1) raise Exception("No UltraDNS zone found for domain: {}".format(domain)) return zone_name @@ -264,7 +260,8 @@ def delete_txt_record(change_id, account_number, domain, token): rrsets = _get(path) record = Record(rrsets) except Exception as e: - metrics.send("delete_txt_record_geterror", "counter", 1) + function = sys._getframe().f_code.co_name + metrics.send("{}.geterror".format(function), "counter", 1) # No Text Records remain or host is not in the zone anymore because all records have been deleted. return try: @@ -341,7 +338,8 @@ def get_authoritative_nameserver(domain): rcode = response.rcode() if rcode != dns.rcode.NOERROR: - metrics.send("get_authoritative_nameserver_error", "counter", 1) + function = sys._getframe().f_code.co_name + metrics.send("{}.error".format(function), "counter", 1) if rcode == dns.rcode.NXDOMAIN: raise Exception("%s does not exist." % sub) else: From fe075dc9f56985d2b81a75d7b4db12d87e3f6023 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 31 Jul 2019 12:00:31 -0700 Subject: [PATCH 287/357] Changed function comments to doc strings. --- lemur/plugins/lemur_acme/ultradns.py | 83 +++++++++++++++++----------- 1 file changed, 51 insertions(+), 32 deletions(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index 0229a59e..1c3aa961 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -16,9 +16,12 @@ from lemur.extensions import metrics, sentry def get_ultradns_token(): - # Function to call the UltraDNS Authorization API. Returns the Authorization access_token - # which is valid for 1 hour. Each request calls this function and we generate a new token - # every time. + """ + Function to call the UltraDNS Authorization API. + + Returns the Authorization access_token which is valid for 1 hour. + Each request calls this function and we generate a new token every time. + """ path = "/v2/authorization/token" data = { "grant_type": "password", @@ -31,8 +34,11 @@ def get_ultradns_token(): def _generate_header(): - # Function to generate the header for a request. Contains the Authorization access_key - # obtained from the get_ultradns_token() function. + """ + Function to generate the header for a request. + + Contains the Authorization access_key obtained from the get_ultradns_token() function. + """ access_token = get_ultradns_token() return {"Authorization": "Bearer {}".format(access_token), "Content-Type": "application/json"} @@ -49,7 +55,7 @@ def _paginate(path, key): def _get(path, params=None): - # Function to execute a GET request on the given URL (base_uri + path) with given params + """Function to execute a GET request on the given URL (base_uri + path) with given params""" base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") resp = requests.get( "{0}{1}".format(base_uri, path), @@ -62,7 +68,7 @@ def _get(path, params=None): def _delete(path): - # Function to execute a DELETE request on the given URL + """Function to execute a DELETE request on the given URL""" base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") resp = requests.delete( "{0}{1}".format(base_uri, path), @@ -73,7 +79,7 @@ def _delete(path): def _post(path, params): - # Executes a POST request on given URL. Body is sent in JSON format + """Executes a POST request on given URL. 
Body is sent in JSON format""" base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") resp = requests.post( "{0}{1}".format(base_uri, path), @@ -85,8 +91,11 @@ def _post(path, params): def _has_dns_propagated(name, token, domain): - # Check whether the DNS change made by Lemur have propagated to the public DNS or not. - # Invoked by wait_for_dns_change() function + """ + Check whether the DNS change made by Lemur have propagated to the public DNS or not. + + Invoked by wait_for_dns_change() function + """ txt_records = [] try: dns_resolver = dns.resolver.Resolver() @@ -110,9 +119,12 @@ def _has_dns_propagated(name, token, domain): def wait_for_dns_change(change_id, account_number=None): - # Waits and checks if the DNS changes have propagated or not. - # First check the domains authoritative server. Once this succeeds, - # we ask a public DNS server (Google <8.8.8.8> in our case). + """ + Waits and checks if the DNS changes have propagated or not. + + First check the domains authoritative server. Once this succeeds, + we ask a public DNS server (Google <8.8.8.8> in our case). + """ fqdn, token = change_id number_of_attempts = 20 nameserver = get_authoritative_nameserver(fqdn) @@ -152,7 +164,7 @@ def wait_for_dns_change(change_id, account_number=None): def get_zones(account_number): - # Get zones from the UltraDNS + """Get zones from the UltraDNS""" path = "/v2/zones" zones = [] for page in _paginate(path, "zones"): @@ -167,7 +179,7 @@ def get_zones(account_number): def get_zone_name(domain, account_number): - # Get the matching zone for the given domain + """Get the matching zone for the given domain""" zones = get_zones(account_number) zone_name = "" for z in zones: @@ -185,14 +197,17 @@ def get_zone_name(domain, account_number): def create_txt_record(domain, token, account_number): - # Create a TXT record for the given domain. - # The part of the domain that matches with the zone becomes the zone name. - # The remainder becomes the owner name (referred to as node name here) - # Example: Let's say we have a zone named "exmaple.com" in UltraDNS and we - # get a request to create a cert for lemur.example.com - # Domain - _acme-challenge.lemur.example.com - # Matching zone - example.com - # Owner name - _acme-challenge.lemur + """ + Create a TXT record for the given domain. + + The part of the domain that matches with the zone becomes the zone name. + The remainder becomes the owner name (referred to as node name here) + Example: Let's say we have a zone named "exmaple.com" in UltraDNS and we + get a request to create a cert for lemur.example.com + Domain - _acme-challenge.lemur.example.com + Matching zone - example.com + Owner name - _acme-challenge.lemur + """ zone_name = get_zone_name(domain, account_number) zone_parts = len(zone_name.split(".")) @@ -232,15 +247,18 @@ def create_txt_record(domain, token, account_number): def delete_txt_record(change_id, account_number, domain, token): - # Delete the TXT record that was created in the create_txt_record() function. - # UltraDNS handles records differently compared to Dyn. It creates an RRSet - # which is a set of records of the same type and owner. This means - # that while deleting the record, we cannot delete any individual record from - # the RRSet. Instead, we have to delete the entire RRSet. If multiple certs are - # being created for the same domain at the same time, the challenge TXT records - # that are created will be added under the same RRSet. 
If the RRSet had more - # than 1 record, then we create a new RRSet on UltraDNS minus the record that - # has to be deleted. + """ + Delete the TXT record that was created in the create_txt_record() function. + + UltraDNS handles records differently compared to Dyn. It creates an RRSet + which is a set of records of the same type and owner. This means + that while deleting the record, we cannot delete any individual record from + the RRSet. Instead, we have to delete the entire RRSet. If multiple certs are + being created for the same domain at the same time, the challenge TXT records + that are created will be added under the same RRSet. If the RRSet had more + than 1 record, then we create a new RRSet on UltraDNS minus the record that + has to be deleted. + """ if not domain: function = sys._getframe().f_code.co_name @@ -320,6 +338,7 @@ def delete_acme_txt_records(domain): def get_authoritative_nameserver(domain): + """Get the authoritative nameserver for the given domain""" n = dns.name.from_text(domain) depth = 2 From 5a401b2d87526f3ef88405a21c0978a15c6d1895 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 31 Jul 2019 12:04:42 -0700 Subject: [PATCH 288/357] Added the Zone class and Record class to ultradns.py and removed the respective files --- lemur/plugins/lemur_acme/ultradns.py | 66 ++++++++++++++++++++- lemur/plugins/lemur_acme/ultradns_record.py | 26 -------- lemur/plugins/lemur_acme/ultradns_zone.py | 33 ----------- 3 files changed, 64 insertions(+), 61 deletions(-) delete mode 100644 lemur/plugins/lemur_acme/ultradns_record.py delete mode 100644 lemur/plugins/lemur_acme/ultradns_zone.py diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index 1c3aa961..40661740 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -2,8 +2,6 @@ import time import requests import json import sys -from .ultradns_zone import Zone -from .ultradns_record import Record import dns import dns.exception @@ -15,6 +13,70 @@ from flask import current_app from lemur.extensions import metrics, sentry +class Record: + """ + This class implements an Ultra DNS record. + + Accepts the response from the API call as the argument. + """ + + def __init__(self, _data): + # Since we are dealing with only TXT records for Lemur, we expect only 1 RRSet in the response. + # Thus we default to picking up the first entry (_data["rrsets"][0]) from the response. + self._data = _data["rrSets"][0] + + @property + def name(self): + return self._data["ownerName"] + + @property + def rrtype(self): + return self._data["rrtype"] + + @property + def rdata(self): + return self._data["rdata"] + + @property + def ttl(self): + return self._data["ttl"] + + +class Zone: + """ + This class implements an Ultra DNS zone. + """ + + def __init__(self, _data, _client="Client"): + self._data = _data + self._client = _client + + @property + def name(self): + """ + Zone name, has a trailing "." at the end, which we manually remove. 
+ """ + return self._data["properties"]["name"][:-1] + + @property + def authoritative_type(self): + """ + Indicates whether the zone is setup as a PRIMARY or SECONDARY + """ + return self._data["properties"]["type"] + + @property + def record_count(self): + return self._data["properties"]["resourceRecordCount"] + + @property + def status(self): + """ + Returns the status of the zone - ACTIVE, SUSPENDED, etc + """ + return self._data["properties"]["status"] + + def get_ultradns_token(): """ Function to call the UltraDNS Authorization API. diff --git a/lemur/plugins/lemur_acme/ultradns_record.py b/lemur/plugins/lemur_acme/ultradns_record.py deleted file mode 100644 index 9ec8d4d8..00000000 --- a/lemur/plugins/lemur_acme/ultradns_record.py +++ /dev/null @@ -1,26 +0,0 @@ -class Record: - """ - This class implements an Ultra DNS record. - Accepts the response from the API call as the argument. - """ - - def __init__(self, _data): - # Since we are dealing with only TXT records for Lemur, we expect only 1 RRSet in the response. - # Thus we default to picking up the first entry (_data["rrsets"][0]) from the response. - self._data = _data["rrSets"][0] - - @property - def name(self): - return self._data["ownerName"] - - @property - def rrtype(self): - return self._data["rrtype"] - - @property - def rdata(self): - return self._data["rdata"] - - @property - def ttl(self): - return self._data["ttl"] diff --git a/lemur/plugins/lemur_acme/ultradns_zone.py b/lemur/plugins/lemur_acme/ultradns_zone.py deleted file mode 100644 index c6d90422..00000000 --- a/lemur/plugins/lemur_acme/ultradns_zone.py +++ /dev/null @@ -1,33 +0,0 @@ -class Zone: - """ - This class implements an Ultra DNS zone. - """ - - def __init__(self, _data, _client="Client"): - self._data = _data - self._client = _client - - @property - def name(self): - """ - Zone name, has a trailing "." at the end, which we manually remove. - """ - return self._data["properties"]["name"][:-1] - - @property - def authoritative_type(self): - """ - Indicates whether the zone is setup as a PRIMARY or SECONDARY - """ - return self._data["properties"]["type"] - - @property - def record_count(self): - return self._data["properties"]["resourceRecordCount"] - - @property - def status(self): - """ - Returns the status of the zone - ACTIVE, SUSPENDED, etc - """ - return self._data["properties"]["status"] From e8e4f826eab7ffda070d368f2f7be3b17fbb3b7a Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 31 Jul 2019 13:08:59 -0700 Subject: [PATCH 289/357] updating logging format --- lemur/common/celery.py | 86 +++++++++++++++++++++++++++++++----------- 1 file changed, 65 insertions(+), 21 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index b889a80a..f5edb9ab 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -268,11 +268,14 @@ def clean_all_sources(): be ran periodically. This function triggers one celery task per source. 
""" function = f"{__name__}.{sys._getframe().f_code.co_name}" + log_data = { + "function": function, + "message": "Creating celery task to clean source", + } sources = validate_sources("all") for source in sources: - current_app.logger.debug( - "Creating celery task to clean source {}".format(source.label) - ) + log_data["source"] = source.label + current_app.logger.debug(log_data) clean_source.delay(source.label) red.set(f'{function}.last_success', int(time.time())) @@ -288,7 +291,13 @@ def clean_source(source): :param source: :return: """ - current_app.logger.debug("Cleaning source {}".format(source)) + function = f"{__name__}.{sys._getframe().f_code.co_name}" + log_data = { + "function": function, + "message": "Cleaning source", + "source": source, + } + current_app.logger.debug(log_data) clean([source], True) @@ -298,11 +307,14 @@ def sync_all_sources(): This function will sync certificates from all sources. This function triggers one celery task per source. """ function = f"{__name__}.{sys._getframe().f_code.co_name}" + log_data = { + "function": function, + "message": "creating celery task to sync source", + } sources = validate_sources("all") for source in sources: - current_app.logger.debug( - "Creating celery task to sync source {}".format(source.label) - ) + log_data["source"] = source.label + current_app.logger.debug(log_data) sync_source.delay(source.label) red.set(f'{function}.last_success', int(time.time())) @@ -361,14 +373,20 @@ def sync_source_destination(): The destination sync_as_source_name reveals the name of the suitable source-plugin. We rely on account numbers to avoid duplicates. """ - current_app.logger.debug("Syncing AWS destinations and sources") function = f"{__name__}.{sys._getframe().f_code.co_name}" - + log_data = { + "function": function, + "message": "syncing AWS destinations and sources", + } + current_app.logger.debug(log_data) for dst in destinations_service.get_all(): if add_aws_destination_to_sources(dst): - current_app.logger.debug("Source: %s added", dst.label) + log_data["message"] = "new source added" + log_data["source"] = dst.label + current_app.logger.debug(log_data) - current_app.logger.debug("Completed Syncing AWS destinations and sources") + log_data["message"] = "completed Syncing AWS destinations and sources" + current_app.logger.debug(log_data) red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) @@ -380,9 +398,14 @@ def certificate_reissue(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - current_app.logger.debug(f"{function}: reissuing certificates") + log_data = { + "function": function, + "message": "reissuing certificates", + } + current_app.logger.debug(log_data) cli_certificate.reissue(None, True) - current_app.logger.debug(f"{function}: reissuance completed") + log_data["message"] = "reissuance completed" + current_app.logger.debug(log_data) red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) @@ -394,9 +417,14 @@ def certificate_rotate(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - current_app.logger.debug(f"{function}: rotating certificates") + log_data = { + "function": function, + "message": "rotating certificates", + } + current_app.logger.debug(log_data) cli_certificate.rotate(None, None, None, None, True) - current_app.logger.debug(f"{function}: rotation completed") + log_data["message"] = "rotation completed" + current_app.logger.debug(log_data) 
red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) @@ -408,8 +436,12 @@ def endpoints_expire(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - current_app.logger.debug(f"{function}: endpoints expire") - cli_endpoints.expire(2) + log_data = { + "function": function, + "message": "endpoints expire", + } + current_app.logger.debug(log_data) + cli_endpoints.expire(2) # Time in hours red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) @@ -421,7 +453,11 @@ def get_all_zones(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - current_app.logger.debug(f"{function}: get_all_zones") + log_data = { + "function": function, + "message": "refresh all zones from available DNS providers", + } + current_app.logger.debug(log_data) cli_dns_providers.get_all_zones() red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) @@ -434,7 +470,11 @@ def check_revoked(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - current_app.logger.debug(f"{function}: check if any certificates are revoked revoked") + log_data = { + "function": function, + "message": "check if any certificates are revoked revoked", + } + current_app.logger.debug(log_data) cli_certificate.check_revoked() red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) @@ -447,7 +487,11 @@ def notify_expirations(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - current_app.logger.debug(f"{function}: Cert Expiration Notifcation") - cli_notification.expirations(["MetatronUserCertfor", "Metatron-User-Cert-for"]) + log_data = { + "function": function, + "message": "notify for cert expiration", + } + current_app.logger.debug(log_data) + cli_notification.expirations(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])) red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) From 2903799b85e3c2db11d9a9b46a2d05cd850b266b Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 31 Jul 2019 14:19:49 -0700 Subject: [PATCH 290/357] Changed string formatting from "{}".format() to f"{}" for consistency --- lemur/plugins/lemur_acme/ultradns.py | 38 ++++++++++++++-------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/lemur/plugins/lemur_acme/ultradns.py b/lemur/plugins/lemur_acme/ultradns.py index 40661740..dcf3e3c6 100644 --- a/lemur/plugins/lemur_acme/ultradns.py +++ b/lemur/plugins/lemur_acme/ultradns.py @@ -91,7 +91,7 @@ def get_ultradns_token(): "password": current_app.config.get("ACME_ULTRADNS_PASSWORD", ""), } base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") - resp = requests.post("{0}{1}".format(base_uri, path), data=data, verify=True) + resp = requests.post(f"{base_uri}{path}", data=data, verify=True) return resp.json()["access_token"] @@ -102,7 +102,7 @@ def _generate_header(): Contains the Authorization access_key obtained from the get_ultradns_token() function. 
""" access_token = get_ultradns_token() - return {"Authorization": "Bearer {}".format(access_token), "Content-Type": "application/json"} + return {"Authorization": f"Bearer {access_token}", "Content-Type": "application/json"} def _paginate(path, key): @@ -120,7 +120,7 @@ def _get(path, params=None): """Function to execute a GET request on the given URL (base_uri + path) with given params""" base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") resp = requests.get( - "{0}{1}".format(base_uri, path), + f"{base_uri}{path}", headers=_generate_header(), params=params, verify=True, @@ -133,7 +133,7 @@ def _delete(path): """Function to execute a DELETE request on the given URL""" base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") resp = requests.delete( - "{0}{1}".format(base_uri, path), + f"{base_uri}{path}", headers=_generate_header(), verify=True, ) @@ -144,7 +144,7 @@ def _post(path, params): """Executes a POST request on given URL. Body is sent in JSON format""" base_uri = current_app.config.get("ACME_ULTRADNS_DOMAIN", "") resp = requests.post( - "{0}{1}".format(base_uri, path), + f"{base_uri}{path}", headers=_generate_header(), data=json.dumps(params), verify=True, @@ -168,13 +168,13 @@ def _has_dns_propagated(name, token, domain): txt_records.append(txt_record.decode("utf-8")) except dns.exception.DNSException: function = sys._getframe().f_code.co_name - metrics.send("{}.fail".format(function), "counter", 1) + metrics.send(f"{function}.fail", "counter", 1) return False for txt_record in txt_records: if txt_record == token: function = sys._getframe().f_code.co_name - metrics.send("{}.success".format(function), "counter", 1) + metrics.send(f"{function}.success", "counter", 1) return True return False @@ -216,11 +216,11 @@ def wait_for_dns_change(change_id, account_number=None): } current_app.logger.debug(log_data) if status: - metrics.send("{}.success".format(function), "counter", 1) + metrics.send(f"{function}.success", "counter", 1) break time.sleep(10) if not status: - metrics.send("{}.fail".format, "counter", 1, metric_tags={"fqdn": fqdn, "txt_record": token}) + metrics.send(f"{function}.fail", "counter", 1, metric_tags={"fqdn": fqdn, "txt_record": token}) sentry.captureException(extra={"fqdn": str(fqdn), "txt_record": str(token)}) return @@ -253,8 +253,8 @@ def get_zone_name(domain, account_number): zone_name = z if not zone_name: function = sys._getframe().f_code.co_name - metrics.send("{}.fail".format(function), "counter", 1) - raise Exception("No UltraDNS zone found for domain: {}".format(domain)) + metrics.send(f"{function}.fail", "counter", 1) + raise Exception(f"No UltraDNS zone found for domain: {domain}") return zone_name @@ -274,12 +274,12 @@ def create_txt_record(domain, token, account_number): zone_name = get_zone_name(domain, account_number) zone_parts = len(zone_name.split(".")) node_name = ".".join(domain.split(".")[:-zone_parts]) - fqdn = "{0}.{1}".format(node_name, zone_name) - path = "/v2/zones/{0}/rrsets/TXT/{1}".format(zone_name, node_name) + fqdn = f"{node_name}.{zone_name}" + path = f"/v2/zones/{zone_name}/rrsets/TXT/{node_name}" params = { "ttl": 5, "rdata": [ - "{}".format(token) + f"{token}" ], } @@ -334,19 +334,19 @@ def delete_txt_record(change_id, account_number, domain, token): zone_name = get_zone_name(domain, account_number) zone_parts = len(zone_name.split(".")) node_name = ".".join(domain.split(".")[:-zone_parts]) - path = "/v2/zones/{}/rrsets/16/{}".format(zone_name, node_name) + path = f"/v2/zones/{zone_name}/rrsets/16/{node_name}" 
try: rrsets = _get(path) record = Record(rrsets) except Exception as e: function = sys._getframe().f_code.co_name - metrics.send("{}.geterror".format(function), "counter", 1) + metrics.send(f"{function}.geterror", "counter", 1) # No Text Records remain or host is not in the zone anymore because all records have been deleted. return try: # Remove the record from the RRSet locally - record.rdata.remove("{}".format(token)) + record.rdata.remove(f"{token}") except ValueError: function = sys._getframe().f_code.co_name log_data = { @@ -394,7 +394,7 @@ def delete_acme_txt_records(domain): zone_name = get_zone_name(domain) zone_parts = len(zone_name.split(".")) node_name = ".".join(domain.split(".")[:-zone_parts]) - path = "/v2/zones/{}/rrsets/16/{}".format(zone_name, node_name) + path = f"/v2/zones/{zone_name}/rrsets/16/{node_name}" _delete(path) @@ -420,7 +420,7 @@ def get_authoritative_nameserver(domain): rcode = response.rcode() if rcode != dns.rcode.NOERROR: function = sys._getframe().f_code.co_name - metrics.send("{}.error".format(function), "counter", 1) + metrics.send(f"{function}.error", "counter", 1) if rcode == dns.rcode.NXDOMAIN: raise Exception("%s does not exist." % sub) else: From a7c2b970b0b04fc3a1b15b1d7f0acc1ca1bc8a6b Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Mon, 5 Aug 2019 13:59:59 -0700 Subject: [PATCH 291/357] Unit testing Part 1 --- lemur/plugins/lemur_acme/tests/test_acme.py | 63 ++++++++++++++++++++- 1 file changed, 62 insertions(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index 3bf1d05c..29c9534e 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -1,8 +1,10 @@ +import json import unittest +from requests.models import Response from mock import MagicMock, Mock, patch -from lemur.plugins.lemur_acme import plugin +from lemur.plugins.lemur_acme import plugin, ultradns class TestAcme(unittest.TestCase): @@ -360,3 +362,62 @@ class TestAcme(unittest.TestCase): mock_request_certificate.return_value = ("pem_certificate", "chain") result = provider.create_certificate(csr, issuer_options) assert result + + @patch("lemur.plugins.lemur_acme.ultradns.requests") + @patch("lemur.plugins.lemur_acme.ultradns.current_app") + def test_get_ultradns_token(self, mock_current_app, mock_requests): + # ret_val = json.dumps({"access_token": "access"}) + the_response = Response() + the_response._content = b'{"access_token": "access"}' + mock_requests.post = Mock(return_value=the_response) + mock_current_app.config.get = Mock(return_value="Test") + result = ultradns.get_ultradns_token() + self.assertTrue(len(result) > 0) + + @patch("lemur.plugins.lemur_acme.ultradns.get_zone_name") + @patch("lemur.plugins.lemur_acme.ultradns._post") + @patch("lemur.plugins.lemur_acme.ultradns.current_app") + def test_create_txt_record(self, mock_current_app, mock__post, mock_get_zone_name): + domain = "test.example.com" + token = "ABCDEFGHIJ" + account_number = "1234567890" + change_id = (domain, token) + mock_current_app.logger.debug = Mock() + mock_get_zone_name = Mock(domain, account_number, return_value="example.com") + path = "a/b/c" + params = { + "test": "Test" + } + mock__post = Mock(path, params) + result = ultradns.create_txt_record(domain, token, account_number) + self.assertEqual(type(change_id), type(result)) + + # @patch("lemur.plugins.lemur_acme.ultradns.get_zone_name") + # @patch("lemur.plugins.lemur_acme.ultradns._get") + # 
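    # Editor's note (clarification, not part of this patch): with stacked @patch decorators the
    # mocks are injected bottom-up, i.e. the decorator closest to the test method supplies the
    # first mock argument, which is why mock_current_app precedes mock_requests above. The
    # Response stub works because requests.Response keeps the raw body in _content and json()
    # decodes it, so b'{"access_token": "access"}' round-trips to {"access_token": "access"}.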
@patch("lemur.plugins.lemur_acme.ultradns._delete") + # @patch("lemur.plugins.lemur_acme.ultradns._post") + # @patch("lemur.plugins.lemur_acme.ultradns.current_app") + # def test_delete_txt_record(self, mock_get_zone_name): + # domain = "test.example.com" + # token = "ABCDEFGHIJ" + # account_number = "1234567890" + # change_id = (domain, token) + # mock_get_zone_name = Mock(domain, account_number, return_value="example.com") + + # @patch("lemur.plugins.lemur_acme.ultradns.get_authoritative_nameserver") + # @patch("lemur.plugins.lemur_acme.ultradns._has_dns_propagated") + # @patch("lemur.plugins.lemur_acme.ultradns.current_app") + # def test_wait_for_dns_change(self, mock_current_app, mock_has_dns_propagated, mock_get_authoritative_nameserver): + # domain = "test.example.com" + # token = "ABCDEFGHIJ" + # account_number = "1234567890" + # change_id = (domain, token) + # mock_current_app.logger.debug = Mock() + # result = ultradns.wait_for_dns_change(change_id, token) + # self.assertEqual(result, true) + + # def test_has_dns_propagated(self): + + + + From b885cdf9d0cf9515a1351774a1eb4929297c2604 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 7 Aug 2019 10:24:38 -0700 Subject: [PATCH 292/357] adding multi profile name support with DigiCert plug. This requires that the configs are a dict, with multiple entries, where the key is the name of the Authority used to issue certs with. DIGICERT_CIS_PROFILE_NAMES = {"sha2-rsa-ecc-root": "ssl_plus"} DIGICERT_CIS_ROOTS = {"root": "ROOT"} DIGICERT_CIS_INTERMEDIATES = {"inter": "INTERMEDIATE_CA_CERT"} Hence, in DB one need to add 1) the corresponding authority table, with digicert-cis-issuer. Note the names here are used to mapping in the above config 2) the corresponding intermediary in the certificate table , with root_aurhority_id set to the id of the new authority_id --- lemur/plugins/lemur_digicert/plugin.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/lemur/plugins/lemur_digicert/plugin.py b/lemur/plugins/lemur_digicert/plugin.py index c5b01cc4..5e104094 100644 --- a/lemur/plugins/lemur_digicert/plugin.py +++ b/lemur/plugins/lemur_digicert/plugin.py @@ -158,7 +158,7 @@ def map_cis_fields(options, csr): ) data = { - "profile_name": current_app.config.get("DIGICERT_CIS_PROFILE_NAME"), + "profile_name": current_app.config.get("DIGICERT_CIS_PROFILE_NAMES")[options['authority'].name], "common_name": options["common_name"], "additional_dns_names": get_additional_names(options), "csr": csr, @@ -423,9 +423,9 @@ class DigiCertCISSourcePlugin(SourcePlugin): required_vars = [ "DIGICERT_CIS_API_KEY", "DIGICERT_CIS_URL", - "DIGICERT_CIS_ROOT", - "DIGICERT_CIS_INTERMEDIATE", - "DIGICERT_CIS_PROFILE_NAME", + "DIGICERT_CIS_ROOTS", + "DIGICERT_CIS_INTERMEDIATES", + "DIGICERT_CIS_PROFILE_NAMES", ] validate_conf(current_app, required_vars) @@ -498,9 +498,9 @@ class DigiCertCISIssuerPlugin(IssuerPlugin): required_vars = [ "DIGICERT_CIS_API_KEY", "DIGICERT_CIS_URL", - "DIGICERT_CIS_ROOT", - "DIGICERT_CIS_INTERMEDIATE", - "DIGICERT_CIS_PROFILE_NAME", + "DIGICERT_CIS_ROOTS", + "DIGICERT_CIS_INTERMEDIATES", + "DIGICERT_CIS_PROFILE_NAMES", ] validate_conf(current_app, required_vars) @@ -537,14 +537,14 @@ class DigiCertCISIssuerPlugin(IssuerPlugin): if "ECC" in issuer_options["key_type"]: return ( "\n".join(str(end_entity).splitlines()), - current_app.config.get("DIGICERT_ECC_CIS_INTERMEDIATE"), + current_app.config.get("DIGICERT_ECC_CIS_INTERMEDIATES")[issuer_options['authority'].name], data["id"], ) # By default return 
RSA return ( "\n".join(str(end_entity).splitlines()), - current_app.config.get("DIGICERT_CIS_INTERMEDIATE"), + current_app.config.get("DIGICERT_CIS_INTERMEDIATES")[issuer_options['authority'].name], data["id"], ) @@ -577,4 +577,4 @@ class DigiCertCISIssuerPlugin(IssuerPlugin): :return: """ role = {"username": "", "password": "", "name": "digicert"} - return current_app.config.get("DIGICERT_CIS_ROOT"), "", [role] + return current_app.config.get("DIGICERT_CIS_ROOTS")[options['authority'].name], "", [role] From e2ea2ca4d1663820894caac1bc86c962bffac010 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 7 Aug 2019 11:05:07 -0700 Subject: [PATCH 293/357] providing sample config --- lemur/tests/conf.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lemur/tests/conf.py b/lemur/tests/conf.py index 6d0d6967..af0c09ce 100644 --- a/lemur/tests/conf.py +++ b/lemur/tests/conf.py @@ -80,6 +80,13 @@ DIGICERT_API_KEY = "api-key" DIGICERT_ORG_ID = 111111 DIGICERT_ROOT = "ROOT" +DIGICERT_CIS_URL = "mock://www.digicert.com" +DIGICERT_CIS_PROFILE_NAMES = {"sha2-rsa-ecc-root": "ssl_plus"} +DIGICERT_CIS_API_KEY = "api-key" +DIGICERT_CIS_ROOTS = {"root": "ROOT"} +DIGICERT_CIS_INTERMEDIATES = {"inter": "INTERMEDIATE_CA_CERT"} + + VERISIGN_URL = "http://example.com" VERISIGN_PEM_PATH = "~/" VERISIGN_FIRST_NAME = "Jim" From bbda9b1d6f4bec461e1653ea8f9825f2d22d0fcc Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 7 Aug 2019 12:05:13 -0700 Subject: [PATCH 294/357] making sure to handle when no config file provided, though we do a check for that --- lemur/plugins/lemur_digicert/plugin.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lemur/plugins/lemur_digicert/plugin.py b/lemur/plugins/lemur_digicert/plugin.py index 5e104094..6f137281 100644 --- a/lemur/plugins/lemur_digicert/plugin.py +++ b/lemur/plugins/lemur_digicert/plugin.py @@ -158,7 +158,7 @@ def map_cis_fields(options, csr): ) data = { - "profile_name": current_app.config.get("DIGICERT_CIS_PROFILE_NAMES")[options['authority'].name], + "profile_name": current_app.config.get("DIGICERT_CIS_PROFILE_NAMES", {}).get(options['authority'].name), "common_name": options["common_name"], "additional_dns_names": get_additional_names(options), "csr": csr, @@ -537,14 +537,14 @@ class DigiCertCISIssuerPlugin(IssuerPlugin): if "ECC" in issuer_options["key_type"]: return ( "\n".join(str(end_entity).splitlines()), - current_app.config.get("DIGICERT_ECC_CIS_INTERMEDIATES")[issuer_options['authority'].name], + current_app.config.get("DIGICERT_ECC_CIS_INTERMEDIATES", {}).get(issuer_options['authority'].name), data["id"], ) # By default return RSA return ( "\n".join(str(end_entity).splitlines()), - current_app.config.get("DIGICERT_CIS_INTERMEDIATES")[issuer_options['authority'].name], + current_app.config.get("DIGICERT_CIS_INTERMEDIATES", {}).get(issuer_options['authority'].name), data["id"], ) @@ -577,4 +577,4 @@ class DigiCertCISIssuerPlugin(IssuerPlugin): :return: """ role = {"username": "", "password": "", "name": "digicert"} - return current_app.config.get("DIGICERT_CIS_ROOTS")[options['authority'].name], "", [role] + return current_app.config.get("DIGICERT_CIS_ROOTS", {}).get(options['authority'].name), "", [role] From ff1f73f985df4258f46aba9f7076059c8d2a2ed0 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 7 Aug 2019 12:05:36 -0700 Subject: [PATCH 295/357] fixing the plugin test to include authority --- lemur/plugins/lemur_digicert/tests/test_digicert.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff 
--git a/lemur/plugins/lemur_digicert/tests/test_digicert.py b/lemur/plugins/lemur_digicert/tests/test_digicert.py index 71efbad4..77b0a1fa 100644 --- a/lemur/plugins/lemur_digicert/tests/test_digicert.py +++ b/lemur/plugins/lemur_digicert/tests/test_digicert.py @@ -66,7 +66,7 @@ def test_map_fields_with_validity_years(app): } -def test_map_cis_fields(app): +def test_map_cis_fields(app, authority): from lemur.plugins.lemur_digicert.plugin import map_cis_fields names = [u"one.example.com", u"two.example.com", u"three.example.com"] @@ -80,6 +80,7 @@ def test_map_cis_fields(app): "organizational_unit": "Example Org", "validity_end": arrow.get(2017, 5, 7), "validity_start": arrow.get(2016, 10, 30), + "authority": authority, } data = map_cis_fields(options, CSR_STR) @@ -104,6 +105,7 @@ def test_map_cis_fields(app): "organization": "Example, Inc.", "organizational_unit": "Example Org", "validity_years": 2, + "authority": authority, } with freeze_time(time_to_freeze=arrow.get(2016, 11, 3).datetime): From 6e84e1fd59f682ec0efa6603835809cadfb741ca Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 13:04:38 -0700 Subject: [PATCH 296/357] Unit Tests for create_txt_record, delete_txt_record, wait_for_dns_change --- lemur/plugins/lemur_acme/tests/test_acme.py | 117 +++++++++++++------- 1 file changed, 76 insertions(+), 41 deletions(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index 29c9534e..d0535718 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -374,50 +374,85 @@ class TestAcme(unittest.TestCase): result = ultradns.get_ultradns_token() self.assertTrue(len(result) > 0) - @patch("lemur.plugins.lemur_acme.ultradns.get_zone_name") - @patch("lemur.plugins.lemur_acme.ultradns._post") @patch("lemur.plugins.lemur_acme.ultradns.current_app") - def test_create_txt_record(self, mock_current_app, mock__post, mock_get_zone_name): - domain = "test.example.com" + def test_create_txt_record(self, mock_current_app): + domain = "_acme_challenge.test.example.com" + token = "ABCDEFGHIJ" + account_number = "1234567890" + path = "a/b/c" + paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', 'pdns154.ultradns.biz.', + 'pdns154.ultradns.org.']}}, 'inherit': 'ALL'}, + {'properties': {'name': 'test.example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', + 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, + 'inherit': 'ALL'}, + {'properties': {'name': 'example2.com.', 'accountName': 'netflix', 'type': 'SECONDARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', + 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, + 'inherit': 'ALL'}] + ultradns._paginate = Mock(path, "zones") + ultradns._paginate.side_effect = [[paginate_response]] + mock_current_app.logger.debug = Mock() + ultradns._post = Mock() + log_data = { + 
"function": "create_txt_record", + "fqdn": domain, + "token": token, + "message": "TXT record created" + } + result = ultradns.create_txt_record(domain, token, account_number) + mock_current_app.logger.debug.assert_called_with(log_data) + + @patch("lemur.plugins.lemur_acme.ultradns.current_app") + @patch("lemur.extensions.metrics") + def test_delete_txt_record(self, mock_metrics, mock_current_app): + domain = "_acme_challenge.test.example.com" token = "ABCDEFGHIJ" account_number = "1234567890" change_id = (domain, token) - mock_current_app.logger.debug = Mock() - mock_get_zone_name = Mock(domain, account_number, return_value="example.com") path = "a/b/c" - params = { - "test": "Test" - } - mock__post = Mock(path, params) - result = ultradns.create_txt_record(domain, token, account_number) - self.assertEqual(type(change_id), type(result)) - - # @patch("lemur.plugins.lemur_acme.ultradns.get_zone_name") - # @patch("lemur.plugins.lemur_acme.ultradns._get") - # @patch("lemur.plugins.lemur_acme.ultradns._delete") - # @patch("lemur.plugins.lemur_acme.ultradns._post") - # @patch("lemur.plugins.lemur_acme.ultradns.current_app") - # def test_delete_txt_record(self, mock_get_zone_name): - # domain = "test.example.com" - # token = "ABCDEFGHIJ" - # account_number = "1234567890" - # change_id = (domain, token) - # mock_get_zone_name = Mock(domain, account_number, return_value="example.com") - - # @patch("lemur.plugins.lemur_acme.ultradns.get_authoritative_nameserver") - # @patch("lemur.plugins.lemur_acme.ultradns._has_dns_propagated") - # @patch("lemur.plugins.lemur_acme.ultradns.current_app") - # def test_wait_for_dns_change(self, mock_current_app, mock_has_dns_propagated, mock_get_authoritative_nameserver): - # domain = "test.example.com" - # token = "ABCDEFGHIJ" - # account_number = "1234567890" - # change_id = (domain, token) - # mock_current_app.logger.debug = Mock() - # result = ultradns.wait_for_dns_change(change_id, token) - # self.assertEqual(result, true) - - # def test_has_dns_propagated(self): - - - + paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', 'pdns154.ultradns.biz.', + 'pdns154.ultradns.org.']}}, 'inherit': 'ALL'}, + {'properties': {'name': 'test.example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', + 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, + 'inherit': 'ALL'}, + {'properties': {'name': 'example2.com.', 'accountName': 'netflix', 'type': 'SECONDARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', + 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, + 'inherit': 'ALL'}] + ultradns._paginate = Mock(path, "zones") + ultradns._paginate.side_effect = [[paginate_response]] + mock_current_app.logger.debug = Mock() + ultradns._post = Mock() + ultradns._get = Mock() + ultradns._get.return_value = {'zoneName': 'test.example.com.com', + 'rrSets': [{'ownerName': '_acme-challenge.test.example.com.', + 'rrtype': 
'TXT (16)', 'ttl': 5, 'rdata': ['ABCDEFGHIJ']}], + 'queryInfo': {'sort': 'OWNER', 'reverse': False, 'limit': 100}, + 'resultInfo': {'totalCount': 1, 'offset': 0, 'returnedCount': 1}} + ultradns._delete = Mock() + mock_metrics.send = Mock() + mock_current_app.logger.debug.assert_not_called() + mock_metrics.send.assert_not_called() + @patch("lemur.extensions.metrics") + def test_wait_for_dns_change(self, mock_metrics): + ultradns._has_dns_propagated = Mock(return_value=True) + ultradns.get_authoritative_nameserver = Mock(return_value="0.0.0.0") + mock_metrics.send = Mock() + mock_metrics.send.assert_not_called() From f2cbddf9e21e3360f010bb6f6a8f473c54d51b0b Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 13:17:16 -0700 Subject: [PATCH 297/357] Unit tests for get_zone_name, get_zones --- lemur/plugins/lemur_acme/tests/test_acme.py | 40 +++++++++++++++++++-- 1 file changed, 38 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index d0535718..a5e7c3e7 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -383,8 +383,9 @@ class TestAcme(unittest.TestCase): paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', 'pdns154.ultradns.biz.', - 'pdns154.ultradns.org.']}}, 'inherit': 'ALL'}, + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', + 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, + 'inherit': 'ALL'}, {'properties': {'name': 'test.example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { @@ -456,3 +457,38 @@ class TestAcme(unittest.TestCase): ultradns.get_authoritative_nameserver = Mock(return_value="0.0.0.0") mock_metrics.send = Mock() mock_metrics.send.assert_not_called() + + def test_get_zone_name(self): + zones = ['example.com', 'test.example.com'] + zone = "test.example.com" + domain = "_acme-challenge.test.example.com" + account_number = "1234567890" + ultradns.get_zones = Mock(return_value=zones) + result = ultradns.get_zone_name(domain, account_number) + self.assertEqual(result, zone) + + def test_get_zones(self): + account_number = "1234567890" + path = "a/b/c" + zones = ['example.com', 'test.example.com'] + paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', 'pdns154.ultradns.biz.', + 'pdns154.ultradns.org.']}}, 'inherit': 'ALL'}, + {'properties': {'name': 'test.example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', + 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, + 'inherit': 'ALL'}, + {'properties': {'name': 'example2.com.', 'accountName': 'netflix', 'type': 'SECONDARY', + 'dnssecStatus': 
'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { + 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', + 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, + 'inherit': 'ALL'}] + ultradns._paginate = Mock(path, "zones") + ultradns._paginate.side_effect = [[paginate_response]] + result = ultradns.get_zones(account_number) + self.assertEqual(result, zones) From 785c1ca73ec18ed7c4bdbd806513d063c296f5c4 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 13:20:24 -0700 Subject: [PATCH 298/357] test_create_txt_record modified - get_zone_name mocked to return the zone name directly, instead of actually running the function. --- lemur/plugins/lemur_acme/tests/test_acme.py | 23 ++------------------- 1 file changed, 2 insertions(+), 21 deletions(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index a5e7c3e7..da935a46 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -377,29 +377,10 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.ultradns.current_app") def test_create_txt_record(self, mock_current_app): domain = "_acme_challenge.test.example.com" + zone = "test.example.com" token = "ABCDEFGHIJ" account_number = "1234567890" - path = "a/b/c" - paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', - 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, - 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', - 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, - 'inherit': 'ALL'}, - {'properties': {'name': 'test.example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', - 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, - 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', - 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, - 'inherit': 'ALL'}, - {'properties': {'name': 'example2.com.', 'accountName': 'netflix', 'type': 'SECONDARY', - 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, - 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', - 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, - 'inherit': 'ALL'}] - ultradns._paginate = Mock(path, "zones") - ultradns._paginate.side_effect = [[paginate_response]] + ultradns.get_zone_name = Mock(return_value=zone) mock_current_app.logger.debug = Mock() ultradns._post = Mock() log_data = { From 31c2d207a2bafd83676cba7b94e68d9f23fe61b7 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 13:23:05 -0700 Subject: [PATCH 299/357] test_delete_txt_record fixed. 
Function call was missing earlier --- lemur/plugins/lemur_acme/tests/test_acme.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index da935a46..ae78f911 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -429,6 +429,7 @@ class TestAcme(unittest.TestCase): 'resultInfo': {'totalCount': 1, 'offset': 0, 'returnedCount': 1}} ultradns._delete = Mock() mock_metrics.send = Mock() + ultradns.delete_txt_record(change_id, account_number, domain, token) mock_current_app.logger.debug.assert_not_called() mock_metrics.send.assert_not_called() From 37a1b55b0832b85bf2ccfbbd22f2fa92851bb2f9 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 13:27:21 -0700 Subject: [PATCH 300/357] test_delete_txt_record changed to mock get_zone_name and return the value directly instead of executing the function. --- lemur/plugins/lemur_acme/tests/test_acme.py | 22 ++------------------- 1 file changed, 2 insertions(+), 20 deletions(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index ae78f911..f6fe5b2f 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -396,30 +396,12 @@ class TestAcme(unittest.TestCase): @patch("lemur.extensions.metrics") def test_delete_txt_record(self, mock_metrics, mock_current_app): domain = "_acme_challenge.test.example.com" + zone = "test.example.com" token = "ABCDEFGHIJ" account_number = "1234567890" change_id = (domain, token) - path = "a/b/c" - paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', - 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, - 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', 'pdns154.ultradns.biz.', - 'pdns154.ultradns.org.']}}, 'inherit': 'ALL'}, - {'properties': {'name': 'test.example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', - 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, - 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', - 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, - 'inherit': 'ALL'}, - {'properties': {'name': 'example2.com.', 'accountName': 'netflix', 'type': 'SECONDARY', - 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, - 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', - 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, - 'inherit': 'ALL'}] - ultradns._paginate = Mock(path, "zones") - ultradns._paginate.side_effect = [[paginate_response]] mock_current_app.logger.debug = Mock() + ultradns.get_zone_name = Mock(return_value=zone) ultradns._post = Mock() ultradns._get = Mock() ultradns._get.return_value = {'zoneName': 'test.example.com.com', From 894502644c03aa0dc2fcbac12ed34360f8d8d9e0 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 13:39:20 -0700 Subject: [PATCH 301/357] test_wait_for_dns_change fixed! 
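As a side note on the pattern these ultradns tests share: collaborators on the module under test are swapped for Mock objects, the function is called once, and the structured log payload (or the returned status) is asserted. The sketch below is illustrative only, not part of this change; FakeDNS stands in for the real ultradns module.

    import unittest
    from unittest.mock import Mock


    class FakeDNS:
        """Stand-in for the module under test; the real tests patch ultradns."""

        def get_authoritative_nameserver(self):
            raise NotImplementedError

        def _has_dns_propagated(self, fqdn, token, nameserver):
            raise NotImplementedError

        def wait_for_dns_change(self, change_id):
            fqdn, token = change_id
            nameserver = self.get_authoritative_nameserver()
            status = self._has_dns_propagated(fqdn, token, nameserver)
            return {"function": "wait_for_dns_change", "fqdn": fqdn, "status": status}


    class TestWaitForDnsChange(unittest.TestCase):
        def test_wait_for_dns_change(self):
            dns = FakeDNS()
            # Swap collaborators for Mocks, as the ultradns tests do.
            dns._has_dns_propagated = Mock(return_value=True)
            dns.get_authoritative_nameserver = Mock(return_value="1.1.1.1")

            result = dns.wait_for_dns_change(("_acme-challenge.test.example.com", "TOKEN"))

            self.assertTrue(result["status"])
            dns._has_dns_propagated.assert_called_once_with(
                "_acme-challenge.test.example.com", "TOKEN", "1.1.1.1"
            )


    if __name__ == "__main__":
        unittest.main()
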
--- lemur/plugins/lemur_acme/tests/test_acme.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index f6fe5b2f..199b4a05 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -415,12 +415,26 @@ class TestAcme(unittest.TestCase): mock_current_app.logger.debug.assert_not_called() mock_metrics.send.assert_not_called() + @patch("lemur.plugins.lemur_acme.ultradns.current_app") @patch("lemur.extensions.metrics") - def test_wait_for_dns_change(self, mock_metrics): + def test_wait_for_dns_change(self, mock_metrics, mock_current_app): ultradns._has_dns_propagated = Mock(return_value=True) ultradns.get_authoritative_nameserver = Mock(return_value="0.0.0.0") mock_metrics.send = Mock() - mock_metrics.send.assert_not_called() + domain = "_acme-challenge.test.example.com" + token = "ABCDEFGHIJ" + change_id = (domain, token) + mock_current_app.logger.debug = Mock() + ultradns.wait_for_dns_change(change_id) + # mock_metrics.send.assert_not_called() + log_data = { + "function": "wait_for_dns_change", + "fqdn": domain, + "status": True, + "message": "Record status on Public DNS" + } + mock_current_app.logger.debug.assert_called_with(log_data) + def test_get_zone_name(self): zones = ['example.com', 'test.example.com'] From 3ff56fc5950ffd7ecb236a450f15b8218e5eedb5 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 13:42:11 -0700 Subject: [PATCH 302/357] Blank line removed --- lemur/plugins/lemur_acme/tests/test_acme.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index 199b4a05..61a738bc 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -435,7 +435,6 @@ class TestAcme(unittest.TestCase): } mock_current_app.logger.debug.assert_called_with(log_data) - def test_get_zone_name(self): zones = ['example.com', 'test.example.com'] zone = "test.example.com" From fa7f71d8599800fa5b54c2a2696f9793865a7481 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 13:53:10 -0700 Subject: [PATCH 303/357] Modified paginate response to dummy values --- lemur/plugins/lemur_acme/tests/test_acme.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index 61a738bc..2d2055d8 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -390,6 +390,7 @@ class TestAcme(unittest.TestCase): "message": "TXT record created" } result = ultradns.create_txt_record(domain, token, account_number) + # TODO: check change_id mock_current_app.logger.debug.assert_called_with(log_data) @patch("lemur.plugins.lemur_acme.ultradns.current_app") @@ -448,22 +449,22 @@ class TestAcme(unittest.TestCase): account_number = "1234567890" path = "a/b/c" zones = ['example.com', 'test.example.com'] - paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', + paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'example', 'type': 'PRIMARY', 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', 
'pdns154.ultradns.biz.', - 'pdns154.ultradns.org.']}}, 'inherit': 'ALL'}, - {'properties': {'name': 'test.example.com.', 'accountName': 'netflix', 'type': 'PRIMARY', + 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', 'example.ultradns.biz.', + 'example.ultradns.org.']}}, 'inherit': 'ALL'}, + {'properties': {'name': 'test.example.com.', 'accountName': 'example', 'type': 'PRIMARY', 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', - 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, + 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', + 'example.ultradns.biz.', 'example.ultradns.org.']}}, 'inherit': 'ALL'}, - {'properties': {'name': 'example2.com.', 'accountName': 'netflix', 'type': 'SECONDARY', + {'properties': {'name': 'example2.com.', 'accountName': 'example', 'type': 'SECONDARY', 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['pdns154.ultradns.com.', 'pdns154.ultradns.net.', - 'pdns154.ultradns.biz.', 'pdns154.ultradns.org.']}}, + 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', + 'example.ultradns.biz.', 'example.ultradns.org.']}}, 'inherit': 'ALL'}] ultradns._paginate = Mock(path, "zones") ultradns._paginate.side_effect = [[paginate_response]] From b4f4e4dc241bb182b87fa97584dc2f399e3aa71c Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 13:55:02 -0700 Subject: [PATCH 304/357] Added extra check for return value to test_create_txt_record --- lemur/plugins/lemur_acme/tests/test_acme.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index 2d2055d8..58857b75 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -380,6 +380,7 @@ class TestAcme(unittest.TestCase): zone = "test.example.com" token = "ABCDEFGHIJ" account_number = "1234567890" + change_id = (domain, token) ultradns.get_zone_name = Mock(return_value=zone) mock_current_app.logger.debug = Mock() ultradns._post = Mock() @@ -390,8 +391,8 @@ class TestAcme(unittest.TestCase): "message": "TXT record created" } result = ultradns.create_txt_record(domain, token, account_number) - # TODO: check change_id mock_current_app.logger.debug.assert_called_with(log_data) + self.assertEqual(result, change_id) @patch("lemur.plugins.lemur_acme.ultradns.current_app") @patch("lemur.extensions.metrics") From cadf372f7b5ca909a142ad732fdd92aaa56d0399 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 14:02:10 -0700 Subject: [PATCH 305/357] Removed hardcoded value from function call --- lemur/plugins/lemur_acme/tests/test_acme.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index 58857b75..b66e6d58 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -421,7 +421,8 @@ class TestAcme(unittest.TestCase): @patch("lemur.extensions.metrics") def test_wait_for_dns_change(self, mock_metrics, mock_current_app): ultradns._has_dns_propagated = Mock(return_value=True) - ultradns.get_authoritative_nameserver = 
Mock(return_value="0.0.0.0") + nameserver = "0.0.0.0" + ultradns.get_authoritative_nameserver = Mock(return_value=nameserver) mock_metrics.send = Mock() domain = "_acme-challenge.test.example.com" token = "ABCDEFGHIJ" From 43f5c8b34e74648bab47c63bebd783c1bd5a1410 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 14:08:06 -0700 Subject: [PATCH 306/357] Fixed indentation --- lemur/plugins/lemur_acme/tests/test_acme.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index b66e6d58..c4d10039 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -454,8 +454,9 @@ class TestAcme(unittest.TestCase): paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'example', 'type': 'PRIMARY', 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', 'example.ultradns.biz.', - 'example.ultradns.org.']}}, 'inherit': 'ALL'}, + 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', + 'example.ultradns.biz.', 'example.ultradns.org.']}}, + 'inherit': 'ALL'}, {'properties': {'name': 'test.example.com.', 'accountName': 'example', 'type': 'PRIMARY', 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { From a6bf081bec357fde6a17153fa57c1f51f0a621a6 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 14:08:27 -0700 Subject: [PATCH 307/357] Remove unused import --- lemur/plugins/lemur_acme/tests/test_acme.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index c4d10039..31a9e370 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -1,4 +1,3 @@ -import json import unittest from requests.models import Response From a97283f0a43bd3203dc8a00220e6d65a524bc13b Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 14:23:09 -0700 Subject: [PATCH 308/357] Fixed indentation --- lemur/plugins/lemur_acme/tests/test_acme.py | 43 ++++++++++++--------- 1 file changed, 25 insertions(+), 18 deletions(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index 31a9e370..f49141a8 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -450,24 +450,31 @@ class TestAcme(unittest.TestCase): account_number = "1234567890" path = "a/b/c" zones = ['example.com', 'test.example.com'] - paginate_response = [{'properties': {'name': 'example.com.', 'accountName': 'example', 'type': 'PRIMARY', - 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, - 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', - 'example.ultradns.biz.', 'example.ultradns.org.']}}, - 'inherit': 'ALL'}, - {'properties': {'name': 'test.example.com.', 'accountName': 'example', 'type': 'PRIMARY', - 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, - 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', - 
'example.ultradns.biz.', 'example.ultradns.org.']}}, - 'inherit': 'ALL'}, - {'properties': {'name': 'example2.com.', 'accountName': 'example', 'type': 'SECONDARY', - 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, - 'lastModifiedDateTime': '2017-06-14T06:45Z'}, 'registrarInfo': { - 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', - 'example.ultradns.biz.', 'example.ultradns.org.']}}, - 'inherit': 'ALL'}] + paginate_response = [{ + 'properties': { + 'name': 'example.com.', 'accountName': 'example', 'type': 'PRIMARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, + 'registrarInfo': { + 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', + 'example.ultradns.biz.', 'example.ultradns.org.']}}, + 'inherit': 'ALL'}, { + 'properties': { + 'name': 'test.example.com.', 'accountName': 'example', 'type': 'PRIMARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, + 'registrarInfo': { + 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', + 'example.ultradns.biz.', 'example.ultradns.org.']}}, + 'inherit': 'ALL'}, { + 'properties': { + 'name': 'example2.com.', 'accountName': 'example', 'type': 'SECONDARY', + 'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9, + 'lastModifiedDateTime': '2017-06-14T06:45Z'}, + 'registrarInfo': { + 'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.', + 'example.ultradns.biz.', 'example.ultradns.org.']}}, + 'inherit': 'ALL'}] ultradns._paginate = Mock(path, "zones") ultradns._paginate.side_effect = [[paginate_response]] result = ultradns.get_zones(account_number) From d9aef2da3e51cfe1c20383301b457207b183d3d0 Mon Sep 17 00:00:00 2001 From: Kush Bavishi Date: Wed, 7 Aug 2019 14:38:18 -0700 Subject: [PATCH 309/357] Changed dummy nameserver value --- lemur/plugins/lemur_acme/tests/test_acme.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index f49141a8..2f9dd719 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -420,7 +420,7 @@ class TestAcme(unittest.TestCase): @patch("lemur.extensions.metrics") def test_wait_for_dns_change(self, mock_metrics, mock_current_app): ultradns._has_dns_propagated = Mock(return_value=True) - nameserver = "0.0.0.0" + nameserver = "1.1.1.1" ultradns.get_authoritative_nameserver = Mock(return_value=nameserver) mock_metrics.send = Mock() domain = "_acme-challenge.test.example.com" From 9a02230d63419285ef4836784462de3538ce4847 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 7 Aug 2019 17:48:06 -0700 Subject: [PATCH 310/357] adding soft time outs for celery --- lemur/common/celery.py | 91 +++++++++++++++++++++++++++++++++++------- 1 file changed, 77 insertions(+), 14 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index f5edb9ab..e868585a 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -282,7 +282,7 @@ def clean_all_sources(): metrics.send(f"{function}.success", 'counter', 1) -@celery.task() +@celery.task(soft_time_limit=600) def clean_source(source): """ This celery task will clean the specified source. 
This is a destructive operation that will delete unused @@ -298,7 +298,13 @@ def clean_source(source): "source": source, } current_app.logger.debug(log_data) - clean([source], True) + try: + clean([source], True) + except SoftTimeLimitExceeded: + log_data["message"] = "Checking revoked: Time limit exceeded." + current_app.logger.error(log_data) + sentry.captureException() + metrics.send("clean_source_timeout", "counter", 1) @celery.task() @@ -391,7 +397,7 @@ def sync_source_destination(): metrics.send(f"{function}.success", 'counter', 1) -@celery.task() +@celery.task(soft_time_limit=3600) def certificate_reissue(): """ This celery task reissues certificates which are pending reissue @@ -403,14 +409,21 @@ def certificate_reissue(): "message": "reissuing certificates", } current_app.logger.debug(log_data) - cli_certificate.reissue(None, True) + try: + cli_certificate.reissue(None, True) + except SoftTimeLimitExceeded: + log_data["message"] = "Checking revoked: Time limit exceeded." + current_app.logger.error(log_data) + sentry.captureException() + metrics.send("certificate_reissue_timeout", "counter", 1) + return log_data["message"] = "reissuance completed" current_app.logger.debug(log_data) red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) -@celery.task() +@celery.task(soft_time_limit=3600) def certificate_rotate(): """ This celery task rotates certificates which are reissued but having endpoints attached to the replaced cert @@ -422,14 +435,21 @@ def certificate_rotate(): "message": "rotating certificates", } current_app.logger.debug(log_data) - cli_certificate.rotate(None, None, None, None, True) + try: + cli_certificate.rotate(None, None, None, None, True) + except SoftTimeLimitExceeded: + log_data["message"] = "Checking revoked: Time limit exceeded." + current_app.logger.error(log_data) + sentry.captureException() + metrics.send("certificate_rotate_timeout", "counter", 1) + return log_data["message"] = "rotation completed" current_app.logger.debug(log_data) red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) -@celery.task() +@celery.task(soft_time_limit=3600) def endpoints_expire(): """ This celery task removes all endpoints that have not been recently updated @@ -441,12 +461,19 @@ def endpoints_expire(): "message": "endpoints expire", } current_app.logger.debug(log_data) - cli_endpoints.expire(2) # Time in hours + try: + cli_endpoints.expire(2) # Time in hours + except SoftTimeLimitExceeded: + log_data["message"] = "Checking revoked: Time limit exceeded." + current_app.logger.error(log_data) + sentry.captureException() + metrics.send("endpoints_expire_timeout", "counter", 1) + return red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) -@celery.task() +@celery.task(soft_time_limit=600) def get_all_zones(): """ This celery syncs all zones from the available dns providers @@ -458,29 +485,58 @@ def get_all_zones(): "message": "refresh all zones from available DNS providers", } current_app.logger.debug(log_data) - cli_dns_providers.get_all_zones() + try: + cli_dns_providers.get_all_zones() + except SoftTimeLimitExceeded: + log_data["message"] = "Checking revoked: Time limit exceeded." 
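The pattern this change applies to every long-running task is easier to see in isolation: the task declares a soft_time_limit, and the body catches SoftTimeLimitExceeded so it can log, report, and return instead of being killed at the hard limit. A minimal standalone sketch, assuming nothing about Lemur's configuration (the app name, broker URL and do_clean helper are placeholders):

    from celery import Celery
    from celery.exceptions import SoftTimeLimitExceeded

    app = Celery("sketch", broker="redis://localhost:6379/0")  # placeholder broker


    def do_clean(source):
        """Stand-in for the real cleanup work."""
        pass


    @app.task(soft_time_limit=600)
    def clean_source_sketch(source):
        try:
            do_clean(source)
        except SoftTimeLimitExceeded:
            # Raised inside the task when the soft limit expires, giving the
            # task a chance to record the timeout and exit cleanly.
            print(f"clean_source timed out for {source}")
            return
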
+ current_app.logger.error(log_data) + sentry.captureException() + metrics.send("get_all_zones_timeout", "counter", 1) + return red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) -@celery.task() +@celery.task(soft_time_limit=3600) def check_revoked(): """ This celery task attempts to check if any certs are expired :return: """ + + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + function = f"{__name__}.{sys._getframe().f_code.co_name}" log_data = { "function": function, "message": "check if any certificates are revoked revoked", } + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_app.logger.debug(log_data) - cli_certificate.check_revoked() + try: + cli_certificate.check_revoked() + except SoftTimeLimitExceeded: + log_data["message"] = "Checking revoked: Time limit exceeded." + current_app.logger.error(log_data) + sentry.captureException() + metrics.send("check_revoked_timeout", "counter", 1) + return + red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) -@celery.task() +check_revoked() + + +@celery.task(soft_time_limit=3600) def notify_expirations(): """ This celery task notifies about expiring certs @@ -492,6 +548,13 @@ def notify_expirations(): "message": "notify for cert expiration", } current_app.logger.debug(log_data) - cli_notification.expirations(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])) + try: + cli_notification.expirations(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])) + except SoftTimeLimitExceeded: + log_data["message"] = "Checking revoked: Time limit exceeded." + current_app.logger.error(log_data) + sentry.captureException() + metrics.send("notify_expirations_timeout", "counter", 1) + return red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) From 3b9b94623fc0571e3777232906049b4c857e058f Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 7 Aug 2019 18:06:59 -0700 Subject: [PATCH 311/357] cleaning up --- lemur/common/celery.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index e868585a..dfeb7017 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -533,9 +533,6 @@ def check_revoked(): metrics.send(f"{function}.success", 'counter', 1) -check_revoked() - - @celery.task(soft_time_limit=3600) def notify_expirations(): """ From da9c91afb4f5b6bc6cb4016d5e3e049e4db41f13 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 8 Aug 2019 17:56:22 -0700 Subject: [PATCH 312/357] fixing metric bug --- lemur/common/celery.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index f5edb9ab..b19a9607 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -348,7 +348,7 @@ def sync_source(source): return try: sync([source]) - metrics.send(f"{function}.success", 'counter', '1', metric_tags={"source": source}) + metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source}) except SoftTimeLimitExceeded: log_data["message"] = "Error syncing source: Time limit exceeded." 
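The one-character fix just above ('1' to 1) matters because counter increments get summed; passing the value as a string is at best backend-dependent and at worst a type error. A toy illustration of the failure mode (this CounterMetrics class is illustrative only, not Lemur's metrics plugin):

    class CounterMetrics:
        """Toy in-memory metrics sink; many real backends also expect numeric counter increments."""

        def __init__(self):
            self.counters = {}

        def send(self, name, metric_type, value, metric_tags=None):
            if metric_type != "counter":
                return
            key = (name, tuple(sorted((metric_tags or {}).items())))
            # Summing only works with numeric increments.
            self.counters[key] = self.counters.get(key, 0) + value


    metrics = CounterMetrics()
    metrics.send("sync_source.success", "counter", 1, metric_tags={"source": "aws"})
    # metrics.send("sync_source.success", "counter", "1")  # TypeError: int + str
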
current_app.logger.error(log_data) From bf47f87c215f9c6042374ddf3a43f5f4bbc24d43 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 12 Aug 2019 13:52:01 -0700 Subject: [PATCH 313/357] preventing celery duplicate tasks --- lemur/common/celery.py | 91 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 91 insertions(+) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index b19a9607..a79ec838 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -248,6 +248,15 @@ def remove_old_acme_certs(): } pending_certs = pending_certificate_service.get_pending_certs("all") + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + # Delete pending certs more than a week old for cert in pending_certs: if datetime.now(timezone.utc) - cert.last_updated > timedelta(days=7): @@ -311,6 +320,17 @@ def sync_all_sources(): "function": function, "message": "creating celery task to sync source", } + + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + + sources = validate_sources("all") for source in sources: log_data["source"] = source.label @@ -340,6 +360,17 @@ def sync_source(source): "source": source, "task_id": task_id, } + + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + + current_app.logger.debug(log_data) if task_id and is_task_active(function, task_id, (source,)): @@ -378,6 +409,16 @@ def sync_source_destination(): "function": function, "message": "syncing AWS destinations and sources", } + + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_app.logger.debug(log_data) for dst in destinations_service.get_all(): if add_aws_destination_to_sources(dst): @@ -402,6 +443,16 @@ def certificate_reissue(): "function": function, "message": "reissuing certificates", } + + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_app.logger.debug(log_data) cli_certificate.reissue(None, True) log_data["message"] = "reissuance completed" @@ -421,6 +472,16 @@ def certificate_rotate(): "function": function, "message": "rotating certificates", } + + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_app.logger.debug(log_data) cli_certificate.rotate(None, None, None, None, True) log_data["message"] = "rotation completed" @@ -440,6 +501,16 @@ def endpoints_expire(): "function": function, "message": "endpoints expire", } + + task_id = None + if 
celery.current_task: + task_id = celery.current_task.request.id + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_app.logger.debug(log_data) cli_endpoints.expire(2) # Time in hours red.set(f'{function}.last_success', int(time.time())) @@ -457,6 +528,16 @@ def get_all_zones(): "function": function, "message": "refresh all zones from available DNS providers", } + + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_app.logger.debug(log_data) cli_dns_providers.get_all_zones() red.set(f'{function}.last_success', int(time.time())) @@ -491,6 +572,16 @@ def notify_expirations(): "function": function, "message": "notify for cert expiration", } + + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + if task_id and is_task_active(function, task_id, (id,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_app.logger.debug(log_data) cli_notification.expirations(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])) red.set(f'{function}.last_success', int(time.time())) From 07a9c56fb86b97c07a04b67fe576540d961cfb7b Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 13 Aug 2019 09:35:57 -0700 Subject: [PATCH 314/357] making lint happy --- lemur/common/celery.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index a79ec838..bcd7b580 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -31,7 +31,6 @@ from lemur.dns_providers import cli as cli_dns_providers from lemur.notifications import cli as cli_notification from lemur.endpoints import cli as cli_endpoints - if current_app: flask_app = current_app else: @@ -256,7 +255,7 @@ def remove_old_acme_certs(): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return - + # Delete pending certs more than a week old for cert in pending_certs: if datetime.now(timezone.utc) - cert.last_updated > timedelta(days=7): @@ -330,7 +329,6 @@ def sync_all_sources(): current_app.logger.debug(log_data) return - sources = validate_sources("all") for source in sources: log_data["source"] = source.label @@ -370,7 +368,6 @@ def sync_source(source): current_app.logger.debug(log_data) return - current_app.logger.debug(log_data) if task_id and is_task_active(function, task_id, (source,)): From 4d728738eeb2bcbc7afa4a30d15f6d7656674873 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 13 Aug 2019 11:42:43 -0700 Subject: [PATCH 315/357] handling celery tasks without any arguments --- lemur/common/celery.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index bcd7b580..fa739029 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -65,6 +65,9 @@ celery = make_celery(flask_app) def is_task_active(fun, task_id, args): from celery.task.control import inspect + if not args: + args = '()' # empty args + i = inspect() active_tasks = i.active() for _, tasks in active_tasks.items(): From c29f2825607df669ff67d3713fcfc4feb3fa96bc Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 13 Aug 2019 11:52:56 -0700 Subject: [PATCH 316/357] 
improved the flow for checking if the task is active --- lemur/common/celery.py | 169 ++++++++++++++++++++++++++--------------- 1 file changed, 109 insertions(+), 60 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index fa739029..a37f96e5 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -91,6 +91,21 @@ def report_celery_last_success_metrics(): """ function = f"{__name__}.{sys._getframe().f_code.co_name}" + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + log_data = { + "function": function, + "message": "recurrent task", + "task_id": task_id, + } + + if task_id and is_task_active(function, task_id, None): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_time = int(time.time()) schedule = current_app.config.get('CELERYBEAT_SCHEDULE') for _, t in schedule.items(): @@ -215,15 +230,25 @@ def fetch_acme_cert(id): @celery.task() def fetch_all_pending_acme_certs(): """Instantiate celery workers to resolve all pending Acme certificates""" - pending_certs = pending_certificate_service.get_unresolved_pending_certs() function = f"{__name__}.{sys._getframe().f_code.co_name}" + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + log_data = { "function": function, "message": "Starting job.", + "task_id": task_id, } + if task_id and is_task_active(function, task_id, None): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_app.logger.debug(log_data) + pending_certs = pending_certificate_service.get_unresolved_pending_certs() # We only care about certs using the acme-issuer plugin for cert in pending_certs: @@ -244,21 +269,23 @@ def fetch_all_pending_acme_certs(): def remove_old_acme_certs(): """Prune old pending acme certificates from the database""" function = f"{__name__}.{sys._getframe().f_code.co_name}" - log_data = { - "function": function, - "message": "Starting job.", - } - pending_certs = pending_certificate_service.get_pending_certs("all") - task_id = None if celery.current_task: task_id = celery.current_task.request.id - if task_id and is_task_active(function, task_id, (id,)): + log_data = { + "function": function, + "message": "Starting job.", + "task_id": task_id, + } + + if task_id and is_task_active(function, task_id, None): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return + pending_certs = pending_certificate_service.get_pending_certs("all") + # Delete pending certs more than a week old for cert in pending_certs: if datetime.now(timezone.utc) - cert.last_updated > timedelta(days=7): @@ -279,10 +306,21 @@ def clean_all_sources(): be ran periodically. This function triggers one celery task per source. 
""" function = f"{__name__}.{sys._getframe().f_code.co_name}" + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + log_data = { "function": function, "message": "Creating celery task to clean source", + "task_id": task_id, } + + if task_id and is_task_active(function, task_id, None): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + sources = validate_sources("all") for source in sources: log_data["source"] = source.label @@ -303,11 +341,22 @@ def clean_source(source): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + log_data = { "function": function, "message": "Cleaning source", "source": source, + "task_id": task_id, } + + if task_id and is_task_active(function, task_id, (source,)): + log_data["message"] = "Skipping task: Task is already active" + current_app.logger.debug(log_data) + return + current_app.logger.debug(log_data) clean([source], True) @@ -318,16 +367,17 @@ def sync_all_sources(): This function will sync certificates from all sources. This function triggers one celery task per source. """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - log_data = { - "function": function, - "message": "creating celery task to sync source", - } - task_id = None if celery.current_task: task_id = celery.current_task.request.id - if task_id and is_task_active(function, task_id, (id,)): + log_data = { + "function": function, + "message": "creating celery task to sync source", + "task_id": task_id, + } + + if task_id and is_task_active(function, task_id, None): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return @@ -355,6 +405,7 @@ def sync_source(source): task_id = None if celery.current_task: task_id = celery.current_task.request.id + log_data = { "function": function, "message": "Syncing source", @@ -362,21 +413,12 @@ def sync_source(source): "task_id": task_id, } - task_id = None - if celery.current_task: - task_id = celery.current_task.request.id - - if task_id and is_task_active(function, task_id, (id,)): + if task_id and is_task_active(function, task_id, (source,)): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return current_app.logger.debug(log_data) - - if task_id and is_task_active(function, task_id, (source,)): - log_data["message"] = "Skipping task: Task is already active" - current_app.logger.debug(log_data) - return try: sync([source]) metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source}) @@ -405,16 +447,17 @@ def sync_source_destination(): We rely on account numbers to avoid duplicates. 
""" function = f"{__name__}.{sys._getframe().f_code.co_name}" - log_data = { - "function": function, - "message": "syncing AWS destinations and sources", - } - task_id = None if celery.current_task: task_id = celery.current_task.request.id - if task_id and is_task_active(function, task_id, (id,)): + log_data = { + "function": function, + "message": "syncing AWS destinations and sources", + "task_id": task_id, + } + + if task_id and is_task_active(function, task_id, None): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return @@ -439,16 +482,17 @@ def certificate_reissue(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - log_data = { - "function": function, - "message": "reissuing certificates", - } - task_id = None if celery.current_task: task_id = celery.current_task.request.id - if task_id and is_task_active(function, task_id, (id,)): + log_data = { + "function": function, + "message": "reissuing certificates", + "task_id": task_id, + } + + if task_id and is_task_active(function, task_id, None): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return @@ -468,16 +512,18 @@ def certificate_rotate(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - log_data = { - "function": function, - "message": "rotating certificates", - } - task_id = None if celery.current_task: task_id = celery.current_task.request.id - if task_id and is_task_active(function, task_id, (id,)): + log_data = { + "function": function, + "message": "rotating certificates", + "task_id": task_id, + + } + + if task_id and is_task_active(function, task_id, None): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return @@ -497,16 +543,17 @@ def endpoints_expire(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - log_data = { - "function": function, - "message": "endpoints expire", - } - task_id = None if celery.current_task: task_id = celery.current_task.request.id - if task_id and is_task_active(function, task_id, (id,)): + log_data = { + "function": function, + "message": "endpoints expire", + "task_id": task_id, + } + + if task_id and is_task_active(function, task_id, None): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return @@ -524,16 +571,17 @@ def get_all_zones(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - log_data = { - "function": function, - "message": "refresh all zones from available DNS providers", - } - task_id = None if celery.current_task: task_id = celery.current_task.request.id - if task_id and is_task_active(function, task_id, (id,)): + log_data = { + "function": function, + "message": "refresh all zones from available DNS providers", + "task_id": task_id, + } + + if task_id and is_task_active(function, task_id, None): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return @@ -568,16 +616,17 @@ def notify_expirations(): :return: """ function = f"{__name__}.{sys._getframe().f_code.co_name}" - log_data = { - "function": function, - "message": "notify for cert expiration", - } - task_id = None if celery.current_task: task_id = celery.current_task.request.id - if task_id and is_task_active(function, task_id, (id,)): + log_data = { + "function": function, + "message": "notify for cert expiration", + "task_id": task_id, + } + + if task_id and is_task_active(function, 
task_id, None): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return From a3dfc3ef0ad9c43fab1862a6c5b4361096b452ff Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 13 Aug 2019 11:58:58 -0700 Subject: [PATCH 317/357] consistency --- lemur/common/celery.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index dfeb7017..acb20081 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -503,18 +503,18 @@ def check_revoked(): This celery task attempts to check if any certs are expired :return: """ - + function = f"{__name__}.{sys._getframe().f_code.co_name}" task_id = None if celery.current_task: task_id = celery.current_task.request.id - function = f"{__name__}.{sys._getframe().f_code.co_name}" log_data = { "function": function, "message": "check if any certificates are revoked revoked", + "task_id": task_id, } - if task_id and is_task_active(function, task_id, (id,)): + if task_id and is_task_active(function, task_id, None): log_data["message"] = "Skipping task: Task is already active" current_app.logger.debug(log_data) return From 22c60fedad9047edf22d4126442e5ed48a338a70 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 13 Aug 2019 12:11:04 -0700 Subject: [PATCH 318/357] cosmetics --- lemur/common/celery.py | 1 + 1 file changed, 1 insertion(+) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index a37f96e5..06b93a31 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -31,6 +31,7 @@ from lemur.dns_providers import cli as cli_dns_providers from lemur.notifications import cli as cli_notification from lemur.endpoints import cli as cli_endpoints + if current_app: flask_app = current_app else: From 6e17d36d76b228d878ecab1c2f983de25237080f Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 13 Aug 2019 12:16:23 -0700 Subject: [PATCH 319/357] typos --- lemur/common/celery.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index acb20081..e36d8b35 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -301,7 +301,7 @@ def clean_source(source): try: clean([source], True) except SoftTimeLimitExceeded: - log_data["message"] = "Checking revoked: Time limit exceeded." + log_data["message"] = "Clean source: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() metrics.send("clean_source_timeout", "counter", 1) @@ -412,7 +412,7 @@ def certificate_reissue(): try: cli_certificate.reissue(None, True) except SoftTimeLimitExceeded: - log_data["message"] = "Checking revoked: Time limit exceeded." + log_data["message"] = "Certificate reissue: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() metrics.send("certificate_reissue_timeout", "counter", 1) @@ -438,7 +438,7 @@ def certificate_rotate(): try: cli_certificate.rotate(None, None, None, None, True) except SoftTimeLimitExceeded: - log_data["message"] = "Checking revoked: Time limit exceeded." + log_data["message"] = "Certificate rotate: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() metrics.send("certificate_rotate_timeout", "counter", 1) @@ -464,7 +464,7 @@ def endpoints_expire(): try: cli_endpoints.expire(2) # Time in hours except SoftTimeLimitExceeded: - log_data["message"] = "Checking revoked: Time limit exceeded." + log_data["message"] = "endpoint expire: Time limit exceeded." 
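The duplicate-task guard that keeps appearing in these diffs builds on celery's remote inspection API: inspect().active() asks the reachable workers for the tasks they are currently executing, keyed by worker name. A small sketch of what that call returns (it needs a reachable broker and at least one worker, and the exact payload shape can vary between celery versions):

    from celery.task.control import inspect  # celery 4.x import path, as used above

    active = inspect().active() or {}  # None when no workers respond
    for worker, tasks in active.items():
        for task in tasks:
            # Each entry carries at least the task id, registered name and args,
            # which is what a "skip if already running" check compares against.
            print(worker, task["id"], task["name"], task["args"])
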
current_app.logger.error(log_data) sentry.captureException() metrics.send("endpoints_expire_timeout", "counter", 1) @@ -488,7 +488,7 @@ def get_all_zones(): try: cli_dns_providers.get_all_zones() except SoftTimeLimitExceeded: - log_data["message"] = "Checking revoked: Time limit exceeded." + log_data["message"] = "get all zones: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() metrics.send("get_all_zones_timeout", "counter", 1) @@ -548,7 +548,7 @@ def notify_expirations(): try: cli_notification.expirations(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])) except SoftTimeLimitExceeded: - log_data["message"] = "Checking revoked: Time limit exceeded." + log_data["message"] = "Notify expiring Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() metrics.send("notify_expirations_timeout", "counter", 1) From 2de3f287ab3ace80e6fb750e12d6f4b990bb02c5 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 13 Aug 2019 12:21:27 -0700 Subject: [PATCH 320/357] standardizing the timeouts to easier monitor any timeouts --- lemur/common/celery.py | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/lemur/common/celery.py b/lemur/common/celery.py index e36d8b35..a3f9cc5f 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -304,7 +304,7 @@ def clean_source(source): log_data["message"] = "Clean source: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() - metrics.send("clean_source_timeout", "counter", 1) + metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) @celery.task() @@ -359,9 +359,8 @@ def sync_source(source): log_data["message"] = "Error syncing source: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() - metrics.send( - "sync_source_timeout", "counter", 1, metric_tags={"source": source} - ) + metrics.send("sync_source_timeout", "counter", 1, metric_tags={"source": source}) + metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return log_data["message"] = "Done syncing source" @@ -415,8 +414,9 @@ def certificate_reissue(): log_data["message"] = "Certificate reissue: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() - metrics.send("certificate_reissue_timeout", "counter", 1) + metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return + log_data["message"] = "reissuance completed" current_app.logger.debug(log_data) red.set(f'{function}.last_success', int(time.time())) @@ -441,8 +441,9 @@ def certificate_rotate(): log_data["message"] = "Certificate rotate: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() - metrics.send("certificate_rotate_timeout", "counter", 1) + metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return + log_data["message"] = "rotation completed" current_app.logger.debug(log_data) red.set(f'{function}.last_success', int(time.time())) @@ -467,8 +468,9 @@ def endpoints_expire(): log_data["message"] = "endpoint expire: Time limit exceeded." 
current_app.logger.error(log_data) sentry.captureException() - metrics.send("endpoints_expire_timeout", "counter", 1) + metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return + red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) @@ -491,8 +493,9 @@ def get_all_zones(): log_data["message"] = "get all zones: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() - metrics.send("get_all_zones_timeout", "counter", 1) + metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return + red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) @@ -526,7 +529,7 @@ def check_revoked(): log_data["message"] = "Checking revoked: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() - metrics.send("check_revoked_timeout", "counter", 1) + metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return red.set(f'{function}.last_success', int(time.time())) @@ -551,7 +554,8 @@ def notify_expirations(): log_data["message"] = "Notify expiring Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() - metrics.send("notify_expirations_timeout", "counter", 1) + metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return + red.set(f'{function}.last_success', int(time.time())) metrics.send(f"{function}.success", 'counter', 1) From 1c6fee7292b93da2ca2157c2ac340bdd7c7bcb4e Mon Sep 17 00:00:00 2001 From: Curtis Castrapel Date: Thu, 15 Aug 2019 10:52:26 -0700 Subject: [PATCH 321/357] Allow better DNS autodetection for domains that directly match a DNS hosted zone --- lemur/plugins/lemur_acme/plugin.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index b0774cbe..f31ffdcb 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -294,7 +294,7 @@ class AcmeHandler(object): if not dns_provider.domains: continue for name in dns_provider.domains: - if domain.endswith("." + name): + if name == domain or domain.endswith("." 
+ name): if len(name) > match_length: self.dns_providers_for_domain[domain] = [dns_provider] match_length = len(name) @@ -370,7 +370,12 @@ class AcmeHandler(object): pass def get_dns_provider(self, type): - provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53, "ultradns": ultradns} + provider_types = { + "cloudflare": cloudflare, + "dyn": dyn, + "route53": route53, + "ultradns": ultradns, + } provider = provider_types.get(type) if not provider: raise UnknownProvider("No such DNS provider: {}".format(type)) @@ -424,7 +429,12 @@ class ACMEIssuerPlugin(IssuerPlugin): def get_dns_provider(self, type): self.acme = AcmeHandler() - provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53, "ultradns": ultradns} + provider_types = { + "cloudflare": cloudflare, + "dyn": dyn, + "route53": route53, + "ultradns": ultradns, + } provider = provider_types.get(type) if not provider: raise UnknownProvider("No such DNS provider: {}".format(type)) From 9b04d901c482d1d8909d7d5d571ddded35235b94 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 15 Aug 2019 19:14:08 -0700 Subject: [PATCH 322/357] metric for missing certificate from an endpoint --- lemur/sources/service.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index ec988623..b6bdb1be 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -15,6 +15,7 @@ from lemur.sources.models import Source from lemur.certificates.models import Certificate from lemur.certificates import service as certificate_service from lemur.endpoints import service as endpoint_service +from lemur.extensions import metrics from lemur.destinations import service as destination_service from lemur.certificates.schemas import CertificateUploadInputSchema @@ -94,6 +95,8 @@ def sync_endpoints(source): certificate_name, endpoint["name"] ) ) + metrics.send("endpoint.certificate.not.found", + "counter", 1, metric_tags={"cert": certificate_name, "endpoint": endpoint["name"]}) continue policy = endpoint.pop("policy") From e5e395f0d964d6fa8b43442576ef0b92979f2a96 Mon Sep 17 00:00:00 2001 From: Javier Ramos Date: Tue, 20 Aug 2019 09:29:58 +0200 Subject: [PATCH 323/357] Show number of found items in pager This commit does not involve any additional query as the data is already in API calls' responses --- lemur/static/app/angular/pager.html | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lemur/static/app/angular/pager.html b/lemur/static/app/angular/pager.html index 3dc8a7d0..d9ee5204 100644 --- a/lemur/static/app/angular/pager.html +++ b/lemur/static/app/angular/pager.html @@ -4,6 +4,9 @@ +

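For reference, the hosted-zone matching rule introduced in the lemur_acme change above (a zone now matches either exactly or as a dot-suffix, and the longest matching zone wins) can be sketched in isolation. The helper below is illustrative only; it works on bare zone names rather than Lemur's dns_provider objects, and most_specific_zone is a hypothetical name, not code from the patch:

    def most_specific_zone(domain, zones):
        """Return the longest zone that equals domain or is a dot-suffix of it.

        Illustrative helper only; the plugin itself tracks provider objects,
        not bare zone names.
        """
        best = None
        for zone in zones:
            if domain == zone or domain.endswith("." + zone):
                if best is None or len(zone) > len(best):
                    best = zone
        return best

    # The apex domain now matches its own hosted zone directly,
    # instead of only matching sub-domains of that zone.
    assert most_specific_zone("example.org", ["example.org", "org"]) == "example.org"
    assert most_specific_zone("san.example.org", ["example.org", "org"]) == "example.org"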
From db91e48395da9273b16405283bfddf20a27ca43b Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 21 Aug 2019 09:54:18 -0700 Subject: [PATCH 324/357] adding account number for better logging, since the endpoint is not available in Lemur DB --- lemur/sources/service.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index b6bdb1be..d5bd7426 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -96,7 +96,8 @@ def sync_endpoints(source): ) ) metrics.send("endpoint.certificate.not.found", - "counter", 1, metric_tags={"cert": certificate_name, "endpoint": endpoint["name"]}) + "counter", 1, + metric_tags={"cert": certificate_name, "endpoint": endpoint["name"], "acct": s.get_option("accountNumber", source.options)}) continue policy = endpoint.pop("policy") From 972051a61eb8dd9e1c60c5de9ad5e3aeef860eb6 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 20 Sep 2019 10:16:23 -0700 Subject: [PATCH 325/357] removing 3 and 4 years from validity range options --- .../app/angular/certificates/certificate/tracking.tpl.html | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/lemur/static/app/angular/certificates/certificate/tracking.tpl.html b/lemur/static/app/angular/certificates/certificate/tracking.tpl.html index 19d8f37f..7ac2107f 100644 --- a/lemur/static/app/angular/certificates/certificate/tracking.tpl.html +++ b/lemur/static/app/angular/certificates/certificate/tracking.tpl.html @@ -133,7 +133,7 @@
@@ -141,8 +141,6 @@ [the two removed option lines for the 3- and 4-year validity choices; markup lost in extraction]
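The next patch replaces the deprecated relative form of arrow's replace() with shift() throughout the code base. A minimal sketch of the difference, assuming only that the arrow package is installed:

    import arrow

    now = arrow.utcnow()

    # shift() applies a relative offset; this is what the old
    # now.replace(years=+2) spelling did before it was deprecated.
    two_years_out = now.shift(years=+2)
    assert two_years_out.year == now.year + 2

    # replace() remains valid for setting absolute fields.
    start_of_hour = now.replace(minute=0, second=0, microsecond=0)
    assert start_of_hour.minute == 0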
From a13c45e9cce2a6bf9de53d2516f3aec32f54ddc0 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 20 Sep 2019 13:49:38 -0700 Subject: [PATCH 326/357] updating dependencies, and fixing the deprecated arrow.replaces to shift --- lemur/certificates/service.py | 4 +- lemur/common/missing.py | 4 +- lemur/pending_certificates/schemas.py | 4 +- lemur/pending_certificates/service.py | 2 +- lemur/plugins/lemur_digicert/plugin.py | 10 ++-- lemur/plugins/lemur_verisign/plugin.py | 14 ++--- lemur/tests/test_missing.py | 4 +- package.json | 4 +- requirements-dev.txt | 30 ++++++---- requirements-docs.txt | 75 ++++++++++++----------- requirements-tests.txt | 83 ++++++++++++++------------ requirements.in | 2 +- requirements.txt | 67 +++++++++++---------- 13 files changed, 159 insertions(+), 144 deletions(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 5a65c383..1a0cdc5a 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -419,7 +419,7 @@ def render(args): ) if time_range: - to = arrow.now().replace(weeks=+time_range).format("YYYY-MM-DD") + to = arrow.now().shift(weeks=+time_range).format("YYYY-MM-DD") now = arrow.now().format("YYYY-MM-DD") query = query.filter(Certificate.not_after <= to).filter( Certificate.not_after >= now @@ -561,7 +561,7 @@ def stats(**kwargs): """ if kwargs.get("metric") == "not_after": start = arrow.utcnow() - end = start.replace(weeks=+32) + end = start.shift(weeks=+32) items = ( database.db.session.query(Certificate.issuer, func.count(Certificate.id)) .group_by(Certificate.issuer) diff --git a/lemur/common/missing.py b/lemur/common/missing.py index 2f5156df..f991d2e3 100644 --- a/lemur/common/missing.py +++ b/lemur/common/missing.py @@ -15,11 +15,11 @@ def convert_validity_years(data): now = arrow.utcnow() data["validity_start"] = now.isoformat() - end = now.replace(years=+int(data["validity_years"])) + end = now.shift(years=+int(data["validity_years"])) if not current_app.config.get("LEMUR_ALLOW_WEEKEND_EXPIRATION", True): if is_weekend(end): - end = end.replace(days=-2) + end = end.shift(days=-2) data["validity_end"] = end.isoformat() return data diff --git a/lemur/pending_certificates/schemas.py b/lemur/pending_certificates/schemas.py index 68f22b4a..989178e9 100644 --- a/lemur/pending_certificates/schemas.py +++ b/lemur/pending_certificates/schemas.py @@ -46,10 +46,10 @@ class PendingCertificateOutputSchema(LemurOutputSchema): # Note aliasing is the first step in deprecating these fields. 
notify = fields.Boolean() - active = fields.Boolean(attribute="notify") + active = fields.Boolean(attribute="notify", dump_only=True) cn = fields.String() - common_name = fields.String(attribute="cn") + common_name = fields.String(attribute="cn", dump_only=True) owner = fields.Email() diff --git a/lemur/pending_certificates/service.py b/lemur/pending_certificates/service.py index 935ea689..8b4d033c 100644 --- a/lemur/pending_certificates/service.py +++ b/lemur/pending_certificates/service.py @@ -244,7 +244,7 @@ def render(args): ) if time_range: - to = arrow.now().replace(weeks=+time_range).format("YYYY-MM-DD") + to = arrow.now().shift(weeks=+time_range).format("YYYY-MM-DD") now = arrow.now().format("YYYY-MM-DD") query = query.filter(PendingCertificate.not_after <= to).filter( PendingCertificate.not_after >= now diff --git a/lemur/plugins/lemur_digicert/plugin.py b/lemur/plugins/lemur_digicert/plugin.py index c5b01cc4..d2648bc1 100644 --- a/lemur/plugins/lemur_digicert/plugin.py +++ b/lemur/plugins/lemur_digicert/plugin.py @@ -72,11 +72,11 @@ def determine_validity_years(end_date): """ now = arrow.utcnow() - if end_date < now.replace(years=+1): + if end_date < now.shift(years=+1): return 1 - elif end_date < now.replace(years=+2): + elif end_date < now.shift(years=+2): return 2 - elif end_date < now.replace(years=+3): + elif end_date < now.shift(years=+3): return 3 raise Exception( @@ -148,12 +148,12 @@ def map_cis_fields(options, csr): """ if not options.get("validity_years"): if not options.get("validity_end"): - options["validity_end"] = arrow.utcnow().replace( + options["validity_end"] = arrow.utcnow().shift( years=current_app.config.get("DIGICERT_DEFAULT_VALIDITY", 1) ) options["validity_years"] = determine_validity_years(options["validity_end"]) else: - options["validity_end"] = arrow.utcnow().replace( + options["validity_end"] = arrow.utcnow().shift( years=options["validity_years"] ) diff --git a/lemur/plugins/lemur_verisign/plugin.py b/lemur/plugins/lemur_verisign/plugin.py index 65bd1cac..7bf517b7 100644 --- a/lemur/plugins/lemur_verisign/plugin.py +++ b/lemur/plugins/lemur_verisign/plugin.py @@ -111,16 +111,14 @@ def process_options(options): data["subject_alt_names"] = ",".join(get_additional_names(options)) - if options.get("validity_end") > arrow.utcnow().replace(years=2): + if options.get("validity_end") > arrow.utcnow().shift(years=2): raise Exception( "Verisign issued certificates cannot exceed two years in validity" ) if options.get("validity_end"): # VeriSign (Symantec) only accepts strictly smaller than 2 year end date - if options.get("validity_end") < arrow.utcnow().replace(years=2).replace( - days=-1 - ): + if options.get("validity_end") < arrow.utcnow().shift(years=2, days=-1): period = get_default_issuance(options) data["specificEndDate"] = options["validity_end"].format("MM/DD/YYYY") data["validityPeriod"] = period @@ -149,9 +147,9 @@ def get_default_issuance(options): """ now = arrow.utcnow() - if options["validity_end"] < now.replace(years=+1): + if options["validity_end"] < now.shift(years=+1): validity_period = "1Y" - elif options["validity_end"] < now.replace(years=+2): + elif options["validity_end"] < now.shift(years=+2): validity_period = "2Y" else: raise Exception( @@ -261,7 +259,7 @@ class VerisignIssuerPlugin(IssuerPlugin): url = current_app.config.get("VERISIGN_URL") + "/reportingws" end = arrow.now() - start = end.replace(days=-7) + start = end.shift(days=-7) data = { "reportType": "detail", @@ -299,7 +297,7 @@ class VerisignSourcePlugin(SourcePlugin): 
def get_certificates(self): url = current_app.config.get("VERISIGN_URL") + "/reportingws" end = arrow.now() - start = end.replace(years=-5) + start = end.shift(years=-5) data = { "reportType": "detail", "startDate": start.format("MM/DD/YYYY"), diff --git a/lemur/tests/test_missing.py b/lemur/tests/test_missing.py index be615ced..59bac2d6 100644 --- a/lemur/tests/test_missing.py +++ b/lemur/tests/test_missing.py @@ -10,11 +10,11 @@ def test_convert_validity_years(session): data = convert_validity_years(dict(validity_years=2)) assert data["validity_start"] == arrow.utcnow().isoformat() - assert data["validity_end"] == arrow.utcnow().replace(years=+2).isoformat() + assert data["validity_end"] == arrow.utcnow().shift(years=+2).isoformat() with freeze_time("2015-01-10"): data = convert_validity_years(dict(validity_years=1)) assert ( data["validity_end"] - == arrow.utcnow().replace(years=+1, days=-2).isoformat() + == arrow.utcnow().shift(years=+1, days=-2).isoformat() ) diff --git a/package.json b/package.json index fe1267a6..9b899176 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,7 @@ }, "dependencies": { "bower": "^1.8.2", - "browser-sync": "^2.3.1", + "browser-sync": "^2.26.7", "del": "^2.2.2", "gulp-autoprefixer": "^3.1.1", "gulp-cache": "^0.4.5", @@ -25,7 +25,7 @@ "gulp-minify-css": "^1.2.4", "gulp-minify-html": "~1.0.6", "gulp-ng-annotate": "~2.0.0", - "gulp-ng-html2js": "~0.2.2", + "gulp-ng-html2js": "^0.2.3", "gulp-notify": "^2.2.0", "gulp-plumber": "^1.1.0", "gulp-print": "^2.0.1", diff --git a/requirements-dev.txt b/requirements-dev.txt index 030c3f93..6dff5655 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,31 +6,35 @@ # aspy.yaml==1.3.0 # via pre-commit bleach==3.1.0 # via readme-renderer -certifi==2019.3.9 # via requests -cfgv==2.0.0 # via pre-commit +certifi==2019.9.11 # via requests +cfgv==2.0.1 # via pre-commit chardet==3.0.4 # via requests -docutils==0.14 # via readme-renderer +docutils==0.15.2 # via readme-renderer flake8==3.5.0 -identify==1.4.3 # via pre-commit +identify==1.4.7 # via pre-commit idna==2.8 # via requests -importlib-metadata==0.17 # via pre-commit -invoke==1.2.0 +importlib-metadata==0.23 # via pre-commit +invoke==1.3.0 mccabe==0.6.1 # via flake8 +more-itertools==7.2.0 # via zipp nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.16.1 +pre-commit==1.18.3 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 pygments==2.4.2 # via readme-renderer -pyyaml==5.1 +pyyaml==5.1.2 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine requests==2.22.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit -tqdm==4.32.1 # via twine -twine==1.13.0 -urllib3==1.25.3 # via requests -virtualenv==16.6.0 # via pre-commit +tqdm==4.36.1 # via twine +twine==1.15.0 +urllib3==1.25.5 # via requests +virtualenv==16.7.5 # via pre-commit webencodings==0.5.1 # via bleach -zipp==0.5.1 # via importlib-metadata +zipp==0.6.0 # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.2.0 # via twine diff --git a/requirements-docs.txt b/requirements-docs.txt index c0fe427e..05cfb49c 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,23 +4,23 @@ # # pip-compile --no-index --output-file=requirements-docs.txt requirements-docs.in # -acme==0.34.2 +acme==0.38.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 -alembic==1.0.10 -amqp==2.5.0 -aniso8601==6.0.0 -arrow==0.14.2 
+alembic==1.2.0 +amqp==2.5.1 +aniso8601==8.0.0 +arrow==0.15.2 asn1crypto==0.24.0 asyncpool==1.0 babel==2.7.0 # via sphinx -bcrypt==3.1.6 -billiard==3.6.0.0 +bcrypt==3.1.7 +billiard==3.6.1.0 blinker==1.4 -boto3==1.9.160 -botocore==1.12.160 +boto3==1.9.232 +botocore==1.12.232 celery[redis]==4.3.0 -certifi==2019.3.9 +certifi==2019.9.11 certsrv==2.1.1 cffi==1.12.3 chardet==3.0.4 @@ -29,10 +29,10 @@ cloudflare==2.3.0 cryptography==2.7 dnspython3==1.15.0 dnspython==1.15.0 -docutils==0.14 +docutils==0.15.2 dyn==1.8.1 flask-bcrypt==0.7.1 -flask-cors==3.0.7 +flask-cors==3.0.8 flask-mail==0.9.1 flask-migrate==2.5.2 flask-principal==0.4.0 @@ -40,10 +40,10 @@ flask-replicated==1.3 flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.0 -flask==1.0.3 +flask==1.1.1 future==0.17.1 gunicorn==19.9.0 -hvac==0.9.1 +hvac==0.9.5 idna==2.8 imagesize==1.1.0 # via sphinx inflection==0.3.1 @@ -51,47 +51,47 @@ itsdangerous==1.1.0 javaobj-py3==0.3.0 jinja2==2.10.1 jmespath==0.9.4 -josepy==1.1.0 +josepy==1.2.0 jsonlines==1.2.0 kombu==4.5.0 lockfile==0.12.2 logmatic-python==0.1.7 -mako==1.0.11 +mako==1.1.0 markupsafe==1.1.1 -marshmallow-sqlalchemy==0.16.3 -marshmallow==2.19.2 +marshmallow-sqlalchemy==0.19.0 +marshmallow==2.20.4 mock==3.0.5 ndg-httpsclient==0.5.1 -packaging==19.0 # via sphinx -paramiko==2.4.2 -pem==19.1.0 -psycopg2==2.8.2 -pyasn1-modules==0.2.5 -pyasn1==0.4.5 +packaging==19.2 # via sphinx +paramiko==2.6.0 +pem==19.2.0 +psycopg2==2.8.3 +pyasn1-modules==0.2.6 +pyasn1==0.4.7 pycparser==2.19 -pycryptodomex==3.8.2 +pycryptodomex==3.9.0 pygments==2.4.2 # via sphinx pyjks==19.0.0 pyjwt==1.7.1 pynacl==1.3.0 pyopenssl==19.0.0 -pyparsing==2.4.0 # via packaging +pyparsing==2.4.2 # via packaging pyrfc3339==1.1 python-dateutil==2.8.0 python-editor==1.0.4 python-json-logger==0.1.11 -pytz==2019.1 -pyyaml==5.1 +pytz==2019.2 +pyyaml==5.1.2 raven[flask]==6.10.0 -redis==3.2.1 +redis==3.3.8 requests-toolbelt==0.9.1 requests[security]==2.22.0 retrying==1.3.3 -s3transfer==0.2.0 +s3transfer==0.2.1 six==1.12.0 -snowballstemmer==1.2.1 # via sphinx +snowballstemmer==1.9.1 # via sphinx sphinx-rtd-theme==0.4.3 -sphinx==2.1.0 +sphinx==2.2.0 sphinxcontrib-applehelp==1.0.1 # via sphinx sphinxcontrib-devhelp==1.0.1 # via sphinx sphinxcontrib-htmlhelp==1.0.2 # via sphinx @@ -99,11 +99,14 @@ sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.2 # via sphinx sphinxcontrib-serializinghtml==1.1.3 # via sphinx -sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.4 +sqlalchemy-utils==0.34.2 +sqlalchemy==1.3.8 tabulate==0.8.3 twofish==0.3.0 -urllib3==1.25.3 +urllib3==1.25.5 vine==1.3.0 -werkzeug==0.15.4 +werkzeug==0.16.0 xmltodict==0.12.0 + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.2.0 # via acme, josepy, sphinx diff --git a/requirements-tests.txt b/requirements-tests.txt index 77bc92af..242e7e5c 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -7,75 +7,82 @@ appdirs==1.4.3 # via black asn1crypto==0.24.0 # via cryptography atomicwrites==1.3.0 # via pytest -attrs==19.1.0 # via black, pytest -aws-sam-translator==1.11.0 # via cfn-lint +attrs==19.1.0 # via black, jsonschema, pytest +aws-sam-translator==1.14.0 # via cfn-lint aws-xray-sdk==2.4.2 # via moto -bandit==1.6.0 +bandit==1.6.2 black==19.3b0 -boto3==1.9.160 # via aws-sam-translator, moto +boto3==1.9.232 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.160 # via aws-xray-sdk, boto3, moto, s3transfer -certifi==2019.3.9 # via requests 
+botocore==1.12.232 # via aws-xray-sdk, boto3, moto, s3transfer +certifi==2019.9.11 # via requests cffi==1.12.3 # via cryptography -cfn-lint==0.21.4 # via moto +cfn-lint==0.24.1 # via moto chardet==3.0.4 # via requests click==7.0 # via black, flask -coverage==4.5.3 -cryptography==2.7 # via moto -docker==4.0.1 # via moto -docutils==0.14 # via botocore -ecdsa==0.13.2 # via python-jose +coverage==4.5.4 +cryptography==2.7 # via moto, sshpubkeys +datetime==4.3 # via moto +docker==4.0.2 # via moto +docutils==0.15.2 # via botocore +ecdsa==0.13.2 # via python-jose, sshpubkeys factory-boy==2.12.0 -faker==1.0.7 -flask==1.0.3 # via pytest-flask +faker==2.0.2 +flask==1.1.1 # via pytest-flask freezegun==0.3.12 future==0.17.1 # via aws-xray-sdk, python-jose gitdb2==2.0.5 # via gitpython -gitpython==2.1.11 # via bandit +gitpython==3.0.2 # via bandit idna==2.8 # via moto, requests -importlib-metadata==0.17 # via pluggy, pytest +importlib-metadata==0.23 # via pluggy, pytest itsdangerous==1.1.0 # via flask jinja2==2.10.1 # via flask, moto jmespath==0.9.4 # via boto3, botocore jsondiff==1.1.2 # via moto -jsonpatch==1.23 # via cfn-lint +jsonpatch==1.24 # via cfn-lint jsonpickle==1.2 # via aws-xray-sdk jsonpointer==2.0 # via jsonpatch -jsonschema==2.6.0 # via aws-sam-translator, cfn-lint +jsonschema==3.0.2 # via aws-sam-translator, cfn-lint markupsafe==1.1.1 # via jinja2 mock==3.0.5 # via moto -more-itertools==7.0.0 # via pytest -moto==1.3.8 +more-itertools==7.2.0 # via pytest, zipp +moto==1.3.13 nose==1.3.7 -packaging==19.0 # via pytest -pbr==5.2.1 # via stevedore -pluggy==0.12.0 # via pytest +packaging==19.2 # via pytest +pbr==5.4.3 # via stevedore +pluggy==0.13.0 # via pytest py==1.8.0 # via pytest -pyasn1==0.4.5 # via rsa +pyasn1==0.4.7 # via rsa pycparser==2.19 # via cffi pyflakes==2.1.1 -pyparsing==2.4.0 # via packaging +pyparsing==2.4.2 # via packaging +pyrsistent==0.15.4 # via jsonschema pytest-flask==0.15.0 pytest-mock==1.10.4 -pytest==4.6.2 +pytest==5.1.2 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==3.0.1 # via moto -pytz==2019.1 # via moto -pyyaml==5.1 -requests-mock==1.6.0 -requests==2.22.0 # via cfn-lint, docker, moto, requests-mock, responses +pytz==2019.2 # via datetime, moto +pyyaml==5.1.2 +requests-mock==1.7.0 +requests==2.22.0 # via docker, moto, requests-mock, responses responses==0.10.6 # via moto rsa==4.0 # via python-jose -s3transfer==0.2.0 # via boto3 -six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, freezegun, mock, moto, packaging, pytest, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client +s3transfer==0.2.1 # via boto3 +six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, freezegun, jsonschema, mock, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client smmap2==2.0.5 # via gitdb2 -stevedore==1.30.1 # via bandit -text-unidecode==1.2 # via faker +sshpubkeys==3.1.0 # via moto +stevedore==1.31.0 # via bandit +text-unidecode==1.3 # via faker toml==0.10.0 # via black -urllib3==1.25.3 # via botocore, requests +urllib3==1.25.5 # via botocore, requests wcwidth==0.1.7 # via pytest websocket-client==0.56.0 # via docker -werkzeug==0.15.4 # via flask, moto, pytest-flask -wrapt==1.11.1 # via aws-xray-sdk +werkzeug==0.16.0 # via flask, moto, pytest-flask +wrapt==1.11.2 # via aws-xray-sdk xmltodict==0.12.0 # via moto -zipp==0.5.1 # via importlib-metadata +zipp==0.6.0 # via importlib-metadata +zope.interface==4.6.0 
# via datetime + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.2.0 # via cfn-lint, jsonschema, zope.interface diff --git a/requirements.in b/requirements.in index d766b7a9..c7c79137 100644 --- a/requirements.in +++ b/requirements.in @@ -32,7 +32,7 @@ kombu<4.6.0 # Bug with inspecting active tasks: https://github.com/celery/kombu/ lockfile logmatic-python marshmallow-sqlalchemy -marshmallow +marshmallow<2.20.5 #schema duplicate issues https://github.com/marshmallow-code/marshmallow-sqlalchemy/issues/121 ndg-httpsclient paramiko # required for the SFTP destination plugin pem diff --git a/requirements.txt b/requirements.txt index c19c7b6e..db7e46a7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,21 +4,21 @@ # # pip-compile --no-index --output-file=requirements.txt requirements.in # -acme==0.34.2 +acme==0.38.0 alembic-autogenerate-enums==0.0.2 -alembic==1.0.10 # via flask-migrate -amqp==2.5.0 # via kombu -aniso8601==6.0.0 # via flask-restful -arrow==0.14.2 +alembic==1.2.0 # via flask-migrate +amqp==2.5.1 # via kombu +aniso8601==8.0.0 # via flask-restful +arrow==0.15.2 asn1crypto==0.24.0 # via cryptography asyncpool==1.0 -bcrypt==3.1.6 # via flask-bcrypt, paramiko -billiard==3.6.0.0 # via celery +bcrypt==3.1.7 # via flask-bcrypt, paramiko +billiard==3.6.1.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.160 -botocore==1.12.160 +boto3==1.9.232 +botocore==1.12.232 celery[redis]==4.3.0 -certifi==2019.3.9 +certifi==2019.9.11 certsrv==2.1.1 cffi==1.12.3 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests @@ -27,10 +27,10 @@ cloudflare==2.3.0 cryptography==2.7 dnspython3==1.15.0 dnspython==1.15.0 # via dnspython3 -docutils==0.14 # via botocore +docutils==0.15.2 # via botocore dyn==1.8.1 flask-bcrypt==0.7.1 -flask-cors==3.0.7 +flask-cors==3.0.8 flask-mail==0.9.1 flask-migrate==2.5.2 flask-principal==0.4.0 @@ -38,34 +38,34 @@ flask-replicated==1.3 flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.0 -flask==1.0.3 +flask==1.1.1 future==0.17.1 gunicorn==19.9.0 -hvac==0.9.1 +hvac==0.9.5 idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask javaobj-py3==0.3.0 # via pyjks jinja2==2.10.1 jmespath==0.9.4 # via boto3, botocore -josepy==1.1.0 # via acme +josepy==1.2.0 # via acme jsonlines==1.2.0 # via cloudflare kombu==4.5.0 lockfile==0.12.2 logmatic-python==0.1.7 -mako==1.0.11 # via alembic +mako==1.1.0 # via alembic markupsafe==1.1.1 # via jinja2, mako -marshmallow-sqlalchemy==0.16.3 -marshmallow==2.19.2 +marshmallow-sqlalchemy==0.19.0 +marshmallow==2.20.4 mock==3.0.5 # via acme ndg-httpsclient==0.5.1 -paramiko==2.4.2 -pem==19.1.0 -psycopg2==2.8.2 -pyasn1-modules==0.2.5 # via pyjks, python-ldap -pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, pyjks, python-ldap +paramiko==2.6.0 +pem==19.2.0 +psycopg2==2.8.3 +pyasn1-modules==0.2.6 # via pyjks, python-ldap +pyasn1==0.4.7 # via ndg-httpsclient, pyasn1-modules, pyjks, python-ldap pycparser==2.19 # via cffi -pycryptodomex==3.8.2 # via pyjks +pycryptodomex==3.9.0 # via pyjks pyjks==19.0.0 pyjwt==1.7.1 pynacl==1.3.0 # via paramiko @@ -75,20 +75,23 @@ python-dateutil==2.8.0 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic python-json-logger==0.1.11 # via logmatic-python python-ldap==3.2.0 -pytz==2019.1 # via acme, celery, flask-restful, pyrfc3339 -pyyaml==5.1 +pytz==2019.2 # via acme, celery, flask-restful, pyrfc3339 +pyyaml==5.1.2 raven[flask]==6.10.0 -redis==3.2.1 +redis==3.3.8 
requests-toolbelt==0.9.1 # via acme requests[security]==2.22.0 retrying==1.3.3 -s3transfer==0.2.0 # via boto3 +s3transfer==0.2.1 # via boto3 six==1.12.0 -sqlalchemy-utils==0.33.11 -sqlalchemy==1.3.4 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +sqlalchemy-utils==0.34.2 +sqlalchemy==1.3.8 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils tabulate==0.8.3 twofish==0.3.0 # via pyjks -urllib3==1.25.3 # via botocore, requests +urllib3==1.25.5 # via botocore, requests vine==1.3.0 # via amqp, celery -werkzeug==0.15.4 # via flask +werkzeug==0.16.0 # via flask xmltodict==0.12.0 + +# The following packages are considered to be unsafe in a requirements file: +# setuptools==41.2.0 # via acme, josepy From 96b2149433fb79f56567a4785f32b0d472173bb2 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 20 Sep 2019 15:22:45 -0700 Subject: [PATCH 327/357] removing unintended commit --- lemur/pending_certificates/schemas.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lemur/pending_certificates/schemas.py b/lemur/pending_certificates/schemas.py index 989178e9..68f22b4a 100644 --- a/lemur/pending_certificates/schemas.py +++ b/lemur/pending_certificates/schemas.py @@ -46,10 +46,10 @@ class PendingCertificateOutputSchema(LemurOutputSchema): # Note aliasing is the first step in deprecating these fields. notify = fields.Boolean() - active = fields.Boolean(attribute="notify", dump_only=True) + active = fields.Boolean(attribute="notify") cn = fields.String() - common_name = fields.String(attribute="cn", dump_only=True) + common_name = fields.String(attribute="cn") owner = fields.Email() From 86f661a8afab8fd93146c6af649ae82dd2388422 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 23 Sep 2019 12:36:08 -0700 Subject: [PATCH 328/357] With NLBs the DNS formatting has changed, which resulted in Lemur not getting the region correctly parsed --- lemur/plugins/lemur_aws/plugin.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 4414a62c..d4a4a6d4 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -40,7 +40,11 @@ from lemur.plugins.lemur_aws import iam, s3, elb, ec2 def get_region_from_dns(dns): - return dns.split(".")[-4] + # XXX.REGION.elb.amazonaws.com + if dns.endswith(".elb.amazonaws.com"): + return dns.split(".")[-4] + else: # NLBs have a different pattern on the dns XXXX.elb.REGION.amazonaws.com + return dns.split(".")[-3] def format_elb_cipher_policy_v2(policy): From 477db836f4ba7bbf3f029a2e1ebb86be23f1290f Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Mon, 23 Sep 2019 12:52:17 -0700 Subject: [PATCH 329/357] lint --- lemur/plugins/lemur_aws/plugin.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index d4a4a6d4..cf6c8643 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -40,10 +40,11 @@ from lemur.plugins.lemur_aws import iam, s3, elb, ec2 def get_region_from_dns(dns): - # XXX.REGION.elb.amazonaws.com + # XXX.REGION.elb.amazonaws.com if dns.endswith(".elb.amazonaws.com"): return dns.split(".")[-4] - else: # NLBs have a different pattern on the dns XXXX.elb.REGION.amazonaws.com + else: + # NLBs have a different pattern on the dns XXXX.elb.REGION.amazonaws.com return dns.split(".")[-3] From f0652ca6a9ff2c10e065e604fe0a03fee1251a40 Mon Sep 17 00:00:00 2001 
From: pmelse Date: Thu, 10 Oct 2019 15:49:31 -0400 Subject: [PATCH 330/357] bug fix for overwriting certificates --- lemur/plugins/lemur_sftp/plugin.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/lemur/plugins/lemur_sftp/plugin.py b/lemur/plugins/lemur_sftp/plugin.py index de8df427..9cc8140e 100644 --- a/lemur/plugins/lemur_sftp/plugin.py +++ b/lemur/plugins/lemur_sftp/plugin.py @@ -170,8 +170,17 @@ class SFTPDestinationPlugin(DestinationPlugin): current_app.logger.debug( "Uploading {0} to {1}".format(filename, dst_path_cn) ) - with sftp.open(dst_path_cn + "/" + filename, "w") as f: - f.write(data) + try: + with sftp.open(dst_path_cn + "/" + filename, "w") as f: + f.write(data) + except (PermissionError) as permerror: + if permerror.errno == 13: + current_app.logger.debug( + "Uploading {0} to {1} returned Permission Denied Error, making file writable and retrying".format(filename, dst_path_cn) + ) + sftp.chmod(dst_path_cn + "/" + filename, 0o600) + with sftp.open(dst_path_cn + "/" + filename, "w") as f: + f.write(data) # read only for owner, -r-------- sftp.chmod(dst_path_cn + "/" + filename, 0o400) From 6f96a8f5b0ccd6aa16d1a3a606bb4c25f4c7ab46 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Tue, 15 Oct 2019 15:47:21 -0700 Subject: [PATCH 331/357] updating requirements --- requirements-dev.txt | 6 +++--- requirements-docs.txt | 36 +++++++++++++++++----------------- requirements-tests.txt | 44 +++++++++++++++++++++--------------------- requirements.txt | 34 ++++++++++++++++---------------- 4 files changed, 60 insertions(+), 60 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 6dff5655..4e940357 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -30,11 +30,11 @@ requests==2.22.0 # via requests-toolbelt, twine six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit tqdm==4.36.1 # via twine -twine==1.15.0 -urllib3==1.25.5 # via requests +twine==2.0.0 +urllib3==1.25.6 # via requests virtualenv==16.7.5 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.6.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -# setuptools==41.2.0 # via twine +# setuptools==41.4.0 # via twine diff --git a/requirements-docs.txt b/requirements-docs.txt index 05cfb49c..260c8608 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,25 +4,25 @@ # # pip-compile --no-index --output-file=requirements-docs.txt requirements-docs.in # -acme==0.38.0 +acme==0.39.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 -alembic==1.2.0 -amqp==2.5.1 +alembic==1.2.1 +amqp==2.5.2 aniso8601==8.0.0 arrow==0.15.2 -asn1crypto==0.24.0 +asn1crypto==1.1.0 asyncpool==1.0 babel==2.7.0 # via sphinx bcrypt==3.1.7 billiard==3.6.1.0 blinker==1.4 -boto3==1.9.232 -botocore==1.12.232 +boto3==1.9.250 +botocore==1.12.250 celery[redis]==4.3.0 certifi==2019.9.11 certsrv==2.1.1 -cffi==1.12.3 +cffi==1.13.0 chardet==3.0.4 click==7.0 cloudflare==2.3.0 @@ -39,9 +39,9 @@ flask-principal==0.4.0 flask-replicated==1.3 flask-restful==0.3.7 flask-script==2.0.6 -flask-sqlalchemy==2.4.0 +flask-sqlalchemy==2.4.1 flask==1.1.1 -future==0.17.1 +future==0.18.0 gunicorn==19.9.0 hvac==0.9.5 idna==2.8 @@ -49,7 +49,7 @@ imagesize==1.1.0 # via sphinx inflection==0.3.1 itsdangerous==1.1.0 javaobj-py3==0.3.0 -jinja2==2.10.1 +jinja2==2.10.3 jmespath==0.9.4 josepy==1.2.0 jsonlines==1.2.0 @@ -66,7 +66,7 @@ packaging==19.2 # via sphinx paramiko==2.6.0 pem==19.2.0 psycopg2==2.8.3 
-pyasn1-modules==0.2.6 +pyasn1-modules==0.2.7 pyasn1==0.4.7 pycparser==2.19 pycryptodomex==3.9.0 @@ -80,16 +80,16 @@ pyrfc3339==1.1 python-dateutil==2.8.0 python-editor==1.0.4 python-json-logger==0.1.11 -pytz==2019.2 +pytz==2019.3 pyyaml==5.1.2 raven[flask]==6.10.0 -redis==3.3.8 +redis==3.3.11 requests-toolbelt==0.9.1 requests[security]==2.22.0 retrying==1.3.3 s3transfer==0.2.1 six==1.12.0 -snowballstemmer==1.9.1 # via sphinx +snowballstemmer==2.0.0 # via sphinx sphinx-rtd-theme==0.4.3 sphinx==2.2.0 sphinxcontrib-applehelp==1.0.1 # via sphinx @@ -100,13 +100,13 @@ sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.2 # via sphinx sphinxcontrib-serializinghtml==1.1.3 # via sphinx sqlalchemy-utils==0.34.2 -sqlalchemy==1.3.8 -tabulate==0.8.3 +sqlalchemy==1.3.10 +tabulate==0.8.5 twofish==0.3.0 -urllib3==1.25.5 +urllib3==1.25.6 vine==1.3.0 werkzeug==0.16.0 xmltodict==0.12.0 # The following packages are considered to be unsafe in a requirements file: -# setuptools==41.2.0 # via acme, josepy, sphinx +# setuptools==41.4.0 # via acme, josepy, sphinx diff --git a/requirements-tests.txt b/requirements-tests.txt index 29d272a0..e6dc53c5 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -5,45 +5,45 @@ # pip-compile --no-index --output-file=requirements-tests.txt requirements-tests.in # appdirs==1.4.3 # via black -asn1crypto==0.24.0 # via cryptography +asn1crypto==1.1.0 # via cryptography atomicwrites==1.3.0 # via pytest -attrs==19.1.0 # via black, jsonschema, pytest -aws-sam-translator==1.14.0 # via cfn-lint +attrs==19.3.0 # via black, jsonschema, pytest +aws-sam-translator==1.15.1 # via cfn-lint aws-xray-sdk==2.4.2 # via moto bandit==1.6.2 black==19.3b0 -boto3==1.9.232 # via aws-sam-translator, moto +boto3==1.9.250 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.232 # via aws-xray-sdk, boto3, moto, s3transfer +botocore==1.12.250 # via aws-xray-sdk, boto3, moto, s3transfer certifi==2019.9.11 # via requests -cffi==1.12.3 # via cryptography -cfn-lint==0.24.1 # via moto +cffi==1.13.0 # via cryptography +cfn-lint==0.24.4 # via moto chardet==3.0.4 # via requests click==7.0 # via black, flask coverage==4.5.4 cryptography==2.7 # via moto, sshpubkeys datetime==4.3 # via moto -docker==4.0.2 # via moto +docker==4.1.0 # via moto docutils==0.15.2 # via botocore -ecdsa==0.13.2 # via python-jose, sshpubkeys +ecdsa==0.13.3 # via python-jose, sshpubkeys factory-boy==2.12.0 -faker==2.0.2 +faker==2.0.3 fakeredis==1.0.5 flask==1.1.1 # via pytest-flask freezegun==0.3.12 -future==0.17.1 # via aws-xray-sdk, python-jose -gitdb2==2.0.5 # via gitpython -gitpython==3.0.2 # via bandit +future==0.18.0 # via aws-xray-sdk, python-jose +gitdb2==2.0.6 # via gitpython +gitpython==3.0.3 # via bandit idna==2.8 # via moto, requests -importlib-metadata==0.23 # via pluggy, pytest +importlib-metadata==0.23 # via jsonschema, pluggy, pytest itsdangerous==1.1.0 # via flask -jinja2==2.10.1 # via flask, moto +jinja2==2.10.3 # via flask, moto jmespath==0.9.4 # via boto3, botocore jsondiff==1.1.2 # via moto jsonpatch==1.24 # via cfn-lint jsonpickle==1.2 # via aws-xray-sdk jsonpointer==2.0 # via jsonpatch -jsonschema==3.0.2 # via aws-sam-translator, cfn-lint +jsonschema==3.1.1 # via aws-sam-translator, cfn-lint markupsafe==1.1.1 # via jinja2 mock==3.0.5 # via moto more-itertools==7.2.0 # via pytest, zipp @@ -59,13 +59,13 @@ pyflakes==2.1.1 pyparsing==2.4.2 # via packaging pyrsistent==0.15.4 # via jsonschema pytest-flask==0.15.0 -pytest-mock==1.10.4 -pytest==5.1.2 +pytest-mock==1.11.1 
+pytest==5.2.1 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto python-jose==3.0.1 # via moto -pytz==2019.2 # via datetime, moto +pytz==2019.3 # via datetime, moto pyyaml==5.1.2 -redis==3.3.8 # via fakeredis +redis==3.3.11 # via fakeredis requests-mock==1.7.0 requests==2.22.0 # via docker, moto, requests-mock, responses responses==0.10.6 # via moto @@ -78,7 +78,7 @@ sshpubkeys==3.1.0 # via moto stevedore==1.31.0 # via bandit text-unidecode==1.3 # via faker toml==0.10.0 # via black -urllib3==1.25.5 # via botocore, requests +urllib3==1.25.6 # via botocore, requests wcwidth==0.1.7 # via pytest websocket-client==0.56.0 # via docker werkzeug==0.16.0 # via flask, moto, pytest-flask @@ -88,4 +88,4 @@ zipp==0.6.0 # via importlib-metadata zope.interface==4.6.0 # via datetime # The following packages are considered to be unsafe in a requirements file: -# setuptools==41.2.0 # via cfn-lint, jsonschema, zope.interface +# setuptools==41.4.0 # via cfn-lint, jsonschema, zope.interface diff --git a/requirements.txt b/requirements.txt index db7e46a7..305fe7e1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,23 +4,23 @@ # # pip-compile --no-index --output-file=requirements.txt requirements.in # -acme==0.38.0 +acme==0.39.0 alembic-autogenerate-enums==0.0.2 -alembic==1.2.0 # via flask-migrate -amqp==2.5.1 # via kombu +alembic==1.2.1 # via flask-migrate +amqp==2.5.2 # via kombu aniso8601==8.0.0 # via flask-restful arrow==0.15.2 -asn1crypto==0.24.0 # via cryptography +asn1crypto==1.1.0 # via cryptography asyncpool==1.0 bcrypt==3.1.7 # via flask-bcrypt, paramiko billiard==3.6.1.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.232 -botocore==1.12.232 +boto3==1.9.250 +botocore==1.12.250 celery[redis]==4.3.0 certifi==2019.9.11 certsrv==2.1.1 -cffi==1.12.3 # via bcrypt, cryptography, pynacl +cffi==1.13.0 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask cloudflare==2.3.0 @@ -37,16 +37,16 @@ flask-principal==0.4.0 flask-replicated==1.3 flask-restful==0.3.7 flask-script==2.0.6 -flask-sqlalchemy==2.4.0 +flask-sqlalchemy==2.4.1 flask==1.1.1 -future==0.17.1 +future==0.18.0 gunicorn==19.9.0 hvac==0.9.5 idna==2.8 # via requests inflection==0.3.1 itsdangerous==1.1.0 # via flask javaobj-py3==0.3.0 # via pyjks -jinja2==2.10.1 +jinja2==2.10.3 jmespath==0.9.4 # via boto3, botocore josepy==1.2.0 # via acme jsonlines==1.2.0 # via cloudflare @@ -62,7 +62,7 @@ ndg-httpsclient==0.5.1 paramiko==2.6.0 pem==19.2.0 psycopg2==2.8.3 -pyasn1-modules==0.2.6 # via pyjks, python-ldap +pyasn1-modules==0.2.7 # via pyjks, python-ldap pyasn1==0.4.7 # via ndg-httpsclient, pyasn1-modules, pyjks, python-ldap pycparser==2.19 # via cffi pycryptodomex==3.9.0 # via pyjks @@ -75,23 +75,23 @@ python-dateutil==2.8.0 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic python-json-logger==0.1.11 # via logmatic-python python-ldap==3.2.0 -pytz==2019.2 # via acme, celery, flask-restful, pyrfc3339 +pytz==2019.3 # via acme, celery, flask-restful, pyrfc3339 pyyaml==5.1.2 raven[flask]==6.10.0 -redis==3.3.8 +redis==3.3.11 requests-toolbelt==0.9.1 # via acme requests[security]==2.22.0 retrying==1.3.3 s3transfer==0.2.1 # via boto3 six==1.12.0 sqlalchemy-utils==0.34.2 -sqlalchemy==1.3.8 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils -tabulate==0.8.3 +sqlalchemy==1.3.10 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +tabulate==0.8.5 twofish==0.3.0 # via pyjks -urllib3==1.25.5 # via botocore, requests 
+urllib3==1.25.6 # via botocore, requests vine==1.3.0 # via amqp, celery werkzeug==0.16.0 # via flask xmltodict==0.12.0 # The following packages are considered to be unsafe in a requirements file: -# setuptools==41.2.0 # via acme, josepy +# setuptools==41.4.0 # via acme, josepy From a076497cf0c8bb33be81ade490e7bb5b258eec5b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Oct 2019 22:49:30 +0000 Subject: [PATCH 332/357] Bump ecdsa from 0.13.2 to 0.13.3 Bumps [ecdsa](https://github.com/warner/python-ecdsa) from 0.13.2 to 0.13.3. - [Release notes](https://github.com/warner/python-ecdsa/releases) - [Changelog](https://github.com/warner/python-ecdsa/blob/master/NEWS) - [Commits](https://github.com/warner/python-ecdsa/compare/python-ecdsa-0.13.2...python-ecdsa-0.13.3) Signed-off-by: dependabot[bot] --- requirements-tests.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements-tests.txt b/requirements-tests.txt index 29d272a0..8f646bc0 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -25,7 +25,7 @@ cryptography==2.7 # via moto, sshpubkeys datetime==4.3 # via moto docker==4.0.2 # via moto docutils==0.15.2 # via botocore -ecdsa==0.13.2 # via python-jose, sshpubkeys +ecdsa==0.13.3 # via python-jose, sshpubkeys factory-boy==2.12.0 faker==2.0.2 fakeredis==1.0.5 @@ -88,4 +88,4 @@ zipp==0.6.0 # via importlib-metadata zope.interface==4.6.0 # via datetime # The following packages are considered to be unsafe in a requirements file: -# setuptools==41.2.0 # via cfn-lint, jsonschema, zope.interface +# setuptools==41.4.0 # via cfn-lint, jsonschema, zope.interface From b5ab87877b34e760de6c1d0abef166301bd9618a Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 17 Oct 2019 10:16:33 -0700 Subject: [PATCH 333/357] adding retry to acme setup client, since it can experience timeouts or other types of Connection Errors --- lemur/plugins/lemur_acme/plugin.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index f31ffdcb..e38870d8 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -32,6 +32,7 @@ from lemur.extensions import metrics, sentry from lemur.plugins import lemur_acme as acme from lemur.plugins.bases import IssuerPlugin from lemur.plugins.lemur_acme import cloudflare, dyn, route53, ultradns +from retrying import retry class AuthorizationRecord(object): @@ -197,6 +198,7 @@ class AcmeHandler(object): ) return pem_certificate, pem_certificate_chain + @retry(stop_max_attempt_number=5, wait_fixed=5000) def setup_acme_client(self, authority): if not authority.options: raise InvalidAuthority("Invalid authority. 
Options not set") From 10b600424efbabcdbe2727e1a94d3ba15778ae71 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 18 Oct 2019 08:45:32 -0700 Subject: [PATCH 334/357] refactoring searching for cert --- lemur/sources/service.py | 47 +++++++++++++++++++++++----------------- 1 file changed, 27 insertions(+), 20 deletions(-) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index d5bd7426..070e1a47 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -124,40 +124,47 @@ def sync_endpoints(source): return new, updated +def find_cert(certificate): + updated_by_hash = 0 + exists = False + + if certificate.get("search", None): + conditions = certificate.pop("search") + exists = certificate_service.get_by_attributes(conditions) + + if not exists and certificate.get("name"): + result = certificate_service.get_by_name(certificate["name"]) + if result: + exists = [result] + + if not exists and certificate.get("serial"): + exists = certificate_service.get_by_serial(certificate["serial"]) + + if not exists: + cert = parse_certificate(certificate["body"]) + matching_serials = certificate_service.get_by_serial(serial(cert)) + exists = find_matching_certificates_by_hash(cert, matching_serials) + updated_by_hash += 1 + + exists = [x for x in exists if x] + return exists, updated_by_hash + # TODO this is very slow as we don't batch update certificates def sync_certificates(source, user): - new, updated = 0, 0 + new, updated, updated_by_hash = 0, 0, 0 current_app.logger.debug("Retrieving certificates from {0}".format(source.label)) s = plugins.get(source.plugin_name) certificates = s.get_certificates(source.options) for certificate in certificates: - exists = False - - if certificate.get("search", None): - conditions = certificate.pop("search") - exists = certificate_service.get_by_attributes(conditions) - - if not exists and certificate.get("name"): - result = certificate_service.get_by_name(certificate["name"]) - if result: - exists = [result] - - if not exists and certificate.get("serial"): - exists = certificate_service.get_by_serial(certificate["serial"]) - - if not exists: - cert = parse_certificate(certificate["body"]) - matching_serials = certificate_service.get_by_serial(serial(cert)) - exists = find_matching_certificates_by_hash(cert, matching_serials) + exists, updated_by_hash = find_cert(certificate) if not certificate.get("owner"): certificate["owner"] = user.email certificate["creator"] = user - exists = [x for x in exists if x] if not exists: certificate_create(certificate, source) From d43e859c34ca61caca375485a5c0a912655d5474 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 18 Oct 2019 08:46:01 -0700 Subject: [PATCH 335/357] describing the cert for each endpoint, for better cert search --- lemur/plugins/lemur_aws/plugin.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index cf6c8643..a03e92a8 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -32,7 +32,9 @@ .. moduleauthor:: Mikhail Khodorovskiy .. 
moduleauthor:: Harm Weites """ +from acme.errors import ClientError from flask import current_app +from lemur.extensions import sentry, metrics from lemur.plugins import lemur_aws as aws from lemur.plugins.bases import DestinationPlugin, ExportDestinationPlugin, SourcePlugin @@ -110,6 +112,8 @@ def get_elb_endpoints(account_number, region, elb_dict): listener["Listener"]["SSLCertificateId"] ), ) + endpoint["certificate"] = get_elb_certificate_by_name(certificate_name=endpoint["certificate_name"], + account_number=account_number) if listener["PolicyNames"]: policy = elb.describe_load_balancer_policies( @@ -127,6 +131,28 @@ def get_elb_endpoints(account_number, region, elb_dict): return endpoints +def get_elb_certificate_by_name(certificate_name, account_number): + # certificate name may contain path, in which case we remove it + if "/" in certificate_name: + certificate_name = certificate_name.split('/')[1] + try: + cert = iam.get_certificate(certificate_name, account_number=account_number) + return dict( + body=cert["CertificateBody"], + chain=cert.get("CertificateChain"), + name=cert["ServerCertificateMetadata"]["ServerCertificateName"], + ) + except ClientError: + current_app.logger.warning( + "get_elb_certificate_failed: Unable to get certificate for {0}".format(certificate_name)) + sentry.captureException() + metrics.send( + "get_elb_certificate_failed", "counter", 1, + metric_tags={"certificate_name": certificate_name, "account_number": account_number} + ) + return None + + def get_elb_endpoints_v2(account_number, region, elb_dict): """ Retrieves endpoint information from elbv2 response data. @@ -153,6 +179,8 @@ def get_elb_endpoints_v2(account_number, region, elb_dict): port=listener["Port"], certificate_name=iam.get_name_from_arn(certificate["CertificateArn"]), ) + endpoint["certificate"] = get_elb_certificate_by_name(certificate_name=endpoint["certificate_name"], + account_number=account_number) if listener["SslPolicy"]: policy = elb.describe_ssl_policies_v2( From f075c5af3d7e6c8d5353186770b3b7bc05453b50 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 18 Oct 2019 08:48:11 -0700 Subject: [PATCH 336/357] in case no cert match via name-search, search via the cert itself (serial number, hash comparison) --- lemur/sources/service.py | 39 ++++++++++++++++++++++++++++++++------- 1 file changed, 32 insertions(+), 7 deletions(-) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index 070e1a47..23f2af72 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -66,7 +66,7 @@ def sync_update_destination(certificate, source): def sync_endpoints(source): - new, updated = 0, 0 + new, updated, updated_by_hash = 0, 0, 0 current_app.logger.debug("Retrieving endpoints from {0}".format(source.label)) s = plugins.get(source.plugin_name) @@ -89,6 +89,29 @@ def sync_endpoints(source): endpoint["certificate"] = certificate_service.get_by_name(certificate_name) + # if get cert by name failed, we attempt a search via serial number and hash comparison + # and link the endpoint certificate to Lemur certificate + if not endpoint["certificate"]: + certificate_attached_to_endpoint = endpoint.pop("certificate") + if certificate_attached_to_endpoint: + lemur_matching_cert, updated_by_hash_tmp = find_cert(certificate_attached_to_endpoint) + updated_by_hash += updated_by_hash_tmp + + if lemur_matching_cert: + endpoint["certificate"] = lemur_matching_cert[0] + + if len(lemur_matching_cert) > 1: + current_app.logger.error( + "Too Many Certificates Found. 
Name: {0} Endpoint: {1}".format( + certificate_name, endpoint["name"] + ) + ) + metrics.send("endpoint.certificate.conflict", + "counter", 1, + metric_tags={"cert": certificate_name, "endpoint": endpoint["name"], + "acct": s.get_option("accountNumber", source.options)}) + + # this indicates the we were not able to describe the endpoint cert if not endpoint["certificate"]: current_app.logger.error( "Certificate Not Found. Name: {0} Endpoint: {1}".format( @@ -97,7 +120,8 @@ def sync_endpoints(source): ) metrics.send("endpoint.certificate.not.found", "counter", 1, - metric_tags={"cert": certificate_name, "endpoint": endpoint["name"], "acct": s.get_option("accountNumber", source.options)}) + metric_tags={"cert": certificate_name, "endpoint": endpoint["name"], + "acct": s.get_option("accountNumber", source.options)}) continue policy = endpoint.pop("policy") @@ -122,7 +146,8 @@ def sync_endpoints(source): endpoint_service.update(exists.id, **endpoint) updated += 1 - return new, updated + return new, updated, updated_by_hash + def find_cert(certificate): updated_by_hash = 0 @@ -159,7 +184,7 @@ def sync_certificates(source, user): certificates = s.get_certificates(source.options) for certificate in certificates: - exists, updated_by_hash = find_cert(certificate) + exists, updated_by_hash = find_cert(certificate) if not certificate.get("owner"): certificate["owner"] = user.email @@ -179,12 +204,12 @@ def sync_certificates(source, user): certificate_update(e, source) updated += 1 - return new, updated + return new, updated, updated_by_hash def sync(source, user): - new_certs, updated_certs = sync_certificates(source, user) - new_endpoints, updated_endpoints = sync_endpoints(source) + new_certs, updated_certs, updated_certs_by_hash = sync_certificates(source, user) + new_endpoints, updated_endpoints, updated_endpoints_by_hash = sync_endpoints(source) source.last_run = arrow.utcnow() database.update(source) From 8aea257e6abb3f2d940ebf230fa81075c2425547 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 18 Oct 2019 09:24:49 -0700 Subject: [PATCH 337/357] optimizing the call to describe cert to only the few certs with the naming issue --- lemur/plugins/lemur_aws/plugin.py | 48 ++++++++++++++----------------- lemur/sources/service.py | 16 +++++++++-- 2 files changed, 35 insertions(+), 29 deletions(-) diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index a03e92a8..46c65c4f 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -112,8 +112,6 @@ def get_elb_endpoints(account_number, region, elb_dict): listener["Listener"]["SSLCertificateId"] ), ) - endpoint["certificate"] = get_elb_certificate_by_name(certificate_name=endpoint["certificate_name"], - account_number=account_number) if listener["PolicyNames"]: policy = elb.describe_load_balancer_policies( @@ -131,28 +129,6 @@ def get_elb_endpoints(account_number, region, elb_dict): return endpoints -def get_elb_certificate_by_name(certificate_name, account_number): - # certificate name may contain path, in which case we remove it - if "/" in certificate_name: - certificate_name = certificate_name.split('/')[1] - try: - cert = iam.get_certificate(certificate_name, account_number=account_number) - return dict( - body=cert["CertificateBody"], - chain=cert.get("CertificateChain"), - name=cert["ServerCertificateMetadata"]["ServerCertificateName"], - ) - except ClientError: - current_app.logger.warning( - "get_elb_certificate_failed: Unable to get certificate for 
{0}".format(certificate_name)) - sentry.captureException() - metrics.send( - "get_elb_certificate_failed", "counter", 1, - metric_tags={"certificate_name": certificate_name, "account_number": account_number} - ) - return None - - def get_elb_endpoints_v2(account_number, region, elb_dict): """ Retrieves endpoint information from elbv2 response data. @@ -179,8 +155,6 @@ def get_elb_endpoints_v2(account_number, region, elb_dict): port=listener["Port"], certificate_name=iam.get_name_from_arn(certificate["CertificateArn"]), ) - endpoint["certificate"] = get_elb_certificate_by_name(certificate_name=endpoint["certificate_name"], - account_number=account_number) if listener["SslPolicy"]: policy = elb.describe_ssl_policies_v2( @@ -299,6 +273,28 @@ class AWSSourcePlugin(SourcePlugin): account_number = self.get_option("accountNumber", options) iam.delete_cert(certificate.name, account_number=account_number) + def get_certificate_by_name(self, certificate_name, options): + account_number = self.get_option("accountNumber", options) + # certificate name may contain path, in which case we remove it + if "/" in certificate_name: + certificate_name = certificate_name.split('/')[1] + try: + cert = iam.get_certificate(certificate_name, account_number=account_number) + return dict( + body=cert["CertificateBody"], + chain=cert.get("CertificateChain"), + name=cert["ServerCertificateMetadata"]["ServerCertificateName"], + ) + except ClientError: + current_app.logger.warning( + "get_elb_certificate_failed: Unable to get certificate for {0}".format(certificate_name)) + sentry.captureException() + metrics.send( + "get_elb_certificate_failed", "counter", 1, + metric_tags={"certificate_name": certificate_name, "account_number": account_number} + ) + return None + class AWSDestinationPlugin(DestinationPlugin): title = "AWS" diff --git a/lemur/sources/service.py b/lemur/sources/service.py index 23f2af72..498adfeb 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -15,7 +15,7 @@ from lemur.sources.models import Source from lemur.certificates.models import Certificate from lemur.certificates import service as certificate_service from lemur.endpoints import service as endpoint_service -from lemur.extensions import metrics +from lemur.extensions import metrics, sentry from lemur.destinations import service as destination_service from lemur.certificates.schemas import CertificateUploadInputSchema @@ -92,7 +92,18 @@ def sync_endpoints(source): # if get cert by name failed, we attempt a search via serial number and hash comparison # and link the endpoint certificate to Lemur certificate if not endpoint["certificate"]: - certificate_attached_to_endpoint = endpoint.pop("certificate") + certificate_attached_to_endpoint = None + try: + certificate_attached_to_endpoint = s.get_certificate_by_name(certificate_name, source.options) + except NotImplementedError: + current_app.logger.warning( + "Unable to describe server certificate for endpoints in source {0}:" + " plugin has not implemented 'get_certificate_by_name'".format( + source.label + ) + ) + sentry.captureException() + if certificate_attached_to_endpoint: lemur_matching_cert, updated_by_hash_tmp = find_cert(certificate_attached_to_endpoint) updated_by_hash += updated_by_hash_tmp @@ -111,7 +122,6 @@ def sync_endpoints(source): metric_tags={"cert": certificate_name, "endpoint": endpoint["name"], "acct": s.get_option("accountNumber", source.options)}) - # this indicates the we were not able to describe the endpoint cert if not endpoint["certificate"]: 
current_app.logger.error( "Certificate Not Found. Name: {0} Endpoint: {1}".format( From 1768aad9e2ee95ed28ecfa9837b7db3597ff8551 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 18 Oct 2019 10:17:58 -0700 Subject: [PATCH 338/357] capturing no such entity exception. --- lemur/plugins/lemur_aws/iam.py | 10 ++++++---- lemur/plugins/lemur_aws/plugin.py | 11 ++++++----- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/lemur/plugins/lemur_aws/iam.py b/lemur/plugins/lemur_aws/iam.py index 67c35262..13590ddd 100644 --- a/lemur/plugins/lemur_aws/iam.py +++ b/lemur/plugins/lemur_aws/iam.py @@ -10,7 +10,7 @@ import botocore from retrying import retry -from lemur.extensions import metrics +from lemur.extensions import metrics, sentry from lemur.plugins.lemur_aws.sts import sts_client @@ -122,9 +122,11 @@ def get_certificate(name, **kwargs): """ client = kwargs.pop("client") metrics.send("get_certificate", "counter", 1, metric_tags={"name": name}) - return client.get_server_certificate(ServerCertificateName=name)[ - "ServerCertificate" - ] + try: + return client.get_server_certificate(ServerCertificateName=name)["ServerCertificate"] + except client.exceptions.NoSuchEntityException: + sentry.captureException() + return None @sts_client("iam") diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 46c65c4f..86cd7912 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -280,11 +280,12 @@ class AWSSourcePlugin(SourcePlugin): certificate_name = certificate_name.split('/')[1] try: cert = iam.get_certificate(certificate_name, account_number=account_number) - return dict( - body=cert["CertificateBody"], - chain=cert.get("CertificateChain"), - name=cert["ServerCertificateMetadata"]["ServerCertificateName"], - ) + if cert: + return dict( + body=cert["CertificateBody"], + chain=cert.get("CertificateChain"), + name=cert["ServerCertificateMetadata"]["ServerCertificateName"], + ) except ClientError: current_app.logger.warning( "get_elb_certificate_failed: Unable to get certificate for {0}".format(certificate_name)) From 9037f8843072ed3ab1695d4bca681d38e01f46de Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 18 Oct 2019 11:02:41 -0700 Subject: [PATCH 339/357] just in case the path varies --- lemur/plugins/lemur_aws/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 86cd7912..98b01672 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -277,7 +277,7 @@ class AWSSourcePlugin(SourcePlugin): account_number = self.get_option("accountNumber", options) # certificate name may contain path, in which case we remove it if "/" in certificate_name: - certificate_name = certificate_name.split('/')[1] + certificate_name = certificate_name.split('/')[-1] try: cert = iam.get_certificate(certificate_name, account_number=account_number) if cert: From 14e13b512e70f1819f88964291744ab690417aff Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 18 Oct 2019 11:03:28 -0700 Subject: [PATCH 340/357] providing a count for conflicts --- lemur/sources/service.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index 498adfeb..8ba4ea0d 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -113,12 +113,12 @@ def sync_endpoints(source): if len(lemur_matching_cert) > 1: current_app.logger.error( - "Too Many 
Certificates Found. Name: {0} Endpoint: {1}".format( - certificate_name, endpoint["name"] + "Too Many Certificates Found{0}. Name: {1} Endpoint: {2}".format( + len(lemur_matching_cert), certificate_name, endpoint["name"] ) ) metrics.send("endpoint.certificate.conflict", - "counter", 1, + "gauge", len(lemur_matching_cert), metric_tags={"cert": certificate_name, "endpoint": endpoint["name"], "acct": s.get_option("accountNumber", source.options)}) From 06f4aed6939f8b7081b30002f705a5be5d2cdc62 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 18 Oct 2019 11:20:52 -0700 Subject: [PATCH 341/357] keeping track of certs found by hash --- lemur/sources/service.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index 8ba4ea0d..f69f70f5 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -221,6 +221,14 @@ def sync(source, user): new_certs, updated_certs, updated_certs_by_hash = sync_certificates(source, user) new_endpoints, updated_endpoints, updated_endpoints_by_hash = sync_endpoints(source) + metrics.send("sync.updated_certs_by_hash", + "gauge", updated_certs_by_hash, + metric_tags={"source": source.label}) + + metrics.send("sync.updated_endpoints_by_hash", + "gauge", updated_endpoints_by_hash, + metric_tags={"source": source.label}) + source.last_run = arrow.utcnow() database.update(source) From 0d983bd2b5f2e0ea7fec565b70bfa8f423358236 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 18 Oct 2019 15:39:36 -0700 Subject: [PATCH 342/357] missed edge case --- lemur/sources/service.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index f69f70f5..f4783313 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -78,7 +78,7 @@ def sync_endpoints(source): source.label ) ) - return new, updated + return new, updated, updated_by_hash for endpoint in endpoints: exists = endpoint_service.get_by_dnsname_and_port( From f803fab41300ba2acba3634b5d5795072112d7b0 Mon Sep 17 00:00:00 2001 From: Jay Zarfoss Date: Wed, 6 Nov 2019 10:14:49 -0800 Subject: [PATCH 343/357] add plugin to send atlas metric via redis --- lemur/plugins/lemur_atlas_redis/__init__.py | 4 + lemur/plugins/lemur_atlas_redis/plugin.py | 97 +++++++++++++++++++++ setup.py | 1 + 3 files changed, 102 insertions(+) create mode 100644 lemur/plugins/lemur_atlas_redis/__init__.py create mode 100644 lemur/plugins/lemur_atlas_redis/plugin.py diff --git a/lemur/plugins/lemur_atlas_redis/__init__.py b/lemur/plugins/lemur_atlas_redis/__init__.py new file mode 100644 index 00000000..f8afd7e3 --- /dev/null +++ b/lemur/plugins/lemur_atlas_redis/__init__.py @@ -0,0 +1,4 @@ +try: + VERSION = __import__("pkg_resources").get_distribution(__name__).version +except Exception as e: + VERSION = "unknown" diff --git a/lemur/plugins/lemur_atlas_redis/plugin.py b/lemur/plugins/lemur_atlas_redis/plugin.py new file mode 100644 index 00000000..9a36743f --- /dev/null +++ b/lemur/plugins/lemur_atlas_redis/plugin.py @@ -0,0 +1,97 @@ +""" +.. module: lemur.plugins.lemur_atlas_redis.plugin + :platform: Unix + :copyright: (c) 2018 by Netflix Inc., see AUTHORS for more + :license: Apache, see LICENSE for more details. + +.. 
moduleauthor:: Jay Zarfoss +""" +from time import time +from redis import Redis +import json +from datetime import datetime + +from flask import current_app +from lemur.plugins import lemur_atlas as atlas +from lemur.plugins.bases.metric import MetricPlugin + + +def millis_since_epoch(): + """ + current time since epoch in milliseconds + """ + epoch = datetime.utcfromtimestamp(0) + delta = datetime.now() - epoch + return int(delta.total_seconds() * 1000.0) + + +class AtlasMetricRedisPlugin(MetricPlugin): + title = "AtlasRedis" + slug = "atlas-metric-Redius" + description = "Adds support for sending key metrics to Atlas via local Redis" + version = atlas.VERSION + + author = "Jay Zarfoss" + author_url = "https://github.com/netflix/lemur" + + options = [ + { + "name": "redis_host", + "type": "str", + "required": False, + "help_message": "If no host is provided localhost is assumed", + "default": "localhost", + }, + {"name": "redis_port", "type": "int", "required": False, "default": 28527}, + ] + + metric_data = {} + redis_host = None + redis_port = None + + def submit( + self, metric_name, metric_type, metric_value, metric_tags=None, options=None + ): + if not options: + options = self.options + + valid_types = ["COUNTER", "GAUGE", "TIMER"] + if metric_type.upper() not in valid_types: + raise Exception( + "Invalid Metric Type for Atlas: '{metric}' choose from: {options}".format( + metric=metric_type, options=",".join(valid_types) + ) + ) + + if metric_tags: + if not isinstance(metric_tags, dict): + raise Exception( + "Invalid Metric Tags for Atlas: Tags must be in dict format" + ) + + self.metric_data["timestamp"] = millis_since_epoch() + self.metric_data["type"] = metric_type.upper() + self.metric_data["name"] = str(metric_name) + self.metric_data["tags"] = metric_tags + + if ( + metric_value == "NaN" + or isinstance(metric_value, int) + or isinstance(metric_value, float) + ): + self.metric_data["value"] = metric_value + else: + raise Exception("Invalid Metric Value for Atlas: Metric must be a number") + + self.redis_host = self.get_option("redis_host", options) + self.redis_port = self.get_option("redis_port", options) + + try: + r = Redis(host=self.redis_host, port=self.redis_port, socket_timeout=0.1) + r.rpush('atlas-agent', json.dumps(self.metric_data)) + except Exception: + current_app.logger.warning( + "AtlasMetricsRedis: could not post atlas metrics to AtlasRedis {host}:{port}".format( + host=self.redis_host, port=self.redis_port + ) + ) diff --git a/setup.py b/setup.py index a01c110f..1c61e9f9 100644 --- a/setup.py +++ b/setup.py @@ -147,6 +147,7 @@ setup( 'java_keystore_export = lemur.plugins.lemur_jks.plugin:JavaKeystoreExportPlugin', 'openssl_export = lemur.plugins.lemur_openssl.plugin:OpenSSLExportPlugin', 'atlas_metric = lemur.plugins.lemur_atlas.plugin:AtlasMetricPlugin', + 'atlas_metric_redis = lemur.plugins.lemur_atlas.plugin:AtlasMetricRedisPlugin', 'kubernetes_destination = lemur.plugins.lemur_kubernetes.plugin:KubernetesDestinationPlugin', 'cryptography_issuer = lemur.plugins.lemur_cryptography.plugin:CryptographyIssuerPlugin', 'cfssl_issuer = lemur.plugins.lemur_cfssl.plugin:CfsslIssuerPlugin', From 113c9dd65744a4783e4ec2498797f2fe5e341061 Mon Sep 17 00:00:00 2001 From: Jay Zarfoss Date: Wed, 6 Nov 2019 10:42:59 -0800 Subject: [PATCH 344/357] atlas redis plugin typo cleanup and better exception handling --- lemur/plugins/lemur_atlas_redis/plugin.py | 10 +++++----- setup.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git 
a/lemur/plugins/lemur_atlas_redis/plugin.py b/lemur/plugins/lemur_atlas_redis/plugin.py index 9a36743f..e69ae672 100644 --- a/lemur/plugins/lemur_atlas_redis/plugin.py +++ b/lemur/plugins/lemur_atlas_redis/plugin.py @@ -6,7 +6,7 @@ .. moduleauthor:: Jay Zarfoss """ -from time import time + from redis import Redis import json from datetime import datetime @@ -27,7 +27,7 @@ def millis_since_epoch(): class AtlasMetricRedisPlugin(MetricPlugin): title = "AtlasRedis" - slug = "atlas-metric-Redius" + slug = "atlas-metric-redis" description = "Adds support for sending key metrics to Atlas via local Redis" version = atlas.VERSION @@ -89,9 +89,9 @@ class AtlasMetricRedisPlugin(MetricPlugin): try: r = Redis(host=self.redis_host, port=self.redis_port, socket_timeout=0.1) r.rpush('atlas-agent', json.dumps(self.metric_data)) - except Exception: + except Exception as e: current_app.logger.warning( - "AtlasMetricsRedis: could not post atlas metrics to AtlasRedis {host}:{port}".format( - host=self.redis_host, port=self.redis_port + "AtlasMetricsRedis: exception [{exception}] could not post atlas metrics to AtlasRedis [{host}:{port}], metric [{metricdata}]".format( + exception=e, host=self.redis_host, port=self.redis_port, metricdata=json.dumps(self.metric_data) ) ) diff --git a/setup.py b/setup.py index 1c61e9f9..90c0b2f8 100644 --- a/setup.py +++ b/setup.py @@ -147,7 +147,7 @@ setup( 'java_keystore_export = lemur.plugins.lemur_jks.plugin:JavaKeystoreExportPlugin', 'openssl_export = lemur.plugins.lemur_openssl.plugin:OpenSSLExportPlugin', 'atlas_metric = lemur.plugins.lemur_atlas.plugin:AtlasMetricPlugin', - 'atlas_metric_redis = lemur.plugins.lemur_atlas.plugin:AtlasMetricRedisPlugin', + 'atlas_metric_redis = lemur.plugins.lemur_atlas_redis.plugin:AtlasMetricRedisPlugin', 'kubernetes_destination = lemur.plugins.lemur_kubernetes.plugin:KubernetesDestinationPlugin', 'cryptography_issuer = lemur.plugins.lemur_cryptography.plugin:CryptographyIssuerPlugin', 'cfssl_issuer = lemur.plugins.lemur_cfssl.plugin:CfsslIssuerPlugin', From 00a0a27826f81ea7ca5859191dfaa8b7ef35efef Mon Sep 17 00:00:00 2001 From: Jay Zarfoss Date: Wed, 20 Nov 2019 09:44:31 -0800 Subject: [PATCH 345/357] used fixedName variable to transport db lookup optimization --- lemur/certificates/service.py | 3 +++ lemur/static/app/angular/certificates/view/view.js | 2 +- lemur/static/app/angular/certificates/view/view.tpl.html | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index 1a0cdc5a..0e91b563 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -393,6 +393,9 @@ def render(args): Certificate.cn.ilike(term), ) ) + elif "fixedName" in terms: + # only what matches the fixed name directly if a fixedname is provided + query = query.filter(Certificate.name == terms[1]) else: query = database.filter(query, Certificate, terms) diff --git a/lemur/static/app/angular/certificates/view/view.js b/lemur/static/app/angular/certificates/view/view.js index e4ae0314..0ee0d2c2 100644 --- a/lemur/static/app/angular/certificates/view/view.js +++ b/lemur/static/app/angular/certificates/view/view.js @@ -11,7 +11,7 @@ angular.module('lemur') controller: 'CertificatesViewController' }) .state('certificate', { - url: '/certificates/:name', + url: '/certificates/:fixedName', // use "fixedName" if in URL to indicate 'like' query can be avoided templateUrl: '/angular/certificates/view/view.tpl.html', controller: 'CertificatesViewController' }); diff --git 
a/lemur/static/app/angular/certificates/view/view.tpl.html b/lemur/static/app/angular/certificates/view/view.tpl.html index 9d5c7772..3f952aa2 100644 --- a/lemur/static/app/angular/certificates/view/view.tpl.html +++ b/lemur/static/app/angular/certificates/view/view.tpl.html @@ -52,7 +52,7 @@
- Permalink + Permalink From f188aea2c22c4342cb6d2610586be67590b43f86 Mon Sep 17 00:00:00 2001 From: Niels Bischof Date: Mon, 2 Dec 2019 06:22:09 -0500 Subject: [PATCH 346/357] typo in quickstart/index.rst --- docs/quickstart/index.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/quickstart/index.rst b/docs/quickstart/index.rst index 280bb612..01a5c7ca 100644 --- a/docs/quickstart/index.rst +++ b/docs/quickstart/index.rst @@ -31,7 +31,7 @@ If installing Lemur on a bare Ubuntu OS you will need to grab the following pack .. note:: PostgreSQL is only required if your database is going to be on the same host as the webserver. npm is needed if you're installing Lemur from the source (e.g., from git). -.. note:: Installing node from a package manager may creat the nodejs bin at /usr/bin/nodejs instead of /usr/bin/node If that is the case run the following +.. note:: Installing node from a package manager may create the nodejs bin at /usr/bin/nodejs instead of /usr/bin/node If that is the case run the following sudo ln -s /user/bin/nodejs /usr/bin/node Now, install Python ``virtualenv`` package: From 189e8b2725792e6f58c9d8399ebb0c50942b5d69 Mon Sep 17 00:00:00 2001 From: Ilya Labun Date: Fri, 13 Dec 2019 14:33:39 +0100 Subject: [PATCH 347/357] Eliminate subqueries when showing certificates list --- lemur/certificates/schemas.py | 25 +++++++++++++++++++ lemur/certificates/views.py | 3 ++- lemur/common/schema.py | 7 +++++- .../certificates/certificate/certificate.js | 10 +++++++- .../app/angular/certificates/view/view.js | 2 ++ .../angular/certificates/view/view.tpl.html | 2 +- 6 files changed, 45 insertions(+), 4 deletions(-) diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index 7f3c2ac0..c987e5fa 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -6,6 +6,8 @@ ..
moduleauthor:: Kevin Glisson """ from flask import current_app +from flask_restful import inputs +from flask_restful.reqparse import RequestParser from marshmallow import fields, validate, validates_schema, post_load, pre_load from marshmallow.exceptions import ValidationError @@ -285,6 +287,16 @@ class CertificateOutputSchema(LemurOutputSchema): rotation_policy = fields.Nested(RotationPolicyNestedOutputSchema) +class CertificateShortOutputSchema(LemurOutputSchema): + id = fields.Integer() + name = fields.String() + owner = fields.Email() + notify = fields.Boolean() + authority = fields.Nested(AuthorityNestedOutputSchema) + issuer = fields.String() + cn = fields.String() + + class CertificateUploadInputSchema(CertificateCreationSchema): name = fields.String() authority = fields.Nested(AssociatedAuthoritySchema, required=False) @@ -363,9 +375,22 @@ class CertificateRevokeSchema(LemurInputSchema): comments = fields.String() +certificates_list_request_parser = RequestParser() +certificates_list_request_parser.add_argument("short", type=inputs.boolean, default=False, location="args") + + +def certificates_list_output_schema_factory(): + args = certificates_list_request_parser.parse_args() + if args["short"]: + return certificates_short_output_schema + else: + return certificates_output_schema + + certificate_input_schema = CertificateInputSchema() certificate_output_schema = CertificateOutputSchema() certificates_output_schema = CertificateOutputSchema(many=True) +certificates_short_output_schema = CertificateShortOutputSchema(many=True) certificate_upload_input_schema = CertificateUploadInputSchema() certificate_export_input_schema = CertificateExportInputSchema() certificate_edit_input_schema = CertificateEditInputSchema() diff --git a/lemur/certificates/views.py b/lemur/certificates/views.py index 1a003e78..51f7f615 100644 --- a/lemur/certificates/views.py +++ b/lemur/certificates/views.py @@ -27,6 +27,7 @@ from lemur.certificates.schemas import ( certificates_output_schema, certificate_export_input_schema, certificate_edit_input_schema, + certificates_list_output_schema_factory, ) from lemur.roles import service as role_service @@ -250,7 +251,7 @@ class CertificatesList(AuthenticatedResource): self.reqparse = reqparse.RequestParser() super(CertificatesList, self).__init__() - @validate_schema(None, certificates_output_schema) + @validate_schema(None, certificates_list_output_schema_factory) def get(self): """ .. 
http:get:: /certificates diff --git a/lemur/common/schema.py b/lemur/common/schema.py index bfa0a091..ee1db464 100644 --- a/lemur/common/schema.py +++ b/lemur/common/schema.py @@ -169,7 +169,12 @@ def validate_schema(input_schema, output_schema): if not resp: return dict(message="No data found"), 404 - return unwrap_pagination(resp, output_schema), 200 + if callable(output_schema): + output_schema_to_use = output_schema() + else: + output_schema_to_use = output_schema + + return unwrap_pagination(resp, output_schema_to_use), 200 return decorated_function diff --git a/lemur/static/app/angular/certificates/certificate/certificate.js b/lemur/static/app/angular/certificates/certificate/certificate.js index 273fc9d5..21f61f22 100644 --- a/lemur/static/app/angular/certificates/certificate/certificate.js +++ b/lemur/static/app/angular/certificates/certificate/certificate.js @@ -371,4 +371,12 @@ angular.module('lemur') }); }); }; -}); +}) +.controller('CertificateInfoController', function ($scope, CertificateApi) { + $scope.fetchFullCertificate = function (certId) { + CertificateApi.get(certId).then(function (certificate) { + $scope.certificate = certificate; + }); + }; +}) +; diff --git a/lemur/static/app/angular/certificates/view/view.js b/lemur/static/app/angular/certificates/view/view.js index 0ee0d2c2..72a31618 100644 --- a/lemur/static/app/angular/certificates/view/view.js +++ b/lemur/static/app/angular/certificates/view/view.js @@ -28,6 +28,7 @@ angular.module('lemur') sorting: { id: 'desc' // initial sorting }, + short: true, filter: $scope.filter }, { total: 0, // length of data @@ -54,6 +55,7 @@ angular.module('lemur') sorting: { id: 'desc' // initial sorting }, + short: true, filter: $scope.filter }, { getData: function ($defer, params) { diff --git a/lemur/static/app/angular/certificates/view/view.tpl.html b/lemur/static/app/angular/certificates/view/view.tpl.html index 3f952aa2..7b0919f8 100644 --- a/lemur/static/app/angular/certificates/view/view.tpl.html +++ b/lemur/static/app/angular/certificates/view/view.tpl.html @@ -71,7 +71,7 @@
- + From 9fb4be12737989c3c0983838f3ab4057e479f0a5 Mon Sep 17 00:00:00 2001 From: pmelse Date: Fri, 27 Dec 2019 13:25:03 -0500 Subject: [PATCH 348/357] remove trailing whitespace --- lemur/plugins/lemur_sftp/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_sftp/plugin.py b/lemur/plugins/lemur_sftp/plugin.py index 9cc8140e..66784048 100644 --- a/lemur/plugins/lemur_sftp/plugin.py +++ b/lemur/plugins/lemur_sftp/plugin.py @@ -174,7 +174,7 @@ class SFTPDestinationPlugin(DestinationPlugin): with sftp.open(dst_path_cn + "/" + filename, "w") as f: f.write(data) except (PermissionError) as permerror: - if permerror.errno == 13: + if permerror.errno == 13: current_app.logger.debug( "Uploading {0} to {1} returned Permission Denied Error, making file writable and retrying".format(filename, dst_path_cn) ) From 1ccc15859fdc8b263aee5fd4315f79a1c601609e Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Fri, 3 Jan 2020 14:14:01 -0800 Subject: [PATCH 349/357] updating requirements. removing this pin, since the issue is now resolved. kombu<4.6.0 # Bug with inspecting active tasks: https://github.com/celery/kombu/issues/1051 --- requirements-dev.txt | 27 +++++++------- requirements-docs.txt | 74 +++++++++++++++++++------------------- requirements-tests.txt | 81 +++++++++++++++++++++--------------------- requirements.in | 1 - requirements.txt | 64 +++++++++++++++++---------------- 5 files changed, 125 insertions(+), 122 deletions(-) diff --git a/requirements-dev.txt b/requirements-dev.txt index 4e940357..d1423888 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -6,35 +6,36 @@ # aspy.yaml==1.3.0 # via pre-commit bleach==3.1.0 # via readme-renderer -certifi==2019.9.11 # via requests +certifi==2019.11.28 # via requests cfgv==2.0.1 # via pre-commit chardet==3.0.4 # via requests docutils==0.15.2 # via readme-renderer flake8==3.5.0 -identify==1.4.7 # via pre-commit +identify==1.4.9 # via pre-commit idna==2.8 # via requests -importlib-metadata==0.23 # via pre-commit +importlib-metadata==1.3.0 # via keyring, pre-commit, twine invoke==1.3.0 +keyring==21.0.0 # via twine mccabe==0.6.1 # via flake8 -more-itertools==7.2.0 # via zipp +more-itertools==8.0.2 # via zipp nodeenv==1.3.3 pkginfo==1.5.0.1 # via twine -pre-commit==1.18.3 +pre-commit==1.21.0 pycodestyle==2.3.1 # via flake8 pyflakes==1.6.0 # via flake8 -pygments==2.4.2 # via readme-renderer -pyyaml==5.1.2 +pygments==2.5.2 # via readme-renderer +pyyaml==5.2 readme-renderer==24.0 # via twine requests-toolbelt==0.9.1 # via twine requests==2.22.0 # via requests-toolbelt, twine -six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer +six==1.13.0 # via bleach, cfgv, pre-commit, readme-renderer toml==0.10.0 # via pre-commit -tqdm==4.36.1 # via twine -twine==2.0.0 -urllib3==1.25.6 # via requests -virtualenv==16.7.5 # via pre-commit +tqdm==4.41.1 # via twine +twine==3.1.1 +urllib3==1.25.7 # via requests +virtualenv==16.7.9 # via pre-commit webencodings==0.5.1 # via bleach zipp==0.6.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -# setuptools==41.4.0 # via twine +# setuptools diff --git a/requirements-docs.txt b/requirements-docs.txt index 260c8608..893965ca 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,29 +4,28 @@ # # pip-compile --no-index --output-file=requirements-docs.txt requirements-docs.in # -acme==0.39.0 +acme==1.0.0 alabaster==0.7.12 # via sphinx alembic-autogenerate-enums==0.0.2 -alembic==1.2.1 +alembic==1.3.2 
amqp==2.5.2 aniso8601==8.0.0 -arrow==0.15.2 -asn1crypto==1.1.0 +arrow==0.15.5 asyncpool==1.0 -babel==2.7.0 # via sphinx +babel==2.8.0 # via sphinx bcrypt==3.1.7 billiard==3.6.1.0 blinker==1.4 -boto3==1.9.250 -botocore==1.12.250 -celery[redis]==4.3.0 -certifi==2019.9.11 +boto3==1.10.46 +botocore==1.13.46 +celery[redis]==4.4.0 +certifi==2019.11.28 certsrv==2.1.1 -cffi==1.13.0 +cffi==1.13.2 chardet==3.0.4 click==7.0 -cloudflare==2.3.0 -cryptography==2.7 +cloudflare==2.3.1 +cryptography==2.8 dnspython3==1.15.0 dnspython==1.15.0 docutils==0.15.2 @@ -41,57 +40,59 @@ flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.1 flask==1.1.1 -future==0.18.0 -gunicorn==19.9.0 -hvac==0.9.5 +future==0.18.2 +gunicorn==20.0.4 +hvac==0.9.6 idna==2.8 -imagesize==1.1.0 # via sphinx +imagesize==1.2.0 # via sphinx +importlib-metadata==1.3.0 inflection==0.3.1 itsdangerous==1.1.0 -javaobj-py3==0.3.0 +javaobj-py3==0.4.0.1 jinja2==2.10.3 jmespath==0.9.4 josepy==1.2.0 jsonlines==1.2.0 -kombu==4.5.0 +kombu==4.6.7 lockfile==0.12.2 logmatic-python==0.1.7 mako==1.1.0 markupsafe==1.1.1 -marshmallow-sqlalchemy==0.19.0 +marshmallow-sqlalchemy==0.21.0 marshmallow==2.20.4 mock==3.0.5 +more-itertools==8.0.2 ndg-httpsclient==0.5.1 packaging==19.2 # via sphinx -paramiko==2.6.0 -pem==19.2.0 -psycopg2==2.8.3 +paramiko==2.7.1 +pem==19.3.0 +psycopg2==2.8.4 pyasn1-modules==0.2.7 -pyasn1==0.4.7 +pyasn1==0.4.8 pycparser==2.19 -pycryptodomex==3.9.0 -pygments==2.4.2 # via sphinx +pycryptodomex==3.9.4 +pygments==2.5.2 # via sphinx pyjks==19.0.0 pyjwt==1.7.1 pynacl==1.3.0 -pyopenssl==19.0.0 -pyparsing==2.4.2 # via packaging +pyopenssl==19.1.0 +pyparsing==2.4.6 # via packaging pyrfc3339==1.1 -python-dateutil==2.8.0 +python-dateutil==2.8.1 python-editor==1.0.4 python-json-logger==0.1.11 pytz==2019.3 -pyyaml==5.1.2 +pyyaml==5.2 raven[flask]==6.10.0 redis==3.3.11 requests-toolbelt==0.9.1 requests[security]==2.22.0 retrying==1.3.3 s3transfer==0.2.1 -six==1.12.0 +six==1.13.0 snowballstemmer==2.0.0 # via sphinx sphinx-rtd-theme==0.4.3 -sphinx==2.2.0 +sphinx==2.3.1 sphinxcontrib-applehelp==1.0.1 # via sphinx sphinxcontrib-devhelp==1.0.1 # via sphinx sphinxcontrib-htmlhelp==1.0.2 # via sphinx @@ -99,14 +100,15 @@ sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-qthelp==1.0.2 # via sphinx sphinxcontrib-serializinghtml==1.1.3 # via sphinx -sqlalchemy-utils==0.34.2 -sqlalchemy==1.3.10 -tabulate==0.8.5 +sqlalchemy-utils==0.36.1 +sqlalchemy==1.3.12 +tabulate==0.8.6 twofish==0.3.0 -urllib3==1.25.6 +urllib3==1.25.7 vine==1.3.0 werkzeug==0.16.0 xmltodict==0.12.0 +zipp==0.6.0 # The following packages are considered to be unsafe in a requirements file: -# setuptools==41.4.0 # via acme, josepy, sphinx +# setuptools diff --git a/requirements-tests.txt b/requirements-tests.txt index e6dc53c5..293bd350 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -5,37 +5,34 @@ # pip-compile --no-index --output-file=requirements-tests.txt requirements-tests.in # appdirs==1.4.3 # via black -asn1crypto==1.1.0 # via cryptography -atomicwrites==1.3.0 # via pytest attrs==19.3.0 # via black, jsonschema, pytest -aws-sam-translator==1.15.1 # via cfn-lint -aws-xray-sdk==2.4.2 # via moto +aws-sam-translator==1.19.1 # via cfn-lint +aws-xray-sdk==2.4.3 # via moto bandit==1.6.2 -black==19.3b0 -boto3==1.9.250 # via aws-sam-translator, moto +black==19.10b0 +boto3==1.10.46 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.12.250 # via aws-xray-sdk, boto3, moto, s3transfer -certifi==2019.9.11 # via requests 
-cffi==1.13.0 # via cryptography -cfn-lint==0.24.4 # via moto +botocore==1.13.46 # via aws-xray-sdk, boto3, moto, s3transfer +certifi==2019.11.28 # via requests +cffi==1.13.2 # via cryptography +cfn-lint==0.26.2 # via moto chardet==3.0.4 # via requests click==7.0 # via black, flask -coverage==4.5.4 -cryptography==2.7 # via moto, sshpubkeys -datetime==4.3 # via moto +coverage==5.0.1 +cryptography==2.8 # via moto, sshpubkeys docker==4.1.0 # via moto docutils==0.15.2 # via botocore -ecdsa==0.13.3 # via python-jose, sshpubkeys +ecdsa==0.15 # via python-jose, sshpubkeys factory-boy==2.12.0 -faker==2.0.3 -fakeredis==1.0.5 +faker==3.0.0 +fakeredis==1.1.0 flask==1.1.1 # via pytest-flask freezegun==0.3.12 -future==0.18.0 # via aws-xray-sdk, python-jose +future==0.18.2 # via aws-xray-sdk gitdb2==2.0.6 # via gitpython -gitpython==3.0.3 # via bandit +gitpython==3.0.5 # via bandit idna==2.8 # via moto, requests -importlib-metadata==0.23 # via jsonschema, pluggy, pytest +importlib-metadata==1.3.0 # via jsonschema, pluggy, pytest itsdangerous==1.1.0 # via flask jinja2==2.10.3 # via flask, moto jmespath==0.9.4 # via boto3, botocore @@ -43,49 +40,51 @@ jsondiff==1.1.2 # via moto jsonpatch==1.24 # via cfn-lint jsonpickle==1.2 # via aws-xray-sdk jsonpointer==2.0 # via jsonpatch -jsonschema==3.1.1 # via aws-sam-translator, cfn-lint +jsonschema==3.2.0 # via aws-sam-translator, cfn-lint markupsafe==1.1.1 # via jinja2 mock==3.0.5 # via moto -more-itertools==7.2.0 # via pytest, zipp -moto==1.3.13 +more-itertools==8.0.2 # via pytest, zipp +moto==1.3.14 nose==1.3.7 packaging==19.2 # via pytest -pbr==5.4.3 # via stevedore -pluggy==0.13.0 # via pytest -py==1.8.0 # via pytest -pyasn1==0.4.7 # via rsa +pathspec==0.7.0 # via black +pbr==5.4.4 # via stevedore +pluggy==0.13.1 # via pytest +py==1.8.1 # via pytest +pyasn1==0.4.8 # via python-jose, rsa pycparser==2.19 # via cffi pyflakes==2.1.1 -pyparsing==2.4.2 # via packaging -pyrsistent==0.15.4 # via jsonschema +pyparsing==2.4.6 # via packaging +pyrsistent==0.15.6 # via jsonschema pytest-flask==0.15.0 -pytest-mock==1.11.1 -pytest==5.2.1 -python-dateutil==2.8.0 # via botocore, faker, freezegun, moto -python-jose==3.0.1 # via moto -pytz==2019.3 # via datetime, moto -pyyaml==5.1.2 +pytest-mock==1.13.0 +pytest==5.3.2 +python-dateutil==2.8.1 # via botocore, faker, freezegun, moto +python-jose==3.1.0 # via moto +pytz==2019.3 # via moto +pyyaml==5.2 redis==3.3.11 # via fakeredis +regex==2019.12.20 # via black requests-mock==1.7.0 requests==2.22.0 # via docker, moto, requests-mock, responses -responses==0.10.6 # via moto +responses==0.10.9 # via moto rsa==4.0 # via python-jose s3transfer==0.2.1 # via boto3 -six==1.12.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, faker, fakeredis, freezegun, jsonschema, mock, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client +six==1.13.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, ecdsa, faker, fakeredis, freezegun, jsonschema, mock, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client smmap2==2.0.5 # via gitdb2 sortedcontainers==2.1.0 # via fakeredis sshpubkeys==3.1.0 # via moto stevedore==1.31.0 # via bandit text-unidecode==1.3 # via faker toml==0.10.0 # via black -urllib3==1.25.6 # via botocore, requests -wcwidth==0.1.7 # via pytest -websocket-client==0.56.0 # via docker +typed-ast==1.4.0 # via black +urllib3==1.25.7 # via botocore, requests +wcwidth==0.1.8 # via pytest 
+websocket-client==0.57.0 # via docker werkzeug==0.16.0 # via flask, moto, pytest-flask wrapt==1.11.2 # via aws-xray-sdk xmltodict==0.12.0 # via moto zipp==0.6.0 # via importlib-metadata -zope.interface==4.6.0 # via datetime # The following packages are considered to be unsafe in a requirements file: -# setuptools==41.4.0 # via cfn-lint, jsonschema, zope.interface +# setuptools diff --git a/requirements.in b/requirements.in index c7c79137..ed2093c9 100644 --- a/requirements.in +++ b/requirements.in @@ -28,7 +28,6 @@ gunicorn hvac # required for the vault destination plugin inflection jinja2 -kombu<4.6.0 # Bug with inspecting active tasks: https://github.com/celery/kombu/issues/1051 lockfile logmatic-python marshmallow-sqlalchemy diff --git a/requirements.txt b/requirements.txt index 305fe7e1..639c9377 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,27 +4,26 @@ # # pip-compile --no-index --output-file=requirements.txt requirements.in # -acme==0.39.0 +acme==1.0.0 alembic-autogenerate-enums==0.0.2 -alembic==1.2.1 # via flask-migrate +alembic==1.3.2 # via flask-migrate amqp==2.5.2 # via kombu aniso8601==8.0.0 # via flask-restful -arrow==0.15.2 -asn1crypto==1.1.0 # via cryptography +arrow==0.15.5 asyncpool==1.0 bcrypt==3.1.7 # via flask-bcrypt, paramiko billiard==3.6.1.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.9.250 -botocore==1.12.250 -celery[redis]==4.3.0 -certifi==2019.9.11 +boto3==1.10.46 +botocore==1.13.46 +celery[redis]==4.4.0 +certifi==2019.11.28 certsrv==2.1.1 -cffi==1.13.0 # via bcrypt, cryptography, pynacl +cffi==1.13.2 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests click==7.0 # via flask -cloudflare==2.3.0 -cryptography==2.7 +cloudflare==2.3.1 +cryptography==2.8 dnspython3==1.15.0 dnspython==1.15.0 # via dnspython3 docutils==0.15.2 # via botocore @@ -39,59 +38,62 @@ flask-restful==0.3.7 flask-script==2.0.6 flask-sqlalchemy==2.4.1 flask==1.1.1 -future==0.18.0 -gunicorn==19.9.0 -hvac==0.9.5 +future==0.18.2 +gunicorn==20.0.4 +hvac==0.9.6 idna==2.8 # via requests +importlib-metadata==1.3.0 # via kombu inflection==0.3.1 itsdangerous==1.1.0 # via flask -javaobj-py3==0.3.0 # via pyjks +javaobj-py3==0.4.0.1 # via pyjks jinja2==2.10.3 jmespath==0.9.4 # via boto3, botocore josepy==1.2.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.5.0 +kombu==4.6.7 # via celery lockfile==0.12.2 logmatic-python==0.1.7 mako==1.1.0 # via alembic markupsafe==1.1.1 # via jinja2, mako -marshmallow-sqlalchemy==0.19.0 +marshmallow-sqlalchemy==0.21.0 marshmallow==2.20.4 mock==3.0.5 # via acme +more-itertools==8.0.2 # via zipp ndg-httpsclient==0.5.1 -paramiko==2.6.0 -pem==19.2.0 -psycopg2==2.8.3 +paramiko==2.7.1 +pem==19.3.0 +psycopg2==2.8.4 pyasn1-modules==0.2.7 # via pyjks, python-ldap -pyasn1==0.4.7 # via ndg-httpsclient, pyasn1-modules, pyjks, python-ldap +pyasn1==0.4.8 # via ndg-httpsclient, pyasn1-modules, pyjks, python-ldap pycparser==2.19 # via cffi -pycryptodomex==3.9.0 # via pyjks +pycryptodomex==3.9.4 # via pyjks pyjks==19.0.0 pyjwt==1.7.1 pynacl==1.3.0 # via paramiko -pyopenssl==19.0.0 +pyopenssl==19.1.0 pyrfc3339==1.1 # via acme -python-dateutil==2.8.0 # via alembic, arrow, botocore +python-dateutil==2.8.1 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic python-json-logger==0.1.11 # via logmatic-python python-ldap==3.2.0 pytz==2019.3 # via acme, celery, flask-restful, pyrfc3339 -pyyaml==5.1.2 +pyyaml==5.2 raven[flask]==6.10.0 redis==3.3.11 requests-toolbelt==0.9.1 # via acme requests[security]==2.22.0 
retrying==1.3.3 s3transfer==0.2.1 # via boto3 -six==1.12.0 -sqlalchemy-utils==0.34.2 -sqlalchemy==1.3.10 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils -tabulate==0.8.5 +six==1.13.0 +sqlalchemy-utils==0.36.1 +sqlalchemy==1.3.12 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +tabulate==0.8.6 twofish==0.3.0 # via pyjks -urllib3==1.25.6 # via botocore, requests +urllib3==1.25.7 # via botocore, requests vine==1.3.0 # via amqp, celery werkzeug==0.16.0 # via flask xmltodict==0.12.0 +zipp==0.6.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: -# setuptools==41.4.0 # via acme, josepy +# setuptools From 1537d591a8353f2bc14e9a652a2b557eba59e323 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 8 Jan 2020 14:42:16 -0800 Subject: [PATCH 350/357] Improved messaging to point out to the Auto Rotate option for certificate issuance and renewal. --- lemur/plugins/lemur_email/templates/expiration.html | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_email/templates/expiration.html b/lemur/plugins/lemur_email/templates/expiration.html index 3c500c38..f5185acd 100644 --- a/lemur/plugins/lemur_email/templates/expiration.html +++ b/lemur/plugins/lemur_email/templates/expiration.html @@ -106,7 +106,13 @@ - If the above certificates are still in use. You should re-issue and deploy new certificates as soon as possible. + Your action is required if the above certificates are still needed for your service. +

+ If your endpoints are still in use, you can access your certificate in Lemur, and enable Auto Rotate under the Action->Edit menu. + Lemur will take care of re-issuance and rotation of the certificate on the listed endpoints within one day. +

+ If your certificate is deployed with your service, you should re-issue and manually deploy a new certificate as soon as possible. + From 8be8c95b170b93da3316a20798d0993b21ca655b Mon Sep 17 00:00:00 2001 From: jenkins-x-bot Date: Thu, 9 Jan 2020 15:16:19 +0200 Subject: [PATCH 351/357] handled cfssl-key type error --- lemur/plugins/lemur_cfssl/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_cfssl/plugin.py b/lemur/plugins/lemur_cfssl/plugin.py index ae16d168..02f3159d 100644 --- a/lemur/plugins/lemur_cfssl/plugin.py +++ b/lemur/plugins/lemur_cfssl/plugin.py @@ -56,7 +56,7 @@ class CfsslIssuerPlugin(IssuerPlugin): try: hex_key = current_app.config.get("CFSSL_KEY") key = bytes.fromhex(hex_key) - except (ValueError, NameError): + except (ValueError, NameError, TypeError): # unable to find CFSSL_KEY in config, continue using normal sign method pass else: From 78f9c490ddedfd3b9a1efd8dfe9c8c9b92f752fc Mon Sep 17 00:00:00 2001 From: Ilya Labun Date: Mon, 13 Jan 2020 15:26:35 +0100 Subject: [PATCH 352/357] Fix Dockercompose for tests --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index b9439be7..fc83a034 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,7 +3,7 @@ RUN apt-get update RUN apt-get install -y make software-properties-common curl RUN curl -sL https://deb.nodesource.com/setup_7.x | bash - RUN apt-get update -RUN apt-get install -y nodejs libldap2-dev libsasl2-dev libldap2-dev libssl-dev +RUN apt-get install -y npm libldap2-dev libsasl2-dev libldap2-dev libssl-dev RUN pip install -U setuptools RUN pip install coveralls bandit WORKDIR /app From 58d8a145c30e117218edef6363f1c48d84004dce Mon Sep 17 00:00:00 2001 From: pmelse Date: Mon, 13 Jan 2020 22:13:30 -0500 Subject: [PATCH 353/357] update for #2857 workaround update for #2857 workaround --- docs/quickstart/index.rst | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/quickstart/index.rst b/docs/quickstart/index.rst index 01a5c7ca..82bfc357 100644 --- a/docs/quickstart/index.rst +++ b/docs/quickstart/index.rst @@ -180,6 +180,13 @@ Lemur provides a helpful command that will initialize your database for you. It In addition to creating a new user, Lemur also creates a few default email notifications. These notifications are based on a few configuration options such as ``LEMUR_SECURITY_TEAM_EMAIL``. They basically guarantee that every certificate within Lemur will send one expiration notification to the security team. +Your database installation requires the pg_trgm extension. If you do not have this installed already, you can allow the script to install this for you by adding the SUPERUSER permission to the lemur database user. + +.. code-block:: bash + sudo -u postgres -i + psql + postgres=# ALTER USER lemur WITH SUPERUSER + Additional notifications can be created through the UI or API. See :ref:`Creating Notifications ` and :ref:`Command Line Interface ` for details. **Make note of the password used as this will be used during first login to the Lemur UI.** @@ -189,10 +196,16 @@ Additional notifications can be created through the UI or API. See :ref:`Creati cd /www/lemur/lemur lemur init +.. note:: If you added the SUPERUSER permission to the lemur database user above, it is recommended you revoke that permission now. + +.. code-block:: bash + sudo -u postgres -i + psql + postgres=# ALTER USER lemur WITH NOSUPERUSER + .. 
note:: It is recommended that once the ``lemur`` user is created that you create individual users for every day access. There is currently no way for a user to self enroll for Lemur access, they must have an administrator create an account for them or be enrolled automatically through SSO. This can be done through the CLI or UI. See :ref:`Creating Users ` and :ref:`Command Line Interface ` for details. - Set Up a Reverse Proxy --------------------- From 1ed6ae539deea5e6d3513c30d982c52f5d6207db Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Wed, 15 Jan 2020 16:19:48 -0800 Subject: [PATCH 354/357] # possibility to default to a SIGNING_ALGORITHM for a given profile --- lemur/plugins/lemur_digicert/plugin.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lemur/plugins/lemur_digicert/plugin.py b/lemur/plugins/lemur_digicert/plugin.py index 5f52611f..88ea5b6b 100644 --- a/lemur/plugins/lemur_digicert/plugin.py +++ b/lemur/plugins/lemur_digicert/plugin.py @@ -171,6 +171,9 @@ def map_cis_fields(options, csr): "units": [options["organizational_unit"]], }, } + # possibility to default to a SIGNING_ALGORITHM for a given profile + if current_app.config.get("DIGICERT_CIS_SIGNING_ALGORITHMS", {}).get(options['authority'].name): + data["signature_hash"] = current_app.config.get("DIGICERT_CIS_SIGNING_ALGORITHMS", {}).get(options['authority'].name) return data From d6f41b6a99d8fedfc0b7505a95a921a2add22466 Mon Sep 17 00:00:00 2001 From: Hossein Shafagh Date: Thu, 16 Jan 2020 13:45:13 -0800 Subject: [PATCH 355/357] improving string formatting to avoid dangling white spaces and new lines --- lemur/plugins/lemur_aws/plugin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 98b01672..6669f641 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -212,7 +212,7 @@ class AWSSourcePlugin(SourcePlugin): if not regions: regions = ec2.get_regions(account_number=account_number) else: - regions = regions.split(",") + regions = "".join(regions.split()).split(",") for region in regions: elbs = elb.get_all_elbs(account_number=account_number, region=region) From 71f43dfcc13bb68d488a5b322bbf5f45e1447fa4 Mon Sep 17 00:00:00 2001 From: Gutttlt <43376523+Gutttlt@users.noreply.github.com> Date: Tue, 21 Jan 2020 08:40:54 +0100 Subject: [PATCH 356/357] Fixing "'Role' object has no attribute 'set_third_party'" error. 
--- lemur/auth/ldap.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lemur/auth/ldap.py b/lemur/auth/ldap.py index f4ceab03..ed87b76c 100644 --- a/lemur/auth/ldap.py +++ b/lemur/auth/ldap.py @@ -105,7 +105,7 @@ class LdapPrincipal: role = role_service.get_by_name(self.ldap_default_role) if role: if not role.third_party: - role = role.set_third_party(role.id, third_party_status=True) + role = role_service.set_third_party(role.id, third_party_status=True) roles.add(role) # update their 'roles' From 9984470b5846fbbe956e9b4432b1b01ca905d4c9 Mon Sep 17 00:00:00 2001 From: rajatsharma94 Date: Thu, 23 Jan 2020 12:35:57 +0100 Subject: [PATCH 357/357] fix fatal error in schema validator --- lemur/certificates/schemas.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lemur/certificates/schemas.py b/lemur/certificates/schemas.py index c987e5fa..8f15542d 100644 --- a/lemur/certificates/schemas.py +++ b/lemur/certificates/schemas.py @@ -119,6 +119,9 @@ class CertificateInputSchema(CertificateCreationSchema): @validates_schema def validate_authority(self, data): + if 'authority' not in data: + raise ValidationError("Missing Authority.") + if isinstance(data["authority"], str): raise ValidationError("Authority not found.")
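A minimal sketch, not part of the patch series, of how a third-party source plugin could implement the get_certificate_by_name() hook that PATCH 337 adds to the AWS source plugin and that sync_endpoints() (PATCH 336/337) calls before falling back to serial-number and hash matching. The plugin class and its in-memory inventory are hypothetical; only the method signature and the returned body/chain/name dictionary follow the patches above.

    from lemur.plugins.bases import SourcePlugin

    # Hypothetical inventory used only to keep the sketch self-contained.
    _KNOWN_CERTS = {
        "example-cert": "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----",
    }


    class ExampleSourcePlugin(SourcePlugin):
        title = "Example"
        slug = "example-source"
        description = "Illustrates the get_certificate_by_name() contract"
        version = "0.1"
        author = "example"
        author_url = "https://github.com/netflix/lemur"

        def get_certificates(self, options, **kwargs):
            # Regular certificate discovery; not relevant to this sketch.
            return []

        def get_endpoints(self, options, **kwargs):
            return []

        def get_certificate_by_name(self, certificate_name, options):
            # sync_endpoints() calls this when certificate_service.get_by_name()
            # finds no match; returning None (or raising NotImplementedError)
            # simply skips the hash-comparison fallback for that endpoint.
            body = _KNOWN_CERTS.get(certificate_name)
            if body:
                return dict(body=body, chain=None, name=certificate_name)
            return None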
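A short usage sketch for the lightweight certificate listing enabled by the short=true query parameter introduced in PATCH 347 (parsed server-side with inputs.boolean). The host, the /api/1 prefix and the bearer-token header below are assumptions made for illustration, not taken from the series.

    import requests

    # Placeholder deployment URL and token.
    LEMUR_API = "https://lemur.example.com/api/1"
    TOKEN = "<api-token>"

    response = requests.get(
        LEMUR_API + "/certificates",
        params={"short": "true"},  # server returns CertificateShortOutputSchema objects
        headers={"Authorization": "Bearer " + TOKEN},
    )
    response.raise_for_status()
    print(response.json())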
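A configuration sketch for the per-authority signature-hash default added in PATCH 354: DIGICERT_CIS_SIGNING_ALGORITHMS maps a Lemur authority name to the signature_hash sent to DigiCert CIS. The authority names and hash values below are illustrative assumptions only.

    # lemur.conf.py (sketch) -- authority names and hash values are hypothetical.
    DIGICERT_CIS_SIGNING_ALGORITHMS = {
        "DigiCertCISRSAAuthority": "sha256",
        "DigiCertCISECCAuthority": "sha384",
    }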