diff --git a/bower.json b/bower.json index f7d5500d..44cce994 100644 --- a/bower.json +++ b/bower.json @@ -11,12 +11,12 @@ "angular": "1.4.9", "json3": "~3.3", "es5-shim": "~4.5.0", - "bootstrap": "~3.3.6", "angular-bootstrap": "~1.1.1", "angular-animate": "~1.4.9", "restangular": "~1.5.1", "ng-table": "~0.8.3", "moment": "~2.11.1", + "bootstrap": "~3.4.1", "angular-loading-bar": "~0.8.0", "angular-moment": "~0.10.3", "moment-range": "~2.1.0", @@ -24,7 +24,7 @@ "angularjs-toaster": "~1.0.0", "angular-chart.js": "~0.8.8", "ngletteravatar": "~4.0.0", - "bootswatch": "~3.3.6", + "bootswatch": "~3.4.1", "fontawesome": "~4.5.0", "satellizer": "~0.13.4", "angular-ui-router": "~0.2.15", diff --git a/docs/administration.rst b/docs/administration.rst index 8f055147..157af478 100644 --- a/docs/administration.rst +++ b/docs/administration.rst @@ -735,6 +735,12 @@ The following configuration properties are required to use the Digicert issuer p This is the default validity (in years), if no end date is specified. (Default: 1) +.. data:: DIGICERT_MAX_VALIDITY + :noindex: + + This is the maximum validity (in years). (Default: value of DIGICERT_DEFAULT_VALIDITY) + + .. data:: DIGICERT_PRIVATE :noindex: @@ -1008,6 +1014,18 @@ The following configuration properties are required to use the PowerDNS ACME Plu This is the number of times DNS Verification should be attempted (i.e. 20) + +.. data:: ACME_POWERDNS_VERIFY + :noindex: + + This configures how TLS certificates on the PowerDNS API target are validated. The PowerDNS Plugin depends on the PyPi requests library, which supports the following options for the verify parameter: + + True: Verifies the TLS certificate was issued by a known publicly-trusted CA. (Default) + + False: Disables certificate validation (Not Recommended) + + File/Dir path to CA Bundle: Verifies the TLS certificate was issued by a Certificate Authority in the provided CA bundle. + .. 
_CommandLineInterface: Command Line Interface diff --git a/docs/production/index.rst b/docs/production/index.rst index cd044ca4..b91ed6bd 100644 --- a/docs/production/index.rst +++ b/docs/production/index.rst @@ -390,6 +390,10 @@ Here are the Celery configuration variables that should be set:: CELERY_IMPORTS = ('lemur.common.celery') CELERY_TIMEZONE = 'UTC' +Do not forget to import crontab module in your configuration file:: + + from celery.task.schedules import crontab + You must start a single Celery scheduler instance and one or more worker instances in order to handle incoming tasks. The scheduler can be started with:: diff --git a/lemur/auth/views.py b/lemur/auth/views.py index e7f87356..eaed419d 100644 --- a/lemur/auth/views.py +++ b/lemur/auth/views.py @@ -127,6 +127,10 @@ def retrieve_user(user_api_url, access_token): # retrieve information about the current user. r = requests.get(user_api_url, params=user_params, headers=headers) + # Some IDPs, like "Keycloak", require a POST instead of a GET + if r.status_code == 400: + r = requests.post(user_api_url, data=user_params, headers=headers) + profile = r.json() user = user_service.get_by_email(profile["email"]) @@ -434,7 +438,7 @@ class OAuth2(Resource): verify_cert=verify_cert, ) - jwks_url = current_app.config.get("PING_JWKS_URL") + jwks_url = current_app.config.get("OAUTH2_JWKS_URL") error_code = validate_id_token(id_token, args["clientId"], jwks_url) if error_code: return error_code diff --git a/lemur/certificates/cli.py b/lemur/certificates/cli.py index b57ff175..54455eec 100644 --- a/lemur/certificates/cli.py +++ b/lemur/certificates/cli.py @@ -5,29 +5,18 @@ :license: Apache, see LICENSE for more details. .. 
moduleauthor:: Kevin Glisson """ -import sys import multiprocessing -from tabulate import tabulate -from sqlalchemy import or_ - +import sys from flask import current_app - -from flask_script import Manager from flask_principal import Identity, identity_changed - +from flask_script import Manager +from sqlalchemy import or_ +from tabulate import tabulate from lemur import database -from lemur.extensions import sentry -from lemur.extensions import metrics -from lemur.plugins.base import plugins -from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS -from lemur.deployment import service as deployment_service -from lemur.endpoints import service as endpoint_service -from lemur.notifications.messaging import send_rotation_notification -from lemur.domains.models import Domain from lemur.authorities.models import Authority -from lemur.certificates.schemas import CertificateOutputSchema from lemur.certificates.models import Certificate +from lemur.certificates.schemas import CertificateOutputSchema from lemur.certificates.service import ( reissue_certificate, get_certificate_primitives, @@ -35,9 +24,16 @@ from lemur.certificates.service import ( get_by_name, get_all_certs, get, + get_all_certs_attached_to_endpoint_without_autorotate, ) - from lemur.certificates.verify import verify_string +from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS +from lemur.deployment import service as deployment_service +from lemur.domains.models import Domain +from lemur.endpoints import service as endpoint_service +from lemur.extensions import sentry, metrics +from lemur.notifications.messaging import send_rotation_notification +from lemur.plugins.base import plugins manager = Manager(usage="Handles all certificate related tasks.") @@ -482,3 +478,38 @@ def check_revoked(): cert.status = "unknown" database.update(cert) + + +@manager.command +def automatically_enable_autorotate(): + """ + This function automatically enables auto-rotation for unexpired 
certificates that are + attached to an endpoint but do not have autorotate enabled. + + WARNING: This will overwrite the Auto-rotate toggle! + """ + log_data = { + "function": f"{__name__}.{sys._getframe().f_code.co_name}", + } + + permitted_authorities = current_app.config.get("ENABLE_AUTO_ROTATE_AUTHORITY", []) + + eligible_certs = get_all_certs_attached_to_endpoint_without_autorotate() + for cert in eligible_certs: + + if cert.authority_id not in permitted_authorities: + continue + + log_data["certificate"] = cert.name + log_data["certificate_id"] = cert.id + log_data["message"] = "Enabling auto-rotate for certificate" + current_app.logger.info(log_data) + # TODO: add the cert destination to the logging + metrics.send("automatically_enable_autorotate", + "counter", 1, + metric_tags={"certificate": cert.name, + "certificate_id": cert.id, + "authority_id": cert.authority_id, + "authority_name": Authority.get(cert.authority_id).name}) + cert.rotation = True + database.update(cert) diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index 0a76cd6b..58630ee6 100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -321,7 +321,8 @@ class Certificate(db.Model): @hybrid_property def expired(self): - if self.not_after <= arrow.utcnow(): + # can't compare offset-naive and offset-aware datetimes + if arrow.Arrow.fromdatetime(self.not_after) <= arrow.utcnow(): return True @expired.expression @@ -445,6 +446,9 @@ def update_destinations(target, value, initiator): """ destination_plugin = plugins.get(value.plugin_name) status = FAILURE_METRIC_STATUS + + if target.expired: + return try: if target.private_key or not destination_plugin.requires_key: destination_plugin.upload( diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index c8d732bb..dc463878 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -103,12 +103,13 @@ def get_all_certs(): return Certificate.query.all() -def 
get_all_pending_cleaning(source): +def get_all_pending_cleaning_expired(source): """ - Retrieves all certificates that are available for cleaning. + Retrieves all certificates that are available for cleaning. These are certificates which are expired and are not + attached to any endpoints. - :param source: - :return: + :param source: the source to search for certificates + :return: list of pending certificates """ return ( Certificate.query.filter(Certificate.sources.any(id=source.id)) @@ -118,6 +119,58 @@ def get_all_pending_cleaning(source): ) +def get_all_certs_attached_to_endpoint_without_autorotate(): + """ + Retrieves all certificates that are attached to an endpoint, but that do not have autorotate enabled. + + :return: list of certificates attached to an endpoint without autorotate + """ + return ( + Certificate.query.filter(Certificate.endpoints.any()) + .filter(Certificate.rotation == False) + .filter(Certificate.not_after >= arrow.now()) + .filter(not_(Certificate.replaced.any())) + .all() # noqa + ) + + +def get_all_pending_cleaning_expiring_in_days(source, days_to_expire): + """ + Retrieves all certificates that are available for cleaning, not attached to endpoint, + and within X days from expiration. + + :param days_to_expire: defines how many days till the certificate is expired + :param source: the source to search for certificates + :return: list of pending certificates + """ + expiration_window = arrow.now().shift(days=+days_to_expire).format("YYYY-MM-DD") + return ( + Certificate.query.filter(Certificate.sources.any(id=source.id)) + .filter(not_(Certificate.endpoints.any())) + .filter(Certificate.not_after < expiration_window) + .all() + ) + + +def get_all_pending_cleaning_issued_since_days(source, days_since_issuance): + """ + Retrieves all certificates that are available for cleaning: not attached to endpoint, and X days since issuance. 
+ + :param days_since_issuance: defines how many days since the certificate is issued + :param source: the source to search for certificates + :return: list of pending certificates + """ + not_in_use_window = ( + arrow.now().shift(days=-days_since_issuance).format("YYYY-MM-DD") + ) + return ( + Certificate.query.filter(Certificate.sources.any(id=source.id)) + .filter(not_(Certificate.endpoints.any())) + .filter(Certificate.date_created > not_in_use_window) + .all() + ) + + def get_all_pending_reissue(): """ Retrieves all certificates that need to be rotated. @@ -332,9 +385,11 @@ def render(args): show_expired = args.pop("showExpired") if show_expired != 1: - one_month_old = arrow.now()\ - .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1))\ + one_month_old = ( + arrow.now() + .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1)) .format("YYYY-MM-DD") + ) query = query.filter(Certificate.not_after > one_month_old) time_range = args.pop("time_range") diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 4af33d86..5df470ab 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -10,27 +10,27 @@ command: celery -A lemur.common.celery worker --loglevel=info -l DEBUG -B import copy import sys import time -from datetime import datetime, timezone, timedelta - from celery import Celery +from celery.app.task import Context from celery.exceptions import SoftTimeLimitExceeded +from celery.signals import task_failure, task_received, task_revoked, task_success +from datetime import datetime, timezone, timedelta from flask import current_app from lemur.authorities.service import get as get_authority +from lemur.certificates import cli as cli_certificate from lemur.common.redis import RedisHandler from lemur.destinations import service as destinations_service +from lemur.dns_providers import cli as cli_dns_providers +from lemur.endpoints import cli as cli_endpoints from lemur.extensions import metrics, sentry 
from lemur.factory import create_app +from lemur.notifications import cli as cli_notification from lemur.notifications.messaging import send_pending_failure_notification from lemur.pending_certificates import service as pending_certificate_service from lemur.plugins.base import plugins from lemur.sources.cli import clean, sync, validate_sources from lemur.sources.service import add_aws_destination_to_sources -from lemur.certificates import cli as cli_certificate -from lemur.dns_providers import cli as cli_dns_providers -from lemur.notifications import cli as cli_notification -from lemur.endpoints import cli as cli_endpoints - if current_app: flask_app = current_app @@ -67,7 +67,7 @@ def is_task_active(fun, task_id, args): from celery.task.control import inspect if not args: - args = '()' # empty args + args = "()" # empty args i = inspect() active_tasks = i.active() @@ -80,6 +80,37 @@ def is_task_active(fun, task_id, args): return False +def get_celery_request_tags(**kwargs): + request = kwargs.get("request") + sender_hostname = "unknown" + sender = kwargs.get("sender") + if sender: + try: + sender_hostname = sender.hostname + except AttributeError: + sender_hostname = vars(sender.request).get("origin", "unknown") + if request and not isinstance( + request, Context + ): # unlike others, task_revoked sends a Context for `request` + task_name = request.name + task_id = request.id + receiver_hostname = request.hostname + else: + task_name = sender.name + task_id = sender.request.id + receiver_hostname = sender.request.hostname + + tags = { + "task_name": task_name, + "task_id": task_id, + "sender_hostname": sender_hostname, + "receiver_hostname": receiver_hostname, + } + if kwargs.get("exception"): + tags["error"] = repr(kwargs["exception"]) + return tags + + @celery.task() def report_celery_last_success_metrics(): """ @@ -89,7 +120,6 @@ def report_celery_last_success_metrics(): report_celery_last_success_metrics should be ran periodically to emit metrics on when a 
task was last successful. Admins can then alert when tasks are not ran when intended. Admins should also alert when no metrics are emitted from this function. - """ function = f"{__name__}.{sys._getframe().f_code.co_name}" task_id = None @@ -108,15 +138,91 @@ def report_celery_last_success_metrics(): return current_time = int(time.time()) - schedule = current_app.config.get('CELERYBEAT_SCHEDULE') + schedule = current_app.config.get("CELERYBEAT_SCHEDULE") for _, t in schedule.items(): task = t.get("task") last_success = int(red.get(f"{task}.last_success") or 0) - metrics.send(f"{task}.time_since_last_success", 'gauge', current_time - last_success) + metrics.send( + f"{task}.time_since_last_success", "gauge", current_time - last_success + ) red.set( f"{function}.last_success", int(time.time()) ) # Alert if this metric is not seen - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + + +@task_received.connect +def report_number_pending_tasks(**kwargs): + """ + Report the number of pending tasks to our metrics broker every time a task is published. This metric can be used + for autoscaling workers. + https://docs.celeryproject.org/en/latest/userguide/signals.html#task-received + """ + with flask_app.app_context(): + metrics.send( + "celery.new_pending_task", + "TIMER", + 1, + metric_tags=get_celery_request_tags(**kwargs), + ) + + +@task_success.connect +def report_successful_task(**kwargs): + """ + Report a generic success metric as tasks to our metrics broker every time a task finished correctly. + This metric can be used for autoscaling workers. 
+ https://docs.celeryproject.org/en/latest/userguide/signals.html#task-success + """ + with flask_app.app_context(): + tags = get_celery_request_tags(**kwargs) + red.set(f"{tags['task_name']}.last_success", int(time.time())) + metrics.send("celery.successful_task", "TIMER", 1, metric_tags=tags) + + +@task_failure.connect +def report_failed_task(**kwargs): + """ + Report a generic failure metric as tasks to our metrics broker every time a task fails. + This metric can be used for alerting. + https://docs.celeryproject.org/en/latest/userguide/signals.html#task-failure + """ + with flask_app.app_context(): + log_data = { + "function": f"{__name__}.{sys._getframe().f_code.co_name}", + "Message": "Celery Task Failure", + } + + # Add traceback if exception info is in the kwargs + einfo = kwargs.get("einfo") + if einfo: + log_data["traceback"] = einfo.traceback + + error_tags = get_celery_request_tags(**kwargs) + + log_data.update(error_tags) + current_app.logger.error(log_data) + metrics.send("celery.failed_task", "TIMER", 1, metric_tags=error_tags) + + +@task_revoked.connect +def report_revoked_task(**kwargs): + """ + Report a generic failure metric as tasks to our metrics broker every time a task is revoked. + This metric can be used for alerting. 
+ https://docs.celeryproject.org/en/latest/userguide/signals.html#task-revoked + """ + with flask_app.app_context(): + log_data = { + "function": f"{__name__}.{sys._getframe().f_code.co_name}", + "Message": "Celery Task Revoked", + } + + error_tags = get_celery_request_tags(**kwargs) + + log_data.update(error_tags) + current_app.logger.error(log_data) + metrics.send("celery.revoked_task", "TIMER", 1, metric_tags=error_tags) @celery.task(soft_time_limit=600) @@ -217,15 +323,15 @@ def fetch_acme_cert(id): log_data["failed"] = failed log_data["wrong_issuer"] = wrong_issuer current_app.logger.debug(log_data) - metrics.send(f"{function}.resolved", 'gauge', new) - metrics.send(f"{function}.failed", 'gauge', failed) - metrics.send(f"{function}.wrong_issuer", 'gauge', wrong_issuer) + metrics.send(f"{function}.resolved", "gauge", new) + metrics.send(f"{function}.failed", "gauge", failed) + metrics.send(f"{function}.wrong_issuer", "gauge", wrong_issuer) print( "[+] Certificates: New: {new} Failed: {failed} Not using ACME: {wrong_issuer}".format( new=new, failed=failed, wrong_issuer=wrong_issuer ) ) - red.set(f'{function}.last_success', int(time.time())) + return log_data @celery.task() @@ -262,8 +368,8 @@ def fetch_all_pending_acme_certs(): current_app.logger.debug(log_data) fetch_acme_cert.delay(cert.id) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task() @@ -296,8 +402,8 @@ def remove_old_acme_certs(): current_app.logger.debug(log_data) pending_certificate_service.delete(cert) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task() @@ -328,11 +434,11 @@ def clean_all_sources(): current_app.logger.debug(log_data) clean_source.delay(source.label) - red.set(f'{function}.last_success', int(time.time())) 
- metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data -@celery.task(soft_time_limit=600) +@celery.task(soft_time_limit=3600) def clean_source(source): """ This celery task will clean the specified source. This is a destructive operation that will delete unused @@ -366,6 +472,7 @@ def clean_source(source): current_app.logger.error(log_data) sentry.captureException() metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) + return log_data @celery.task() @@ -395,8 +502,8 @@ def sync_all_sources(): current_app.logger.debug(log_data) sync_source.delay(source.label) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=7200) @@ -428,19 +535,23 @@ def sync_source(source): current_app.logger.debug(log_data) try: sync([source]) - metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source}) + metrics.send( + f"{function}.success", "counter", 1, metric_tags={"source": source} + ) except SoftTimeLimitExceeded: log_data["message"] = "Error syncing source: Time limit exceeded." 
current_app.logger.error(log_data) sentry.captureException() - metrics.send("sync_source_timeout", "counter", 1, metric_tags={"source": source}) + metrics.send( + "sync_source_timeout", "counter", 1, metric_tags={"source": source} + ) metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return log_data["message"] = "Done syncing source" current_app.logger.debug(log_data) - metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source}) - red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", "counter", 1, metric_tags={"source": source}) + return log_data @celery.task() @@ -477,8 +588,8 @@ def sync_source_destination(): log_data["message"] = "completed Syncing AWS destinations and sources" current_app.logger.debug(log_data) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ -515,8 +626,8 @@ def certificate_reissue(): log_data["message"] = "reissuance completed" current_app.logger.debug(log_data) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ -534,7 +645,6 @@ def certificate_rotate(): "function": function, "message": "rotating certificates", "task_id": task_id, - } if task_id and is_task_active(function, task_id, None): @@ -554,8 +664,8 @@ def certificate_rotate(): log_data["message"] = "rotation completed" current_app.logger.debug(log_data) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ -590,8 +700,8 @@ def endpoints_expire(): metrics.send("celery.timeout", "counter", 1, 
metric_tags={"function": function}) return - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=600) @@ -626,8 +736,8 @@ def get_all_zones(): metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ -662,8 +772,8 @@ def check_revoked(): metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ -690,7 +800,9 @@ def notify_expirations(): current_app.logger.debug(log_data) try: - cli_notification.expirations(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])) + cli_notification.expirations( + current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", []) + ) except SoftTimeLimitExceeded: log_data["message"] = "Notify expiring Time limit exceeded." current_app.logger.error(log_data) @@ -698,5 +810,29 @@ def notify_expirations(): metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data + + +@celery.task(soft_time_limit=3600) +def enable_autorotate_for_certs_attached_to_endpoint(): + """ + This celery task automatically enables autorotation for unexpired certificates that are + attached to an endpoint but do not have autorotate enabled. 
+ :return: + """ + function = f"{__name__}.{sys._getframe().f_code.co_name}" + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + log_data = { + "function": function, + "task_id": task_id, + "message": "Enabling autorotate to eligible certificates", + } + current_app.logger.debug(log_data) + + cli_certificate.automatically_enable_autorotate() + metrics.send(f"{function}.success", "counter", 1) + return log_data diff --git a/lemur/common/defaults.py b/lemur/common/defaults.py index d563dbd0..b9c88e49 100644 --- a/lemur/common/defaults.py +++ b/lemur/common/defaults.py @@ -2,6 +2,7 @@ import re import unicodedata from cryptography import x509 +from cryptography.hazmat.primitives.serialization import Encoding from flask import current_app from lemur.common.utils import is_selfsigned @@ -71,12 +72,20 @@ def common_name(cert): :return: Common name or None """ try: - return cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)[ - 0 - ].value.strip() + subject_oid = cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME) + if len(subject_oid) > 0: + return subject_oid[0].value.strip() + return None except Exception as e: sentry.captureException() - current_app.logger.error("Unable to get common name! 
{0}".format(e)) + current_app.logger.error( + { + "message": "Unable to get common name", + "error": e, + "public_key": cert.public_bytes(Encoding.PEM).decode("utf-8") + }, + exc_info=True + ) def organization(cert): diff --git a/lemur/plugins/lemur_acme/plugin.py b/lemur/plugins/lemur_acme/plugin.py index 95689a13..3fc1df61 100644 --- a/lemur/plugins/lemur_acme/plugin.py +++ b/lemur/plugins/lemur_acme/plugin.py @@ -54,18 +54,30 @@ class AcmeHandler(object): current_app.logger.error(f"Unable to fetch DNS Providers: {e}") self.all_dns_providers = [] - def find_dns_challenge(self, host, authorizations): + def get_dns_challenges(self, host, authorizations): + """Get dns challenges for provided domain""" + + domain_to_validate, is_wildcard = self.strip_wildcard(host) dns_challenges = [] for authz in authorizations: - if not authz.body.identifier.value.lower() == host.lower(): + if not authz.body.identifier.value.lower() == domain_to_validate.lower(): + continue + if is_wildcard and not authz.body.wildcard: + continue + if not is_wildcard and authz.body.wildcard: continue for combo in authz.body.challenges: if isinstance(combo.chall, challenges.DNS01): dns_challenges.append(combo) + return dns_challenges - def maybe_remove_wildcard(self, host): - return host.replace("*.", "") + def strip_wildcard(self, host): + """Removes the leading *. and returns Host and whether it was removed or not (True/False)""" + prefix = "*." 
+ if host.startswith(prefix): + return host[len(prefix):], True + return host, False def maybe_add_extension(self, host, dns_provider_options): if dns_provider_options and dns_provider_options.get( @@ -86,9 +98,8 @@ class AcmeHandler(object): current_app.logger.debug("Starting DNS challenge for {0}".format(host)) change_ids = [] - - host_to_validate = self.maybe_remove_wildcard(host) - dns_challenges = self.find_dns_challenge(host_to_validate, order.authorizations) + dns_challenges = self.get_dns_challenges(host, order.authorizations) + host_to_validate, _ = self.strip_wildcard(host) host_to_validate = self.maybe_add_extension( host_to_validate, dns_provider_options ) @@ -172,7 +183,7 @@ class AcmeHandler(object): except (AcmeError, TimeoutError): sentry.captureException(extra={"order_url": str(order.uri)}) - metrics.send("request_certificate_error", "counter", 1) + metrics.send("request_certificate_error", "counter", 1, metric_tags={"uri": order.uri}) current_app.logger.error( f"Unable to resolve Acme order: {order.uri}", exc_info=True ) @@ -183,6 +194,11 @@ class AcmeHandler(object): else: raise + metrics.send("request_certificate_success", "counter", 1, metric_tags={"uri": order.uri}) + current_app.logger.info( + f"Successfully resolved Acme order: {order.uri}", exc_info=True + ) + pem_certificate = OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, OpenSSL.crypto.load_certificate( @@ -320,7 +336,7 @@ class AcmeHandler(object): ) dns_provider_options = json.loads(dns_provider.credentials) account_number = dns_provider_options.get("account_id") - host_to_validate = self.maybe_remove_wildcard(authz_record.host) + host_to_validate, _ = self.strip_wildcard(authz_record.host) host_to_validate = self.maybe_add_extension( host_to_validate, dns_provider_options ) @@ -352,7 +368,7 @@ class AcmeHandler(object): dns_provider_options = json.loads(dns_provider.credentials) account_number = dns_provider_options.get("account_id") dns_challenges = 
authz_record.dns_challenge - host_to_validate = self.maybe_remove_wildcard(authz_record.host) + host_to_validate, _ = self.strip_wildcard(authz_record.host) host_to_validate = self.maybe_add_extension( host_to_validate, dns_provider_options ) diff --git a/lemur/plugins/lemur_acme/powerdns.py b/lemur/plugins/lemur_acme/powerdns.py index f3ad9965..a5d02353 100644 --- a/lemur/plugins/lemur_acme/powerdns.py +++ b/lemur/plugins/lemur_acme/powerdns.py @@ -1,11 +1,10 @@ -import time -import requests import json import sys +import time import lemur.common.utils as utils import lemur.dns_providers.util as dnsutil - +import requests from flask import current_app from lemur.extensions import metrics, sentry @@ -17,7 +16,9 @@ REQUIRED_VARIABLES = [ class Zone: - """ This class implements a PowerDNS zone in JSON. """ + """ + This class implements a PowerDNS zone in JSON. + """ def __init__(self, _data): self._data = _data @@ -39,7 +40,9 @@ class Zone: class Record: - """ This class implements a PowerDNS record. """ + """ + This class implements a PowerDNS record. 
+ """ def __init__(self, _data): self._data = _data @@ -49,20 +52,30 @@ class Record: return self._data["name"] @property - def disabled(self): - return self._data["disabled"] + def type(self): + return self._data["type"] + + @property + def ttl(self): + return self._data["ttl"] @property def content(self): return self._data["content"] @property - def ttl(self): - return self._data["ttl"] + def disabled(self): + return self._data["disabled"] def get_zones(account_number): - """Retrieve authoritative zones from the PowerDNS API and return a list""" + """ + Retrieve authoritative zones from the PowerDNS API and return a list of zones + + :param account_number: + :raise: Exception + :return: list of Zone Objects + """ _check_conf() server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") path = f"/api/v1/servers/{server_id}/zones" @@ -90,44 +103,41 @@ def get_zones(account_number): def create_txt_record(domain, token, account_number): - """ Create a TXT record for the given domain and token and return a change_id tuple """ + """ + Create a TXT record for the given domain and token and return a change_id tuple + + :param domain: FQDN + :param token: challenge value + :param account_number: + :return: tuple of domain/token + """ _check_conf() - zone_name = _get_zone_name(domain, account_number) - server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") - zone_id = zone_name + "." - domain_id = domain + "." - path = f"/api/v1/servers/{server_id}/zones/{zone_id}" - payload = { - "rrsets": [ - { - "name": domain_id, - "type": "TXT", - "ttl": 300, - "changetype": "REPLACE", - "records": [ - { - "content": f"\"{token}\"", - "disabled": False - } - ], - "comments": [] - } - ] - } + function = sys._getframe().f_code.co_name log_data = { "function": function, "fqdn": domain, "token": token, } + + # Create new record + domain_id = domain + "." 
+ records = [Record({'name': domain_id, 'content': f"\"{token}\"", 'disabled': False})] + + # Get current records + cur_records = _get_txt_records(domain) + for record in cur_records: + if record.content != token: + records.append(record) + try: - _patch(path, payload) - log_data["message"] = "TXT record successfully created" + _patch_txt_records(domain, account_number, records) + log_data["message"] = "TXT record(s) successfully created" current_app.logger.debug(log_data) except Exception as e: sentry.captureException() log_data["Exception"] = e - log_data["message"] = "Unable to create TXT record" + log_data["message"] = "Unable to create TXT record(s)" current_app.logger.debug(log_data) change_id = (domain, token) @@ -136,8 +146,11 @@ def create_txt_record(domain, token, account_number): def wait_for_dns_change(change_id, account_number=None): """ - Checks the authoritative DNS Server to see if changes have propagated to DNS - Retries and waits until successful. + Checks the authoritative DNS Server to see if changes have propagated. + + :param change_id: tuple of domain/token + :param account_number: + :return: """ _check_conf() domain, token = change_id @@ -171,53 +184,115 @@ def wait_for_dns_change(change_id, account_number=None): def delete_txt_record(change_id, account_number, domain, token): - """ Delete the TXT record for the given domain and token """ + """ + Delete the TXT record for the given domain and token + + :param change_id: tuple of domain/token + :param account_number: + :param domain: FQDN + :param token: challenge to delete + :return: + """ _check_conf() - zone_name = _get_zone_name(domain, account_number) - server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") - zone_id = zone_name + "." - domain_id = domain + "." 
- path = f"/api/v1/servers/{server_id}/zones/{zone_id}" - payload = { - "rrsets": [ - { - "name": domain_id, - "type": "TXT", - "ttl": 300, - "changetype": "DELETE", - "records": [ - { - "content": f"\"{token}\"", - "disabled": False - } - ], - "comments": [] - } - ] - } + function = sys._getframe().f_code.co_name log_data = { "function": function, "fqdn": domain, - "token": token + "token": token, } - try: - _patch(path, payload) - log_data["message"] = "TXT record successfully deleted" - current_app.logger.debug(log_data) - except Exception as e: - sentry.captureException() - log_data["Exception"] = e - log_data["message"] = "Unable to delete TXT record" + + """ + Get existing TXT records matching the domain from DNS + The token to be deleted should already exist + There may be other records with different tokens as well + """ + cur_records = _get_txt_records(domain) + found = False + new_records = [] + for record in cur_records: + if record.content == f"\"{token}\"": + found = True + else: + new_records.append(record) + + # Since the matching token is not in DNS, there is nothing to delete + if not found: + log_data["message"] = "Unable to delete TXT record: Token not found in existing TXT records" current_app.logger.debug(log_data) + return + + # The record to delete has been found AND there are other tokens set on the same domain + # Since we only want to delete one token value from the RRSet, we need to use the Patch command to + # overwrite the current RRSet with the existing records. 
+ elif new_records: + try: + _patch_txt_records(domain, account_number, new_records) + log_data["message"] = "TXT record successfully deleted" + current_app.logger.debug(log_data) + except Exception as e: + sentry.captureException() + log_data["Exception"] = e + log_data["message"] = "Unable to delete TXT record: patching exception" + current_app.logger.debug(log_data) + + # The record to delete has been found AND there are no other token values set on the same domain + # Use the Delete command to delete the whole RRSet. + else: + zone_name = _get_zone_name(domain, account_number) + server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") + zone_id = zone_name + "." + domain_id = domain + "." + path = f"/api/v1/servers/{server_id}/zones/{zone_id}" + payload = { + "rrsets": [ + { + "name": domain_id, + "type": "TXT", + "ttl": 300, + "changetype": "DELETE", + "records": [ + { + "content": f"\"{token}\"", + "disabled": False + } + ], + "comments": [] + } + ] + } + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "fqdn": domain, + "token": token + } + try: + _patch(path, payload) + log_data["message"] = "TXT record successfully deleted" + current_app.logger.debug(log_data) + except Exception as e: + sentry.captureException() + log_data["Exception"] = e + log_data["message"] = "Unable to delete TXT record" + current_app.logger.debug(log_data) def _check_conf(): + """ + Verifies required configuration variables are set + + :return: + """ utils.validate_conf(current_app, REQUIRED_VARIABLES) def _generate_header(): - """Generate a PowerDNS API header and return it as a dictionary""" + """ + Generate a PowerDNS API header and return it as a dictionary + + :return: Dict of header parameters + """ api_key_name = current_app.config.get("ACME_POWERDNS_APIKEYNAME") api_key = current_app.config.get("ACME_POWERDNS_APIKEY") headers = {api_key_name: api_key} @@ -225,7 +300,13 @@ def _generate_header(): def _get_zone_name(domain, 
account_number): - """Get most specific matching zone for the given domain and return as a String""" + """ + Get most specific matching zone for the given domain and return as a String + + :param domain: FQDN + :param account_number: + :return: FQDN of domain + """ zones = get_zones(account_number) zone_name = "" for z in zones: @@ -243,25 +324,113 @@ def _get_zone_name(domain, account_number): return zone_name +def _get_txt_records(domain): + """ + Retrieve TXT records for a given domain and return list of Record Objects + + :param domain: FQDN + :return: list of Record objects + """ + server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") + + path = f"/api/v1/servers/{server_id}/search-data?q={domain}&max=100&object_type=record" + function = sys._getframe().f_code.co_name + log_data = { + "function": function + } + try: + records = _get(path) + log_data["message"] = "Retrieved TXT Records Successfully" + current_app.logger.debug(log_data) + + except Exception as e: + sentry.captureException() + log_data["Exception"] = e + log_data["message"] = "Failed to Retrieve TXT Records" + current_app.logger.debug(log_data) + return [] + + txt_records = [] + for record in records: + cur_record = Record(record) + txt_records.append(cur_record) + return txt_records + + def _get(path, params=None): - """ Execute a GET request on the given URL (base_uri + path) and return response as JSON object """ + """ + Execute a GET request on the given URL (base_uri + path) and return response as JSON object + + :param path: Relative URL path + :param params: additional parameters + :return: json response + """ base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN") + verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True) resp = requests.get( f"{base_uri}{path}", headers=_generate_header(), params=params, - verify=True, + verify=verify_value ) resp.raise_for_status() return resp.json() +def _patch_txt_records(domain, account_number, records): + """ + Send 
Patch request to PowerDNS Server + + :param domain: FQDN + :param account_number: + :param records: List of Record objects + :return: + """ + domain_id = domain + "." + + # Create records + txt_records = [] + for record in records: + txt_records.append( + {'content': record.content, 'disabled': record.disabled} + ) + + # Create RRSet + payload = { + "rrsets": [ + { + "name": domain_id, + "type": "TXT", + "ttl": 300, + "changetype": "REPLACE", + "records": txt_records, + "comments": [] + } + ] + } + + # Create Txt Records + server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") + zone_name = _get_zone_name(domain, account_number) + zone_id = zone_name + "." + path = f"/api/v1/servers/{server_id}/zones/{zone_id}" + _patch(path, payload) + + def _patch(path, payload): - """ Execute a Patch request on the given URL (base_uri + path) with given payload """ + """ + Execute a Patch request on the given URL (base_uri + path) with given payload + + :param path: + :param payload: + :return: + """ base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN") + verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True) resp = requests.patch( f"{base_uri}{path}", data=json.dumps(payload), - headers=_generate_header() + headers=_generate_header(), + verify=verify_value ) resp.raise_for_status() diff --git a/lemur/plugins/lemur_acme/route53.py b/lemur/plugins/lemur_acme/route53.py index 55da5161..aaccb57e 100644 --- a/lemur/plugins/lemur_acme/route53.py +++ b/lemur/plugins/lemur_acme/route53.py @@ -35,9 +35,10 @@ def get_zones(client=None): zones = [] for page in paginator.paginate(): for zone in page["HostedZones"]: - zones.append( - zone["Name"][:-1] - ) # We need [:-1] to strip out the trailing dot. + if not zone["Config"]["PrivateZone"]: + zones.append( + zone["Name"][:-1] + ) # We need [:-1] to strip out the trailing dot. 
return zones diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index 990a556e..bec7be2b 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -1,11 +1,10 @@ import unittest +from unittest.mock import patch, Mock from cryptography.x509 import DNSName -from requests.models import Response - -from mock import MagicMock, Mock, patch - from lemur.plugins.lemur_acme import plugin, ultradns +from mock import MagicMock +from requests.models import Response class TestAcme(unittest.TestCase): @@ -23,11 +22,12 @@ class TestAcme(unittest.TestCase): } @patch("lemur.plugins.lemur_acme.plugin.len", return_value=1) - def test_find_dns_challenge(self, mock_len): + def test_get_dns_challenges(self, mock_len): assert mock_len from acme import challenges + host = "example.com" c = challenges.DNS01() mock_authz = Mock() @@ -35,9 +35,18 @@ class TestAcme(unittest.TestCase): mock_entry = Mock() mock_entry.chall = c mock_authz.body.resolved_combinations.append(mock_entry) - result = yield self.acme.find_dns_challenge(mock_authz) + result = yield self.acme.get_dns_challenges(host, mock_authz) self.assertEqual(result, mock_entry) + def test_strip_wildcard(self): + expected = ("example.com", False) + result = self.acme.strip_wildcard("example.com") + self.assertEqual(expected, result) + + expected = ("example.com", True) + result = self.acme.strip_wildcard("*.example.com") + self.assertEqual(expected, result) + def test_authz_record(self): a = plugin.AuthorizationRecord("host", "authz", "challenge", "id") self.assertEqual(type(a), plugin.AuthorizationRecord) @@ -45,9 +54,9 @@ class TestAcme(unittest.TestCase): @patch("acme.client.Client") @patch("lemur.plugins.lemur_acme.plugin.current_app") @patch("lemur.plugins.lemur_acme.plugin.len", return_value=1) - @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge") + 
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_dns_challenges") def test_start_dns_challenge( - self, mock_find_dns_challenge, mock_len, mock_app, mock_acme + self, mock_get_dns_challenges, mock_len, mock_app, mock_acme ): assert mock_len mock_order = Mock() @@ -65,7 +74,7 @@ class TestAcme(unittest.TestCase): mock_dns_provider.create_txt_record = Mock(return_value=1) values = [mock_entry] - iterable = mock_find_dns_challenge.return_value + iterable = mock_get_dns_challenges.return_value iterator = iter(values) iterable.__iter__.return_value = iterator result = self.acme.start_dns_challenge( @@ -78,7 +87,7 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change") @patch("time.sleep") def test_complete_dns_challenge_success( - self, mock_sleep, mock_wait_for_dns_change, mock_current_app, mock_acme + self, mock_sleep, mock_wait_for_dns_change, mock_current_app, mock_acme ): mock_dns_provider = Mock() mock_dns_provider.wait_for_dns_change = Mock(return_value=True) @@ -102,7 +111,7 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.plugin.current_app") @patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change") def test_complete_dns_challenge_fail( - self, mock_wait_for_dns_change, mock_current_app, mock_acme + self, mock_wait_for_dns_change, mock_current_app, mock_acme ): mock_dns_provider = Mock() mock_dns_provider.wait_for_dns_change = Mock(return_value=True) @@ -127,15 +136,15 @@ class TestAcme(unittest.TestCase): @patch("acme.client.Client") @patch("OpenSSL.crypto", return_value="mock_cert") @patch("josepy.util.ComparableX509") - @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge") + @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_dns_challenges") @patch("lemur.plugins.lemur_acme.plugin.current_app") def test_request_certificate( - self, - mock_current_app, - mock_find_dns_challenge, - mock_jose, - mock_crypto, - mock_acme, + self, + mock_current_app, + 
mock_get_dns_challenges, + mock_jose, + mock_crypto, + mock_acme, ): mock_cert_response = Mock() mock_cert_response.body = "123" @@ -172,7 +181,7 @@ class TestAcme(unittest.TestCase): assert result_client assert result_registration - @patch("lemur.plugins.lemur_acme.plugin.current_app") + @patch('lemur.plugins.lemur_acme.plugin.current_app') def test_get_domains_single(self, mock_current_app): options = {"common_name": "test.netflix.net"} result = self.acme.get_domains(options) @@ -256,11 +265,11 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.cloudflare.current_app") @patch("lemur.plugins.lemur_acme.plugin.dns_provider_service") def test_get_dns_provider( - self, - mock_dns_provider_service, - mock_current_app_cloudflare, - mock_current_app_dyn, - mock_current_app, + self, + mock_dns_provider_service, + mock_current_app_cloudflare, + mock_current_app_dyn, + mock_current_app, ): provider = plugin.ACMEIssuerPlugin() route53 = provider.get_dns_provider("route53") @@ -278,14 +287,14 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations") @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate") def test_get_ordered_certificate( - self, - mock_request_certificate, - mock_finalize_authorizations, - mock_get_authorizations, - mock_dns_provider_service, - mock_authorization_service, - mock_current_app, - mock_acme, + self, + mock_request_certificate, + mock_finalize_authorizations, + mock_get_authorizations, + mock_dns_provider_service, + mock_authorization_service, + mock_current_app, + mock_acme, ): mock_client = Mock() mock_acme.return_value = (mock_client, "") @@ -309,14 +318,14 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations") @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate") def test_get_ordered_certificates( - self, - mock_request_certificate, - mock_finalize_authorizations, - 
mock_get_authorizations, - mock_dns_provider_service, - mock_authorization_service, - mock_current_app, - mock_acme, + self, + mock_request_certificate, + mock_finalize_authorizations, + mock_get_authorizations, + mock_dns_provider_service, + mock_authorization_service, + mock_current_app, + mock_acme, ): mock_client = Mock() mock_acme.return_value = (mock_client, "") @@ -349,14 +358,14 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate") @patch("lemur.plugins.lemur_acme.plugin.authorization_service") def test_create_certificate( - self, - mock_authorization_service, - mock_request_certificate, - mock_finalize_authorizations, - mock_get_authorizations, - mock_current_app, - mock_dns_provider_service, - mock_acme, + self, + mock_authorization_service, + mock_request_certificate, + mock_finalize_authorizations, + mock_get_authorizations, + mock_current_app, + mock_dns_provider_service, + mock_acme, ): provider = plugin.ACMEIssuerPlugin() mock_authority = Mock() @@ -423,10 +432,10 @@ class TestAcme(unittest.TestCase): ultradns._post = Mock() ultradns._get = Mock() ultradns._get.return_value = {'zoneName': 'test.example.com.com', - 'rrSets': [{'ownerName': '_acme-challenge.test.example.com.', - 'rrtype': 'TXT (16)', 'ttl': 5, 'rdata': ['ABCDEFGHIJ']}], - 'queryInfo': {'sort': 'OWNER', 'reverse': False, 'limit': 100}, - 'resultInfo': {'totalCount': 1, 'offset': 0, 'returnedCount': 1}} + 'rrSets': [{'ownerName': '_acme-challenge.test.example.com.', + 'rrtype': 'TXT (16)', 'ttl': 5, 'rdata': ['ABCDEFGHIJ']}], + 'queryInfo': {'sort': 'OWNER', 'reverse': False, 'limit': 100}, + 'resultInfo': {'totalCount': 1, 'offset': 0, 'returnedCount': 1}} ultradns._delete = Mock() mock_metrics.send = Mock() ultradns.delete_txt_record(change_id, account_number, domain, token) diff --git a/lemur/plugins/lemur_acme/tests/test_powerdns.py b/lemur/plugins/lemur_acme/tests/test_powerdns.py index c8b0a11e..714cc938 100644 --- 
a/lemur/plugins/lemur_acme/tests/test_powerdns.py +++ b/lemur/plugins/lemur_acme/tests/test_powerdns.py @@ -1,5 +1,5 @@ import unittest -from mock import Mock, patch +from unittest.mock import patch, Mock from lemur.plugins.lemur_acme import plugin, powerdns @@ -48,13 +48,14 @@ class TestPowerdns(unittest.TestCase): self.assertEqual(result, zone) @patch("lemur.plugins.lemur_acme.powerdns.current_app") - def test_create_txt_record(self, mock_current_app): + def test_create_txt_record_write_only(self, mock_current_app): domain = "_acme_challenge.test.example.com" zone = "test.example.com" token = "ABCDEFGHIJ" account_number = "1234567890" change_id = (domain, token) powerdns._check_conf = Mock() + powerdns._get_txt_records = Mock(return_value=[]) powerdns._get_zone_name = Mock(return_value=zone) mock_current_app.logger.debug = Mock() mock_current_app.config.get = Mock(return_value="localhost") @@ -63,24 +64,74 @@ class TestPowerdns(unittest.TestCase): "function": "create_txt_record", "fqdn": domain, "token": token, - "message": "TXT record successfully created" + "message": "TXT record(s) successfully created" } result = powerdns.create_txt_record(domain, token, account_number) mock_current_app.logger.debug.assert_called_with(log_data) self.assertEqual(result, change_id) + @patch("lemur.plugins.lemur_acme.powerdns.current_app") + def test_create_txt_record_append(self, mock_current_app): + domain = "_acme_challenge.test.example.com" + zone = "test.example.com" + token = "ABCDEFGHIJ" + account_number = "1234567890" + change_id = (domain, token) + powerdns._check_conf = Mock() + cur_token = "123456" + cur_records = [powerdns.Record({'name': domain, 'content': f"\"{cur_token}\"", 'disabled': False})] + powerdns._get_txt_records = Mock(return_value=cur_records) + powerdns._get_zone_name = Mock(return_value=zone) + mock_current_app.logger.debug = Mock() + mock_current_app.config.get = Mock(return_value="localhost") + powerdns._patch = Mock() + log_data = { + "function": 
"create_txt_record", + "fqdn": domain, + "token": token, + "message": "TXT record(s) successfully created" + } + expected_path = f"/api/v1/servers/localhost/zones/test.example.com." + expected_payload = { + "rrsets": [ + { + "name": domain + ".", + "type": "TXT", + "ttl": 300, + "changetype": "REPLACE", + "records": [ + { + "content": f"\"{token}\"", + "disabled": False + }, + { + "content": f"\"{cur_token}\"", + "disabled": False + } + ], + "comments": [] + } + ] + } + + result = powerdns.create_txt_record(domain, token, account_number) + mock_current_app.logger.debug.assert_called_with(log_data) + powerdns._patch.assert_called_with(expected_path, expected_payload) + self.assertEqual(result, change_id) + @patch("lemur.plugins.lemur_acme.powerdns.dnsutil") @patch("lemur.plugins.lemur_acme.powerdns.current_app") @patch("lemur.extensions.metrics") @patch("time.sleep") def test_wait_for_dns_change(self, mock_sleep, mock_metrics, mock_current_app, mock_dnsutil): domain = "_acme-challenge.test.example.com" - token = "ABCDEFG" + token1 = "ABCDEFG" + token2 = "HIJKLMN" zone_name = "test.example.com" nameserver = "1.1.1.1" - change_id = (domain, token) + change_id = (domain, token1) powerdns._check_conf = Mock() - mock_records = (token,) + mock_records = (token2, token1) mock_current_app.config.get = Mock(return_value=1) powerdns._get_zone_name = Mock(return_value=zone_name) mock_dnsutil.get_authoritative_nameserver = Mock(return_value=nameserver) @@ -114,7 +165,7 @@ class TestPowerdns(unittest.TestCase): "function": "delete_txt_record", "fqdn": domain, "token": token, - "message": "TXT record successfully deleted" + "message": "Unable to delete TXT record: Token not found in existing TXT records" } powerdns.delete_txt_record(change_id, account_number, domain, token) mock_current_app.logger.debug.assert_called_with(log_data) diff --git a/lemur/plugins/lemur_aws/iam.py b/lemur/plugins/lemur_aws/iam.py index 13590ddd..8d80e020 100644 --- a/lemur/plugins/lemur_aws/iam.py +++ 
b/lemur/plugins/lemur_aws/iam.py @@ -24,6 +24,12 @@ def retry_throttled(exception): if exception.response["Error"]["Code"] == "NoSuchEntity": return False + # No need to retry deletion requests if there is a DeleteConflict error. + # This error indicates that the certificate is still attached to an entity + # and cannot be deleted. + if exception.response["Error"]["Code"] == "DeleteConflict": + return False + metrics.send("iam_retry", "counter", 1, metric_tags={"exception": str(exception)}) return True diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 6669f641..8692348a 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -216,22 +216,24 @@ class AWSSourcePlugin(SourcePlugin): for region in regions: elbs = elb.get_all_elbs(account_number=account_number, region=region) - current_app.logger.info( - "Describing classic load balancers in {0}-{1}".format( - account_number, region - ) - ) + current_app.logger.info({ + "message": "Describing classic load balancers", + "account_number": account_number, + "region": region, + "number_of_load_balancers": len(elbs) + }) for e in elbs: endpoints.extend(get_elb_endpoints(account_number, region, e)) # fetch advanced ELBs elbs_v2 = elb.get_all_elbs_v2(account_number=account_number, region=region) - current_app.logger.info( - "Describing advanced load balancers in {0}-{1}".format( - account_number, region - ) - ) + current_app.logger.info({ + "message": "Describing advanced load balancers", + "account_number": account_number, + "region": region, + "number_of_load_balancers": len(elbs_v2) + }) for e in elbs_v2: endpoints.extend(get_elb_endpoints_v2(account_number, region, e)) @@ -325,14 +327,17 @@ class AWSDestinationPlugin(DestinationPlugin): ] def upload(self, name, body, private_key, cert_chain, options, **kwargs): - iam.upload_cert( - name, - body, - private_key, - self.get_option("path", options), - cert_chain=cert_chain, - 
account_number=self.get_option("accountNumber", options), - ) + try: + iam.upload_cert( + name, + body, + private_key, + self.get_option("path", options), + cert_chain=cert_chain, + account_number=self.get_option("accountNumber", options), + ) + except ClientError: + sentry.captureException() def deploy(self, elb_name, account, region, certificate): pass diff --git a/lemur/plugins/lemur_digicert/plugin.py b/lemur/plugins/lemur_digicert/plugin.py index 88ea5b6b..e5c4b2ce 100644 --- a/lemur/plugins/lemur_digicert/plugin.py +++ b/lemur/plugins/lemur_digicert/plugin.py @@ -14,21 +14,17 @@ .. moduleauthor:: Kevin Glisson """ import json + import arrow -import requests - import pem -from retrying import retry - -from flask import current_app - +import requests from cryptography import x509 - -from lemur.extensions import metrics +from flask import current_app from lemur.common.utils import validate_conf -from lemur.plugins.bases import IssuerPlugin, SourcePlugin - +from lemur.extensions import metrics from lemur.plugins import lemur_digicert as digicert +from lemur.plugins.bases import IssuerPlugin, SourcePlugin +from retrying import retry def log_status_code(r, *args, **kwargs): @@ -64,24 +60,37 @@ def signature_hash(signing_algorithm): raise Exception("Unsupported signing algorithm.") -def determine_validity_years(end_date): +def determine_validity_years(years): """Given an end date determine how many years into the future that date is. 
+ :param years: + :return: validity in years + """ + default_years = current_app.config.get("DIGICERT_DEFAULT_VALIDITY", 1) + max_years = current_app.config.get("DIGICERT_MAX_VALIDITY", default_years) + + if years > max_years: + return max_years + if years not in [1, 2, 3]: + return default_years + return years + + +def determine_end_date(end_date): + """ + Determine appropriate end date :param end_date: - :return: str validity in years + :return: validity_end """ - now = arrow.utcnow() + default_years = current_app.config.get("DIGICERT_DEFAULT_VALIDITY", 1) + max_validity_end = arrow.utcnow().shift(years=current_app.config.get("DIGICERT_MAX_VALIDITY", default_years)) - if end_date < now.shift(years=+1): - return 1 - elif end_date < now.shift(years=+2): - return 2 - elif end_date < now.shift(years=+3): - return 3 + if not end_date: + end_date = arrow.utcnow().shift(years=default_years) - raise Exception( - "DigiCert issued certificates cannot exceed three" " years in validity" - ) + if end_date > max_validity_end: + end_date = max_validity_end + return end_date def get_additional_names(options): @@ -107,12 +116,6 @@ def map_fields(options, csr): :param csr: :return: dict or valid DigiCert options """ - if not options.get("validity_years"): - if not options.get("validity_end"): - options["validity_years"] = current_app.config.get( - "DIGICERT_DEFAULT_VALIDITY", 1 - ) - data = dict( certificate={ "common_name": options["common_name"], @@ -125,9 +128,11 @@ def map_fields(options, csr): data["certificate"]["dns_names"] = get_additional_names(options) if options.get("validity_years"): - data["validity_years"] = options["validity_years"] + data["validity_years"] = determine_validity_years(options.get("validity_years")) + elif options.get("validity_end"): + data["custom_expiration_date"] = determine_end_date(options.get("validity_end")).format("YYYY-MM-DD") else: - data["custom_expiration_date"] = options["validity_end"].format("YYYY-MM-DD") + data["validity_years"] = 
determine_validity_years(0) if current_app.config.get("DIGICERT_PRIVATE", False): if "product" in data: @@ -144,18 +149,15 @@ def map_cis_fields(options, csr): :param options: :param csr: - :return: + :return: data """ - if not options.get("validity_years"): - if not options.get("validity_end"): - options["validity_end"] = arrow.utcnow().shift( - years=current_app.config.get("DIGICERT_DEFAULT_VALIDITY", 1) - ) - options["validity_years"] = determine_validity_years(options["validity_end"]) + + if options.get("validity_years"): + validity_end = determine_end_date(arrow.utcnow().shift(years=options["validity_years"])) + elif options.get("validity_end"): + validity_end = determine_end_date(options.get("validity_end")) else: - options["validity_end"] = arrow.utcnow().shift( - years=options["validity_years"] - ) + validity_end = determine_end_date(False) data = { "profile_name": current_app.config.get("DIGICERT_CIS_PROFILE_NAMES", {}).get(options['authority'].name), @@ -164,7 +166,7 @@ def map_cis_fields(options, csr): "csr": csr, "signature_hash": signature_hash(options.get("signing_algorithm")), "validity": { - "valid_to": options["validity_end"].format("YYYY-MM-DDTHH:MM") + "Z" + "valid_to": validity_end.format("YYYY-MM-DDTHH:MM") + "Z" }, "organization": { "name": options["organization"], @@ -173,7 +175,8 @@ def map_cis_fields(options, csr): } # possibility to default to a SIGNING_ALGORITHM for a given profile if current_app.config.get("DIGICERT_CIS_SIGNING_ALGORITHMS", {}).get(options['authority'].name): - data["signature_hash"] = current_app.config.get("DIGICERT_CIS_SIGNING_ALGORITHMS", {}).get(options['authority'].name) + data["signature_hash"] = current_app.config.get("DIGICERT_CIS_SIGNING_ALGORITHMS", {}).get( + options['authority'].name) return data diff --git a/lemur/plugins/lemur_digicert/tests/test_digicert.py b/lemur/plugins/lemur_digicert/tests/test_digicert.py index 77b0a1fa..8bfd1dcf 100644 --- a/lemur/plugins/lemur_digicert/tests/test_digicert.py +++ 
b/lemur/plugins/lemur_digicert/tests/test_digicert.py @@ -1,117 +1,125 @@ -import pytest -import arrow import json -from unittest.mock import patch +from unittest.mock import patch, Mock +import arrow +import pytest +from cryptography import x509 from freezegun import freeze_time - +from lemur.plugins.lemur_digicert import plugin from lemur.tests.vectors import CSR_STR -from cryptography import x509 - -def test_map_fields_with_validity_end_and_start(app): - from lemur.plugins.lemur_digicert.plugin import map_fields - - names = [u"one.example.com", u"two.example.com", u"three.example.com"] - - options = { - "common_name": "example.com", - "owner": "bob@example.com", - "description": "test certificate", - "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, - "validity_end": arrow.get(2017, 5, 7), - "validity_start": arrow.get(2016, 10, 30), - } - - data = map_fields(options, CSR_STR) - - assert data == { - "certificate": { - "csr": CSR_STR, - "common_name": "example.com", - "dns_names": names, - "signature_hash": "sha256", - }, - "organization": {"id": 111111}, - "custom_expiration_date": arrow.get(2017, 5, 7).format("YYYY-MM-DD"), +def config_mock(*args): + values = { + "DIGICERT_ORG_ID": 111111, + "DIGICERT_PRIVATE": False, + "DIGICERT_DEFAULT_SIGNING_ALGORITHM": "sha256", + "DIGICERT_DEFAULT_VALIDITY": 1, + "DIGICERT_MAX_VALIDITY": 2, + "DIGICERT_CIS_PROFILE_NAMES": {"digicert": 'digicert'}, + "DIGICERT_CIS_SIGNING_ALGORITHMS": {"digicert": 'digicert'}, } + return values[args[0]] -def test_map_fields_with_validity_years(app): - from lemur.plugins.lemur_digicert.plugin import map_fields - - names = [u"one.example.com", u"two.example.com", u"three.example.com"] - - options = { - "common_name": "example.com", - "owner": "bob@example.com", - "description": "test certificate", - "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, - "validity_years": 2, - "validity_end": arrow.get(2017, 10, 30), - } - - data = 
map_fields(options, CSR_STR) - - assert data == { - "certificate": { - "csr": CSR_STR, - "common_name": "example.com", - "dns_names": names, - "signature_hash": "sha256", - }, - "organization": {"id": 111111}, - "validity_years": 2, - } +@patch("lemur.plugins.lemur_digicert.plugin.current_app") +def test_determine_validity_years(mock_current_app): + mock_current_app.config.get = Mock(return_value=2) + assert plugin.determine_validity_years(1) == 1 + assert plugin.determine_validity_years(0) == 2 + assert plugin.determine_validity_years(3) == 2 -def test_map_cis_fields(app, authority): - from lemur.plugins.lemur_digicert.plugin import map_cis_fields - - names = [u"one.example.com", u"two.example.com", u"three.example.com"] - - options = { - "common_name": "example.com", - "owner": "bob@example.com", - "description": "test certificate", - "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, - "organization": "Example, Inc.", - "organizational_unit": "Example Org", - "validity_end": arrow.get(2017, 5, 7), - "validity_start": arrow.get(2016, 10, 30), - "authority": authority, - } - - data = map_cis_fields(options, CSR_STR) - - assert data == { - "common_name": "example.com", - "csr": CSR_STR, - "additional_dns_names": names, - "signature_hash": "sha256", - "organization": {"name": "Example, Inc.", "units": ["Example Org"]}, - "validity": { - "valid_to": arrow.get(2017, 5, 7).format("YYYY-MM-DDTHH:MM") + "Z" - }, - "profile_name": None, - } - - options = { - "common_name": "example.com", - "owner": "bob@example.com", - "description": "test certificate", - "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, - "organization": "Example, Inc.", - "organizational_unit": "Example Org", - "validity_years": 2, - "authority": authority, - } - +@patch("lemur.plugins.lemur_digicert.plugin.current_app") +def test_determine_end_date(mock_current_app): + mock_current_app.config.get = Mock(return_value=2) with 
freeze_time(time_to_freeze=arrow.get(2016, 11, 3).datetime): - data = map_cis_fields(options, CSR_STR) + assert arrow.get(2018, 11, 3) == plugin.determine_end_date(0) + assert arrow.get(2018, 5, 7) == plugin.determine_end_date(arrow.get(2018, 5, 7)) + assert arrow.get(2018, 11, 3) == plugin.determine_end_date(arrow.get(2020, 5, 7)) - assert data == { + +@patch("lemur.plugins.lemur_digicert.plugin.current_app") +def test_map_fields_with_validity_years(mock_current_app): + mock_current_app.config.get = Mock(side_effect=config_mock) + + with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash: + mock_signature_hash.return_value = "sha256" + + names = [u"one.example.com", u"two.example.com", u"three.example.com"] + options = { + "common_name": "example.com", + "owner": "bob@example.com", + "description": "test certificate", + "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, + "validity_years": 2 + } + expected = { + "certificate": { + "csr": CSR_STR, + "common_name": "example.com", + "dns_names": names, + "signature_hash": "sha256", + }, + "organization": {"id": 111111}, + "validity_years": 2, + } + assert expected == plugin.map_fields(options, CSR_STR) + + +@patch("lemur.plugins.lemur_digicert.plugin.current_app") +def test_map_fields_with_validity_end_and_start(mock_current_app): + mock_current_app.config.get = Mock(side_effect=config_mock) + plugin.determine_end_date = Mock(return_value=arrow.get(2017, 5, 7)) + + with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash: + mock_signature_hash.return_value = "sha256" + + names = [u"one.example.com", u"two.example.com", u"three.example.com"] + options = { + "common_name": "example.com", + "owner": "bob@example.com", + "description": "test certificate", + "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, + "validity_end": arrow.get(2017, 5, 7), + "validity_start": arrow.get(2016, 10, 30), + } + + 
expected = { + "certificate": { + "csr": CSR_STR, + "common_name": "example.com", + "dns_names": names, + "signature_hash": "sha256", + }, + "organization": {"id": 111111}, + "custom_expiration_date": arrow.get(2017, 5, 7).format("YYYY-MM-DD"), + } + + assert expected == plugin.map_fields(options, CSR_STR) + + +@patch("lemur.plugins.lemur_digicert.plugin.current_app") +def test_map_cis_fields_with_validity_years(mock_current_app, authority): + mock_current_app.config.get = Mock(side_effect=config_mock) + plugin.determine_end_date = Mock(return_value=arrow.get(2018, 11, 3)) + + with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash: + mock_signature_hash.return_value = "sha256" + + names = [u"one.example.com", u"two.example.com", u"three.example.com"] + options = { + "common_name": "example.com", + "owner": "bob@example.com", + "description": "test certificate", + "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, + "organization": "Example, Inc.", + "organizational_unit": "Example Org", + "validity_years": 2, + "authority": authority, + } + + expected = { "common_name": "example.com", "csr": CSR_STR, "additional_dns_names": names, @@ -123,21 +131,59 @@ def test_map_cis_fields(app, authority): "profile_name": None, } + assert expected == plugin.map_cis_fields(options, CSR_STR) -def test_signature_hash(app): - from lemur.plugins.lemur_digicert.plugin import signature_hash - assert signature_hash(None) == "sha256" - assert signature_hash("sha256WithRSA") == "sha256" - assert signature_hash("sha384WithRSA") == "sha384" - assert signature_hash("sha512WithRSA") == "sha512" +@patch("lemur.plugins.lemur_digicert.plugin.current_app") +def test_map_cis_fields_with_validity_end_and_start(mock_current_app, app, authority): + mock_current_app.config.get = Mock(side_effect=config_mock) + plugin.determine_end_date = Mock(return_value=arrow.get(2017, 5, 7)) + + with 
patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash: + mock_signature_hash.return_value = "sha256" + + names = [u"one.example.com", u"two.example.com", u"three.example.com"] + options = { + "common_name": "example.com", + "owner": "bob@example.com", + "description": "test certificate", + "extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}}, + "organization": "Example, Inc.", + "organizational_unit": "Example Org", + "validity_end": arrow.get(2017, 5, 7), + "validity_start": arrow.get(2016, 10, 30), + "authority": authority + } + + expected = { + "common_name": "example.com", + "csr": CSR_STR, + "additional_dns_names": names, + "signature_hash": "sha256", + "organization": {"name": "Example, Inc.", "units": ["Example Org"]}, + "validity": { + "valid_to": arrow.get(2017, 5, 7).format("YYYY-MM-DDTHH:MM") + "Z" + }, + "profile_name": None, + } + + assert expected == plugin.map_cis_fields(options, CSR_STR) + + +@patch("lemur.plugins.lemur_digicert.plugin.current_app") +def test_signature_hash(mock_current_app, app): + mock_current_app.config.get = Mock(side_effect=config_mock) + assert plugin.signature_hash(None) == "sha256" + assert plugin.signature_hash("sha256WithRSA") == "sha256" + assert plugin.signature_hash("sha384WithRSA") == "sha384" + assert plugin.signature_hash("sha512WithRSA") == "sha512" with pytest.raises(Exception): - signature_hash("sdfdsf") + plugin.signature_hash("sdfdsf") def test_issuer_plugin_create_certificate( - certificate_="""\ + certificate_="""\ -----BEGIN CERTIFICATE----- abc -----END CERTIFICATE----- diff --git a/lemur/plugins/lemur_kubernetes/plugin.py b/lemur/plugins/lemur_kubernetes/plugin.py index 62ffffda..f7ff00f7 100644 --- a/lemur/plugins/lemur_kubernetes/plugin.py +++ b/lemur/plugins/lemur_kubernetes/plugin.py @@ -96,7 +96,7 @@ def build_secret(secret_format, secret_name, body, private_key, cert_chain): if secret_format == "TLS": secret["type"] = "kubernetes.io/tls" 
secret["data"] = { - "tls.crt": base64encode(cert_chain), + "tls.crt": base64encode(body), "tls.key": base64encode(private_key), } if secret_format == "Certificate": diff --git a/lemur/plugins/lemur_verisign/plugin.py b/lemur/plugins/lemur_verisign/plugin.py index 7bf517b7..f913861c 100644 --- a/lemur/plugins/lemur_verisign/plugin.py +++ b/lemur/plugins/lemur_verisign/plugin.py @@ -98,10 +98,14 @@ def process_options(options): :param options: :return: dict or valid verisign options """ + # if there is a config variable with VERISIGN_PRODUCT_ take the value as Cert product-type + # else default to "Server", to be compatible with former versions + authority = options.get("authority").name.upper() + product_type = current_app.config.get("VERISIGN_PRODUCT_{0}".format(authority), "Server") data = { "challenge": get_psuedo_random_string(), "serverType": "Apache", - "certProductType": "Server", + "certProductType": product_type, "firstName": current_app.config.get("VERISIGN_FIRST_NAME"), "lastName": current_app.config.get("VERISIGN_LAST_NAME"), "signatureAlgorithm": "sha256WithRSAEncryption", @@ -111,11 +115,6 @@ def process_options(options): data["subject_alt_names"] = ",".join(get_additional_names(options)) - if options.get("validity_end") > arrow.utcnow().shift(years=2): - raise Exception( - "Verisign issued certificates cannot exceed two years in validity" - ) - if options.get("validity_end"): # VeriSign (Symantec) only accepts strictly smaller than 2 year end date if options.get("validity_end") < arrow.utcnow().shift(years=2, days=-1): @@ -210,7 +209,7 @@ class VerisignIssuerPlugin(IssuerPlugin): response = self.session.post(url, data=data) try: - cert = handle_response(response.content)["Response"]["Certificate"] + response_dict = handle_response(response.content) except KeyError: metrics.send( "verisign_create_certificate_error", @@ -222,8 +221,13 @@ class VerisignIssuerPlugin(IssuerPlugin): extra={"common_name": issuer_options.get("common_name", "")} ) raise 
Exception(f"Error with Verisign: {response.content}") - # TODO add external id - return cert, current_app.config.get("VERISIGN_INTERMEDIATE"), None + authority = issuer_options.get("authority").name.upper() + cert = response_dict['Response']['Certificate'] + external_id = None + if 'Transaction_ID' in response_dict['Response'].keys(): + external_id = response_dict['Response']['Transaction_ID'] + chain = current_app.config.get("VERISIGN_INTERMEDIATE_{0}".format(authority), current_app.config.get("VERISIGN_INTERMEDIATE")) + return cert, chain, external_id @staticmethod def create_authority(options): diff --git a/lemur/sources/cli.py b/lemur/sources/cli.py index c41a1cf7..c415b567 100644 --- a/lemur/sources/cli.py +++ b/lemur/sources/cli.py @@ -54,6 +54,24 @@ def validate_sources(source_strings): return sources +def execute_clean(plugin, certificate, source): + try: + plugin.clean(certificate, source.options) + certificate.sources.remove(source) + + # If we want to remove the source from the certificate, we also need to clear any equivalent destinations to + # prevent Lemur from re-uploading the certificate. 
+ for destination in certificate.destinations: + if destination.label == source.label: + certificate.destinations.remove(destination) + + certificate_service.database.update(certificate) + return SUCCESS_METRIC_STATUS + except Exception as e: + current_app.logger.exception(e) + sentry.captureException() + + @manager.option( "-s", "--sources", @@ -132,11 +150,9 @@ def clean(source_strings, commit): s = plugins.get(source.plugin_name) if not hasattr(s, "clean"): - print( - "Cannot clean source: {0}, source plugin does not implement 'clean()'".format( - source.label - ) - ) + info_text = f"Cannot clean source: {source.label}, source plugin does not implement 'clean()'" + current_app.logger.warning(info_text) + print(info_text) continue start_time = time.time() @@ -144,35 +160,147 @@ def clean(source_strings, commit): print("[+] Staring to clean source: {label}!\n".format(label=source.label)) cleaned = 0 - for certificate in certificate_service.get_all_pending_cleaning(source): + certificates = certificate_service.get_all_pending_cleaning_expired(source) + for certificate in certificates: status = FAILURE_METRIC_STATUS if commit: - try: - s.clean(certificate, source.options) - certificate.sources.remove(source) - certificate_service.database.update(certificate) - status = SUCCESS_METRIC_STATUS - except Exception as e: - current_app.logger.exception(e) - sentry.captureException() + status = execute_clean(s, certificate, source) metrics.send( - "clean", + "certificate_clean", "counter", 1, - metric_tags={"source": source.label, "status": status}, + metric_tags={"status": status, "source": source.label, "certificate": certificate.name}, ) - - current_app.logger.warning( - "Removed {0} from source {1} during cleaning".format( - certificate.name, source.label - ) - ) - + current_app.logger.warning(f"Removed {certificate.name} from source {source.label} during cleaning") cleaned += 1 - print( - "[+] Finished cleaning source: {label}. 
Removed {cleaned} certificates from source. Run Time: {time}\n".format( - label=source.label, time=(time.time() - start_time), cleaned=cleaned + info_text = f"[+] Finished cleaning source: {source.label}. " \ + f"Removed {cleaned} certificates from source. " \ + f"Run Time: {(time.time() - start_time)}\n" + print(info_text) + current_app.logger.warning(info_text) + + +@manager.option( + "-s", + "--sources", + dest="source_strings", + action="append", + help="Sources to operate on.", +) +@manager.option( + "-d", + "--days", + dest="days_to_expire", + type=int, + action="store", + required=True, + help="The expiry range within days.", +) +@manager.option( + "-c", + "--commit", + dest="commit", + action="store_true", + default=False, + help="Persist changes.", +) +def clean_unused_and_expiring_within_days(source_strings, days_to_expire, commit): + sources = validate_sources(source_strings) + for source in sources: + s = plugins.get(source.plugin_name) + + if not hasattr(s, "clean"): + info_text = f"Cannot clean source: {source.label}, source plugin does not implement 'clean()'" + current_app.logger.warning(info_text) + print(info_text) + continue + + start_time = time.time() + + print("[+] Staring to clean source: {label}!\n".format(label=source.label)) + + cleaned = 0 + certificates = certificate_service.get_all_pending_cleaning_expiring_in_days(source, days_to_expire) + for certificate in certificates: + status = FAILURE_METRIC_STATUS + if commit: + status = execute_clean(s, certificate, source) + + metrics.send( + "certificate_clean", + "counter", + 1, + metric_tags={"status": status, "source": source.label, "certificate": certificate.name}, ) - ) + current_app.logger.warning(f"Removed {certificate.name} from source {source.label} during cleaning") + cleaned += 1 + + info_text = f"[+] Finished cleaning source: {source.label}. " \ + f"Removed {cleaned} certificates from source. 
" \ + f"Run Time: {(time.time() - start_time)}\n" + print(info_text) + current_app.logger.warning(info_text) + + +@manager.option( + "-s", + "--sources", + dest="source_strings", + action="append", + help="Sources to operate on.", +) +@manager.option( + "-d", + "--days", + dest="days_since_issuance", + type=int, + action="store", + required=True, + help="Days since issuance.", +) +@manager.option( + "-c", + "--commit", + dest="commit", + action="store_true", + default=False, + help="Persist changes.", +) +def clean_unused_and_issued_since_days(source_strings, days_since_issuance, commit): + sources = validate_sources(source_strings) + for source in sources: + s = plugins.get(source.plugin_name) + + if not hasattr(s, "clean"): + info_text = f"Cannot clean source: {source.label}, source plugin does not implement 'clean()'" + current_app.logger.warning(info_text) + print(info_text) + continue + + start_time = time.time() + + print("[+] Starting to clean source: {label}!\n".format(label=source.label)) + + cleaned = 0 + certificates = certificate_service.get_all_pending_cleaning_issued_since_days(source, days_since_issuance) + for certificate in certificates: + status = FAILURE_METRIC_STATUS + if commit: + status = execute_clean(s, certificate, source) + + metrics.send( + "certificate_clean", + "counter", + 1, + metric_tags={"status": status, "source": source.label, "certificate": certificate.name}, + ) + current_app.logger.warning(f"Removed {certificate.name} from source {source.label} during cleaning") + cleaned += 1 + + info_text = f"[+] Finished cleaning source: {source.label}. " \ + f"Removed {cleaned} certificates from source. 
" \ + f"Run Time: {(time.time() - start_time)}\n" + print(info_text) + current_app.logger.warning(info_text) diff --git a/lemur/sources/service.py b/lemur/sources/service.py index f4783313..fafa6f5a 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -123,15 +123,19 @@ def sync_endpoints(source): "acct": s.get_option("accountNumber", source.options)}) if not endpoint["certificate"]: - current_app.logger.error( - "Certificate Not Found. Name: {0} Endpoint: {1}".format( - certificate_name, endpoint["name"] - ) - ) + current_app.logger.error({ + "message": "Certificate Not Found", + "certificate_name": certificate_name, + "endpoint_name": endpoint["name"], + "dns_name": endpoint.get("dnsname"), + "account": s.get_option("accountNumber", source.options), + }) + metrics.send("endpoint.certificate.not.found", "counter", 1, metric_tags={"cert": certificate_name, "endpoint": endpoint["name"], - "acct": s.get_option("accountNumber", source.options)}) + "acct": s.get_option("accountNumber", source.options), + "dnsname": endpoint.get("dnsname")}) continue policy = endpoint.pop("policy") @@ -193,6 +197,11 @@ def sync_certificates(source, user): s = plugins.get(source.plugin_name) certificates = s.get_certificates(source.options) + # emitting the count of certificates on the source + metrics.send("sync_certificates_count", + "gauge", len(certificates), + metric_tags={"source": source.label}) + for certificate in certificates: exists, updated_by_hash = find_cert(certificate) diff --git a/lemur/static/app/angular/certificates/certificate/tracking.tpl.html b/lemur/static/app/angular/certificates/certificate/tracking.tpl.html index 7ac2107f..027add0f 100644 --- a/lemur/static/app/angular/certificates/certificate/tracking.tpl.html +++ b/lemur/static/app/angular/certificates/certificate/tracking.tpl.html @@ -140,7 +140,6 @@ diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index adafa605..41584cb3 100644 --- 
a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -9,7 +9,8 @@ from cryptography import x509 from cryptography.hazmat.backends import default_backend from marshmallow import ValidationError from freezegun import freeze_time -from mock import patch +# from mock import patch +from unittest.mock import patch from lemur.certificates.service import create_csr from lemur.certificates.views import * # noqa @@ -906,12 +907,12 @@ def test_certificate_get_body(client): assert response_body["serial"] == "211983098819107449768450703123665283596" assert response_body["serialHex"] == "9F7A75B39DAE4C3F9524C68B06DA6A0C" assert response_body["distinguishedName"] == ( - "CN=LemurTrust Unittests Class 1 CA 2018," - "O=LemurTrust Enterprises Ltd," - "OU=Unittesting Operations Center," - "C=EE," + "L=Earth," "ST=N/A," - "L=Earth" + "C=EE," + "OU=Unittesting Operations Center," + "O=LemurTrust Enterprises Ltd," + "CN=LemurTrust Unittests Class 1 CA 2018" ) diff --git a/package.json b/package.json index 9b899176..1a54eccc 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,7 @@ "url": "git://github.com/netflix/lemur.git" }, "dependencies": { - "bower": "^1.8.2", + "bower": "^1.8.8", "browser-sync": "^2.26.7", "del": "^2.2.2", "gulp-autoprefixer": "^3.1.1", @@ -17,10 +17,10 @@ "gulp-flatten": "^0.3.1", "gulp-foreach": "0.1.0", "gulp-if": "^2.0.2", - "gulp-imagemin": "^3.1.1", + "gulp-imagemin": "^7.1.0", "gulp-inject": "~4.1.0", "gulp-jshint": "^2.0.4", - "gulp-less": "^3.0.3", + "gulp-less": "^4.0.1", "gulp-load-plugins": "^1.4.0", "gulp-minify-css": "^1.2.4", "gulp-minify-html": "~1.0.6", @@ -29,7 +29,7 @@ "gulp-notify": "^2.2.0", "gulp-plumber": "^1.1.0", "gulp-print": "^2.0.1", - "gulp-protractor": "3.0.0", + "gulp-protractor": "^4.1.1", "gulp-replace": "~0.5.3", "gulp-replace-task": "~0.11.0", "gulp-rev": "^7.1.2", @@ -41,7 +41,7 @@ "gulp-util": "^3.0.1", "http-proxy": "~1.16.2", "jshint-stylish": "^2.2.1", - "karma": "~1.3.0", + "karma": "^4.4.1", 
"karma-jasmine": "^1.1.0", "main-bower-files": "^2.13.1", "merge-stream": "^1.0.1", @@ -60,7 +60,7 @@ }, "devDependencies": { "gulp": "^3.9.1", - "jshint": "^2.8.0", + "jshint": "^2.11.0", "karma-chrome-launcher": "^2.0.0" } } diff --git a/requirements-dev.txt b/requirements-dev.txt index d1423888..785d3f29 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,38 +4,42 @@ # # pip-compile --no-index --output-file=requirements-dev.txt requirements-dev.in # -aspy.yaml==1.3.0 # via pre-commit -bleach==3.1.0 # via readme-renderer -certifi==2019.11.28 # via requests -cfgv==2.0.1 # via pre-commit +appdirs==1.4.3 # via virtualenv +bleach==3.1.4 # via readme-renderer +certifi==2020.4.5.1 # via requests +cffi==1.14.0 # via cryptography +cfgv==3.1.0 # via pre-commit chardet==3.0.4 # via requests -docutils==0.15.2 # via readme-renderer -flake8==3.5.0 -identify==1.4.9 # via pre-commit -idna==2.8 # via requests -importlib-metadata==1.3.0 # via keyring, pre-commit, twine -invoke==1.3.0 -keyring==21.0.0 # via twine +cryptography==2.9.2 # via secretstorage +distlib==0.3.0 # via virtualenv +docutils==0.16 # via readme-renderer +filelock==3.0.12 # via virtualenv +flake8==3.5.0 # via -r requirements-dev.in +identify==1.4.14 # via pre-commit +idna==2.9 # via requests +invoke==1.4.1 # via -r requirements-dev.in +jeepney==0.4.3 # via keyring, secretstorage +keyring==21.2.0 # via twine mccabe==0.6.1 # via flake8 -more-itertools==8.0.2 # via zipp -nodeenv==1.3.3 +nodeenv==1.3.5 # via -r requirements-dev.in, pre-commit pkginfo==1.5.0.1 # via twine -pre-commit==1.21.0 +pre-commit==2.4.0 # via -r requirements-dev.in pycodestyle==2.3.1 # via flake8 +pycparser==2.20 # via cffi pyflakes==1.6.0 # via flake8 -pygments==2.5.2 # via readme-renderer -pyyaml==5.2 -readme-renderer==24.0 # via twine +pygments==2.6.1 # via readme-renderer +pyyaml==5.3.1 # via -r requirements-dev.in, pre-commit +readme-renderer==25.0 # via twine requests-toolbelt==0.9.1 # via twine -requests==2.22.0 # via 
requests-toolbelt, twine -six==1.13.0 # via bleach, cfgv, pre-commit, readme-renderer +requests==2.23.0 # via requests-toolbelt, twine +secretstorage==3.1.2 # via keyring +six==1.14.0 # via bleach, cryptography, readme-renderer, virtualenv toml==0.10.0 # via pre-commit -tqdm==4.41.1 # via twine -twine==3.1.1 -urllib3==1.25.7 # via requests -virtualenv==16.7.9 # via pre-commit +tqdm==4.45.0 # via twine +twine==3.1.1 # via -r requirements-dev.in +urllib3==1.25.8 # via requests +virtualenv==20.0.17 # via pre-commit webencodings==0.5.1 # via bleach -zipp==0.6.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements-docs.txt b/requirements-docs.txt index 893965ca..16d97413 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,111 +4,108 @@ # # pip-compile --no-index --output-file=requirements-docs.txt requirements-docs.in # -acme==1.0.0 +acme==1.4.0 # via -r requirements.txt alabaster==0.7.12 # via sphinx -alembic-autogenerate-enums==0.0.2 -alembic==1.3.2 -amqp==2.5.2 -aniso8601==8.0.0 -arrow==0.15.5 -asyncpool==1.0 +alembic-autogenerate-enums==0.0.2 # via -r requirements.txt +alembic==1.4.2 # via -r requirements.txt, flask-migrate +amqp==2.5.2 # via -r requirements.txt, kombu +aniso8601==8.0.0 # via -r requirements.txt, flask-restful +arrow==0.15.6 # via -r requirements.txt +asyncpool==1.0 # via -r requirements.txt babel==2.8.0 # via sphinx -bcrypt==3.1.7 -billiard==3.6.1.0 -blinker==1.4 -boto3==1.10.46 -botocore==1.13.46 -celery[redis]==4.4.0 -certifi==2019.11.28 -certsrv==2.1.1 -cffi==1.13.2 -chardet==3.0.4 -click==7.0 -cloudflare==2.3.1 -cryptography==2.8 -dnspython3==1.15.0 -dnspython==1.15.0 -docutils==0.15.2 -dyn==1.8.1 -flask-bcrypt==0.7.1 -flask-cors==3.0.8 -flask-mail==0.9.1 -flask-migrate==2.5.2 -flask-principal==0.4.0 -flask-replicated==1.3 -flask-restful==0.3.7 -flask-script==2.0.6 -flask-sqlalchemy==2.4.1 -flask==1.1.1 -future==0.18.2 -gunicorn==20.0.4 
-hvac==0.9.6 -idna==2.8 +bcrypt==3.1.7 # via -r requirements.txt, flask-bcrypt, paramiko +billiard==3.6.3.0 # via -r requirements.txt, celery +blinker==1.4 # via -r requirements.txt, flask-mail, flask-principal, raven +boto3==1.13.11 # via -r requirements.txt +botocore==1.16.11 # via -r requirements.txt, boto3, s3transfer +celery[redis]==4.4.2 # via -r requirements.txt +certifi==2020.4.5.1 # via -r requirements.txt, requests +certsrv==2.1.1 # via -r requirements.txt +cffi==1.14.0 # via -r requirements.txt, bcrypt, cryptography, pynacl +chardet==3.0.4 # via -r requirements.txt, requests +click==7.1.1 # via -r requirements.txt, flask +cloudflare==2.7.1 # via -r requirements.txt +cryptography==2.9.2 # via -r requirements.txt, acme, josepy, paramiko, pyopenssl, requests +dnspython3==1.15.0 # via -r requirements.txt +dnspython==1.15.0 # via -r requirements.txt, dnspython3 +docutils==0.15.2 # via -r requirements.txt, botocore, sphinx +dyn==1.8.1 # via -r requirements.txt +flask-bcrypt==0.7.1 # via -r requirements.txt +flask-cors==3.0.8 # via -r requirements.txt +flask-mail==0.9.1 # via -r requirements.txt +flask-migrate==2.5.3 # via -r requirements.txt +flask-principal==0.4.0 # via -r requirements.txt +flask-replicated==1.3 # via -r requirements.txt +flask-restful==0.3.8 # via -r requirements.txt +flask-script==2.0.6 # via -r requirements.txt +flask-sqlalchemy==2.4.1 # via -r requirements.txt, flask-migrate +flask==1.1.2 # via -r requirements.txt, flask-bcrypt, flask-cors, flask-mail, flask-migrate, flask-principal, flask-restful, flask-script, flask-sqlalchemy, raven +future==0.18.2 # via -r requirements.txt, cloudflare +gunicorn==20.0.4 # via -r requirements.txt +hvac==0.10.1 # via -r requirements.txt +idna==2.9 # via -r requirements.txt, requests imagesize==1.2.0 # via sphinx -importlib-metadata==1.3.0 -inflection==0.3.1 -itsdangerous==1.1.0 -javaobj-py3==0.4.0.1 -jinja2==2.10.3 -jmespath==0.9.4 -josepy==1.2.0 -jsonlines==1.2.0 -kombu==4.6.7 -lockfile==0.12.2 
-logmatic-python==0.1.7 -mako==1.1.0 -markupsafe==1.1.1 -marshmallow-sqlalchemy==0.21.0 -marshmallow==2.20.4 -mock==3.0.5 -more-itertools==8.0.2 -ndg-httpsclient==0.5.1 -packaging==19.2 # via sphinx -paramiko==2.7.1 -pem==19.3.0 -psycopg2==2.8.4 -pyasn1-modules==0.2.7 -pyasn1==0.4.8 -pycparser==2.19 -pycryptodomex==3.9.4 -pygments==2.5.2 # via sphinx -pyjks==19.0.0 -pyjwt==1.7.1 -pynacl==1.3.0 -pyopenssl==19.1.0 -pyparsing==2.4.6 # via packaging -pyrfc3339==1.1 -python-dateutil==2.8.1 -python-editor==1.0.4 -python-json-logger==0.1.11 -pytz==2019.3 -pyyaml==5.2 -raven[flask]==6.10.0 -redis==3.3.11 -requests-toolbelt==0.9.1 -requests[security]==2.22.0 -retrying==1.3.3 -s3transfer==0.2.1 -six==1.13.0 +inflection==0.4.0 # via -r requirements.txt +itsdangerous==1.1.0 # via -r requirements.txt, flask +javaobj-py3==0.4.0.1 # via -r requirements.txt, pyjks +jinja2==2.11.2 # via -r requirements.txt, flask, sphinx +jmespath==0.9.5 # via -r requirements.txt, boto3, botocore +josepy==1.3.0 # via -r requirements.txt, acme +jsonlines==1.2.0 # via -r requirements.txt, cloudflare +kombu==4.6.8 # via -r requirements.txt, celery +lockfile==0.12.2 # via -r requirements.txt +logmatic-python==0.1.7 # via -r requirements.txt +mako==1.1.2 # via -r requirements.txt, alembic +markupsafe==1.1.1 # via -r requirements.txt, jinja2, mako +marshmallow-sqlalchemy==0.23.0 # via -r requirements.txt +marshmallow==2.20.4 # via -r requirements.txt, marshmallow-sqlalchemy +ndg-httpsclient==0.5.1 # via -r requirements.txt +packaging==20.3 # via sphinx +paramiko==2.7.1 # via -r requirements.txt +pem==20.1.0 # via -r requirements.txt +psycopg2==2.8.5 # via -r requirements.txt +pyasn1-modules==0.2.8 # via -r requirements.txt, pyjks, python-ldap +pyasn1==0.4.8 # via -r requirements.txt, ndg-httpsclient, pyasn1-modules, pyjks, python-ldap +pycparser==2.20 # via -r requirements.txt, cffi +pycryptodomex==3.9.7 # via -r requirements.txt, pyjks +pygments==2.6.1 # via sphinx +pyjks==20.0.0 # via -r 
requirements.txt +pyjwt==1.7.1 # via -r requirements.txt +pynacl==1.3.0 # via -r requirements.txt, paramiko +pyopenssl==19.1.0 # via -r requirements.txt, acme, josepy, ndg-httpsclient, requests +pyparsing==2.4.7 # via packaging +pyrfc3339==1.1 # via -r requirements.txt, acme +python-dateutil==2.8.1 # via -r requirements.txt, alembic, arrow, botocore +python-editor==1.0.4 # via -r requirements.txt, alembic +python-json-logger==0.1.11 # via -r requirements.txt, logmatic-python +python-ldap==3.2.0 # via -r requirements.txt +pytz==2019.3 # via -r requirements.txt, acme, babel, celery, flask-restful, pyrfc3339 +pyyaml==5.3.1 # via -r requirements.txt, cloudflare +raven[flask]==6.10.0 # via -r requirements.txt +redis==3.5.2 # via -r requirements.txt, celery +requests-toolbelt==0.9.1 # via -r requirements.txt, acme +requests[security]==2.23.0 # via -r requirements.txt, acme, certsrv, cloudflare, hvac, requests-toolbelt, sphinx +retrying==1.3.3 # via -r requirements.txt +s3transfer==0.3.3 # via -r requirements.txt, boto3 +six==1.14.0 # via -r requirements.txt, acme, bcrypt, cryptography, flask-cors, flask-restful, hvac, josepy, jsonlines, packaging, pynacl, pyopenssl, python-dateutil, retrying, sphinxcontrib-httpdomain, sqlalchemy-utils snowballstemmer==2.0.0 # via sphinx -sphinx-rtd-theme==0.4.3 -sphinx==2.3.1 -sphinxcontrib-applehelp==1.0.1 # via sphinx -sphinxcontrib-devhelp==1.0.1 # via sphinx -sphinxcontrib-htmlhelp==1.0.2 # via sphinx -sphinxcontrib-httpdomain==1.7.0 +sphinx-rtd-theme==0.4.3 # via -r requirements-docs.in +sphinx==3.0.3 # via -r requirements-docs.in, sphinx-rtd-theme, sphinxcontrib-httpdomain +sphinxcontrib-applehelp==1.0.2 # via sphinx +sphinxcontrib-devhelp==1.0.2 # via sphinx +sphinxcontrib-htmlhelp==1.0.3 # via sphinx +sphinxcontrib-httpdomain==1.7.0 # via -r requirements-docs.in sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.2 # via sphinx -sphinxcontrib-serializinghtml==1.1.3 # via sphinx -sqlalchemy-utils==0.36.1 
-sqlalchemy==1.3.12 -tabulate==0.8.6 -twofish==0.3.0 -urllib3==1.25.7 -vine==1.3.0 -werkzeug==0.16.0 -xmltodict==0.12.0 -zipp==0.6.0 +sphinxcontrib-qthelp==1.0.3 # via sphinx +sphinxcontrib-serializinghtml==1.1.4 # via sphinx +sqlalchemy-utils==0.36.5 # via -r requirements.txt +sqlalchemy==1.3.16 # via -r requirements.txt, alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +tabulate==0.8.7 # via -r requirements.txt +twofish==0.3.0 # via -r requirements.txt, pyjks +urllib3==1.25.8 # via -r requirements.txt, botocore, requests +vine==1.3.0 # via -r requirements.txt, amqp, celery +werkzeug==1.0.1 # via -r requirements.txt, flask +xmltodict==0.12.0 # via -r requirements.txt # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements-tests.txt b/requirements-tests.txt index 293bd350..79340e51 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -6,85 +6,87 @@ # appdirs==1.4.3 # via black attrs==19.3.0 # via black, jsonschema, pytest -aws-sam-translator==1.19.1 # via cfn-lint -aws-xray-sdk==2.4.3 # via moto -bandit==1.6.2 -black==19.10b0 -boto3==1.10.46 # via aws-sam-translator, moto +aws-sam-translator==1.22.0 # via cfn-lint +aws-xray-sdk==2.5.0 # via moto +bandit==1.6.2 # via -r requirements-tests.in +black==19.10b0 # via -r requirements-tests.in +boto3==1.13.11 # via aws-sam-translator, moto boto==2.49.0 # via moto -botocore==1.13.46 # via aws-xray-sdk, boto3, moto, s3transfer -certifi==2019.11.28 # via requests -cffi==1.13.2 # via cryptography -cfn-lint==0.26.2 # via moto +botocore==1.16.11 # via aws-xray-sdk, boto3, moto, s3transfer +certifi==2020.4.5.1 # via requests +cffi==1.14.0 # via cryptography +cfn-lint==0.29.5 # via moto chardet==3.0.4 # via requests -click==7.0 # via black, flask -coverage==5.0.1 -cryptography==2.8 # via moto, sshpubkeys -docker==4.1.0 # via moto +click==7.1.1 # via black, flask +coverage==5.1 # via -r requirements-tests.in +cryptography==2.9.2 # via 
moto, sshpubkeys +decorator==4.4.2 # via networkx +docker==4.2.0 # via moto docutils==0.15.2 # via botocore ecdsa==0.15 # via python-jose, sshpubkeys -factory-boy==2.12.0 -faker==3.0.0 -fakeredis==1.1.0 -flask==1.1.1 # via pytest-flask -freezegun==0.3.12 +factory-boy==2.12.0 # via -r requirements-tests.in +faker==4.1.0 # via -r requirements-tests.in, factory-boy +fakeredis==1.4.1 # via -r requirements-tests.in +flask==1.1.2 # via pytest-flask +freezegun==0.3.15 # via -r requirements-tests.in future==0.18.2 # via aws-xray-sdk -gitdb2==2.0.6 # via gitpython -gitpython==3.0.5 # via bandit +gitdb==4.0.4 # via gitpython +gitpython==3.1.1 # via bandit idna==2.8 # via moto, requests -importlib-metadata==1.3.0 # via jsonschema, pluggy, pytest +importlib-metadata==1.6.0 # via jsonpickle itsdangerous==1.1.0 # via flask -jinja2==2.10.3 # via flask, moto -jmespath==0.9.4 # via boto3, botocore +jinja2==2.11.2 # via flask, moto +jmespath==0.9.5 # via boto3, botocore jsondiff==1.1.2 # via moto -jsonpatch==1.24 # via cfn-lint -jsonpickle==1.2 # via aws-xray-sdk +jsonpatch==1.25 # via cfn-lint +jsonpickle==1.4 # via aws-xray-sdk jsonpointer==2.0 # via jsonpatch jsonschema==3.2.0 # via aws-sam-translator, cfn-lint markupsafe==1.1.1 # via jinja2 -mock==3.0.5 # via moto -more-itertools==8.0.2 # via pytest, zipp -moto==1.3.14 -nose==1.3.7 -packaging==19.2 # via pytest -pathspec==0.7.0 # via black -pbr==5.4.4 # via stevedore +mock==4.0.2 # via moto +more-itertools==8.2.0 # via pytest +moto==1.3.14 # via -r requirements-tests.in +networkx==2.4 # via cfn-lint +nose==1.3.7 # via -r requirements-tests.in +packaging==20.3 # via pytest +pathspec==0.8.0 # via black +pbr==5.4.5 # via stevedore pluggy==0.13.1 # via pytest py==1.8.1 # via pytest pyasn1==0.4.8 # via python-jose, rsa -pycparser==2.19 # via cffi -pyflakes==2.1.1 -pyparsing==2.4.6 # via packaging -pyrsistent==0.15.6 # via jsonschema -pytest-flask==0.15.0 -pytest-mock==1.13.0 -pytest==5.3.2 +pycparser==2.20 # via cffi +pyflakes==2.2.0 
# via -r requirements-tests.in +pyparsing==2.4.7 # via packaging +pyrsistent==0.16.0 # via jsonschema +pytest-flask==1.0.0 # via -r requirements-tests.in +pytest-mock==3.1.0 # via -r requirements-tests.in +pytest==5.4.2 # via -r requirements-tests.in, pytest-flask, pytest-mock python-dateutil==2.8.1 # via botocore, faker, freezegun, moto python-jose==3.1.0 # via moto pytz==2019.3 # via moto -pyyaml==5.2 -redis==3.3.11 # via fakeredis -regex==2019.12.20 # via black -requests-mock==1.7.0 -requests==2.22.0 # via docker, moto, requests-mock, responses -responses==0.10.9 # via moto +pyyaml==5.3.1 # via -r requirements-tests.in, bandit, cfn-lint, moto +redis==3.5.2 # via fakeredis +regex==2020.4.4 # via black +requests-mock==1.8.0 # via -r requirements-tests.in +requests==2.23.0 # via docker, moto, requests-mock, responses +responses==0.10.12 # via moto rsa==4.0 # via python-jose -s3transfer==0.2.1 # via boto3 -six==1.13.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, ecdsa, faker, fakeredis, freezegun, jsonschema, mock, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client -smmap2==2.0.5 # via gitdb2 +s3transfer==0.3.3 # via boto3 +six==1.14.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, ecdsa, fakeredis, freezegun, jsonschema, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client +smmap==3.0.2 # via gitdb sortedcontainers==2.1.0 # via fakeredis sshpubkeys==3.1.0 # via moto -stevedore==1.31.0 # via bandit +stevedore==1.32.0 # via bandit text-unidecode==1.3 # via faker toml==0.10.0 # via black -typed-ast==1.4.0 # via black -urllib3==1.25.7 # via botocore, requests -wcwidth==0.1.8 # via pytest +typed-ast==1.4.1 # via black +urllib3==1.25.8 # via botocore, requests +wcwidth==0.1.9 # via pytest websocket-client==0.57.0 # via docker -werkzeug==0.16.0 # via flask, moto, pytest-flask -wrapt==1.11.2 # via aws-xray-sdk 
+werkzeug==1.0.1 # via flask, moto, pytest-flask +wrapt==1.12.1 # via aws-xray-sdk xmltodict==0.12.0 # via moto -zipp==0.6.0 # via importlib-metadata +zipp==3.1.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements.txt b/requirements.txt index 639c9377..315f39b8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,96 +4,92 @@ # # pip-compile --no-index --output-file=requirements.txt requirements.in # -acme==1.0.0 -alembic-autogenerate-enums==0.0.2 -alembic==1.3.2 # via flask-migrate +acme==1.4.0 # via -r requirements.in +alembic-autogenerate-enums==0.0.2 # via -r requirements.in +alembic==1.4.2 # via flask-migrate amqp==2.5.2 # via kombu aniso8601==8.0.0 # via flask-restful -arrow==0.15.5 -asyncpool==1.0 +arrow==0.15.6 # via -r requirements.in +asyncpool==1.0 # via -r requirements.in bcrypt==3.1.7 # via flask-bcrypt, paramiko -billiard==3.6.1.0 # via celery +billiard==3.6.3.0 # via celery blinker==1.4 # via flask-mail, flask-principal, raven -boto3==1.10.46 -botocore==1.13.46 -celery[redis]==4.4.0 -certifi==2019.11.28 -certsrv==2.1.1 -cffi==1.13.2 # via bcrypt, cryptography, pynacl +boto3==1.13.11 # via -r requirements.in +botocore==1.16.11 # via -r requirements.in, boto3, s3transfer +celery[redis]==4.4.2 # via -r requirements.in +certifi==2020.4.5.1 # via -r requirements.in, requests +certsrv==2.1.1 # via -r requirements.in +cffi==1.14.0 # via bcrypt, cryptography, pynacl chardet==3.0.4 # via requests -click==7.0 # via flask -cloudflare==2.3.1 -cryptography==2.8 -dnspython3==1.15.0 +click==7.1.1 # via flask +cloudflare==2.7.1 # via -r requirements.in +cryptography==2.9.2 # via -r requirements.in, acme, josepy, paramiko, pyopenssl, requests +dnspython3==1.15.0 # via -r requirements.in dnspython==1.15.0 # via dnspython3 docutils==0.15.2 # via botocore -dyn==1.8.1 -flask-bcrypt==0.7.1 -flask-cors==3.0.8 -flask-mail==0.9.1 -flask-migrate==2.5.2 -flask-principal==0.4.0 
-flask-replicated==1.3 -flask-restful==0.3.7 -flask-script==2.0.6 -flask-sqlalchemy==2.4.1 -flask==1.1.1 -future==0.18.2 -gunicorn==20.0.4 -hvac==0.9.6 -idna==2.8 # via requests -importlib-metadata==1.3.0 # via kombu -inflection==0.3.1 +dyn==1.8.1 # via -r requirements.in +flask-bcrypt==0.7.1 # via -r requirements.in +flask-cors==3.0.8 # via -r requirements.in +flask-mail==0.9.1 # via -r requirements.in +flask-migrate==2.5.3 # via -r requirements.in +flask-principal==0.4.0 # via -r requirements.in +flask-replicated==1.3 # via -r requirements.in +flask-restful==0.3.8 # via -r requirements.in +flask-script==2.0.6 # via -r requirements.in +flask-sqlalchemy==2.4.1 # via -r requirements.in, flask-migrate +flask==1.1.2 # via -r requirements.in, flask-bcrypt, flask-cors, flask-mail, flask-migrate, flask-principal, flask-restful, flask-script, flask-sqlalchemy, raven +future==0.18.2 # via -r requirements.in, cloudflare +gunicorn==20.0.4 # via -r requirements.in +hvac==0.10.1 # via -r requirements.in +idna==2.9 # via requests +inflection==0.4.0 # via -r requirements.in itsdangerous==1.1.0 # via flask javaobj-py3==0.4.0.1 # via pyjks -jinja2==2.10.3 -jmespath==0.9.4 # via boto3, botocore -josepy==1.2.0 # via acme +jinja2==2.11.2 # via -r requirements.in, flask +jmespath==0.9.5 # via boto3, botocore +josepy==1.3.0 # via acme jsonlines==1.2.0 # via cloudflare -kombu==4.6.7 # via celery -lockfile==0.12.2 -logmatic-python==0.1.7 -mako==1.1.0 # via alembic +kombu==4.6.8 # via celery +lockfile==0.12.2 # via -r requirements.in +logmatic-python==0.1.7 # via -r requirements.in +mako==1.1.2 # via alembic markupsafe==1.1.1 # via jinja2, mako -marshmallow-sqlalchemy==0.21.0 -marshmallow==2.20.4 -mock==3.0.5 # via acme -more-itertools==8.0.2 # via zipp -ndg-httpsclient==0.5.1 -paramiko==2.7.1 -pem==19.3.0 -psycopg2==2.8.4 -pyasn1-modules==0.2.7 # via pyjks, python-ldap +marshmallow-sqlalchemy==0.23.0 # via -r requirements.in +marshmallow==2.20.4 # via -r requirements.in, 
marshmallow-sqlalchemy +ndg-httpsclient==0.5.1 # via -r requirements.in +paramiko==2.7.1 # via -r requirements.in +pem==20.1.0 # via -r requirements.in +psycopg2==2.8.5 # via -r requirements.in +pyasn1-modules==0.2.8 # via pyjks, python-ldap pyasn1==0.4.8 # via ndg-httpsclient, pyasn1-modules, pyjks, python-ldap -pycparser==2.19 # via cffi -pycryptodomex==3.9.4 # via pyjks -pyjks==19.0.0 -pyjwt==1.7.1 +pycparser==2.20 # via cffi +pycryptodomex==3.9.7 # via pyjks +pyjks==20.0.0 # via -r requirements.in +pyjwt==1.7.1 # via -r requirements.in pynacl==1.3.0 # via paramiko -pyopenssl==19.1.0 +pyopenssl==19.1.0 # via -r requirements.in, acme, josepy, ndg-httpsclient, requests pyrfc3339==1.1 # via acme python-dateutil==2.8.1 # via alembic, arrow, botocore python-editor==1.0.4 # via alembic python-json-logger==0.1.11 # via logmatic-python -python-ldap==3.2.0 +python-ldap==3.2.0 # via -r requirements.in pytz==2019.3 # via acme, celery, flask-restful, pyrfc3339 -pyyaml==5.2 -raven[flask]==6.10.0 -redis==3.3.11 +pyyaml==5.3.1 # via -r requirements.in, cloudflare +raven[flask]==6.10.0 # via -r requirements.in +redis==3.5.2 # via -r requirements.in, celery requests-toolbelt==0.9.1 # via acme -requests[security]==2.22.0 -retrying==1.3.3 -s3transfer==0.2.1 # via boto3 -six==1.13.0 -sqlalchemy-utils==0.36.1 -sqlalchemy==1.3.12 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils -tabulate==0.8.6 +requests[security]==2.23.0 # via -r requirements.in, acme, certsrv, cloudflare, hvac, requests-toolbelt +retrying==1.3.3 # via -r requirements.in +s3transfer==0.3.3 # via boto3 +six==1.14.0 # via -r requirements.in, acme, bcrypt, cryptography, flask-cors, flask-restful, hvac, josepy, jsonlines, pynacl, pyopenssl, python-dateutil, retrying, sqlalchemy-utils +sqlalchemy-utils==0.36.5 # via -r requirements.in +sqlalchemy==1.3.16 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +tabulate==0.8.7 # via -r requirements.in twofish==0.3.0 # via pyjks 
-urllib3==1.25.7 # via botocore, requests +urllib3==1.25.8 # via botocore, requests vine==1.3.0 # via amqp, celery -werkzeug==0.16.0 # via flask -xmltodict==0.12.0 -zipp==0.6.0 # via importlib-metadata +werkzeug==1.0.1 # via flask +xmltodict==0.12.0 # via -r requirements.in # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/setup.py b/setup.py index fa5a23bc..4ce03d70 100644 --- a/setup.py +++ b/setup.py @@ -45,16 +45,20 @@ with open(os.path.join(ROOT, 'lemur', '__about__.py')) as f: exec(f.read(), about) # nosec: about file is benign install_requires_g = parse_requirements("requirements.txt", session=PipSession()) -install_requires = [str(ir.req) for ir in install_requires_g] - tests_require_g = parse_requirements("requirements-tests.txt", session=PipSession()) -tests_require = [str(ir.req) for ir in tests_require_g] - docs_require_g = parse_requirements("requirements-docs.txt", session=PipSession()) -docs_require = [str(ir.req) for ir in docs_require_g] - dev_requires_g = parse_requirements("requirements-dev.txt", session=PipSession()) -dev_requires = [str(ir.req) for ir in dev_requires_g] + +if tuple(map(int, pip.__version__.split('.'))) >= (20, 1): + install_requires = [str(ir.requirement) for ir in install_requires_g] + tests_require = [str(ir.requirement) for ir in tests_require_g] + docs_require = [str(ir.requirement) for ir in docs_require_g] + dev_requires = [str(ir.requirement) for ir in dev_requires_g] +else: + install_requires = [str(ir.req) for ir in install_requires_g] + tests_require = [str(ir.req) for ir in tests_require_g] + docs_require = [str(ir.req) for ir in docs_require_g] + dev_requires = [str(ir.req) for ir in dev_requires_g] class SmartInstall(install):