diff --git a/bower.json b/bower.json index f7d5500d..44cce994 100644 --- a/bower.json +++ b/bower.json @@ -11,12 +11,12 @@ "angular": "1.4.9", "json3": "~3.3", "es5-shim": "~4.5.0", - "bootstrap": "~3.3.6", "angular-bootstrap": "~1.1.1", "angular-animate": "~1.4.9", "restangular": "~1.5.1", "ng-table": "~0.8.3", "moment": "~2.11.1", + "bootstrap": "~3.4.1", "angular-loading-bar": "~0.8.0", "angular-moment": "~0.10.3", "moment-range": "~2.1.0", @@ -24,7 +24,7 @@ "angularjs-toaster": "~1.0.0", "angular-chart.js": "~0.8.8", "ngletteravatar": "~4.0.0", - "bootswatch": "~3.3.6", + "bootswatch": "~3.4.1", "fontawesome": "~4.5.0", "satellizer": "~0.13.4", "angular-ui-router": "~0.2.15", diff --git a/docs/production/index.rst b/docs/production/index.rst index cd044ca4..b91ed6bd 100644 --- a/docs/production/index.rst +++ b/docs/production/index.rst @@ -390,6 +390,10 @@ Here are the Celery configuration variables that should be set:: CELERY_IMPORTS = ('lemur.common.celery') CELERY_TIMEZONE = 'UTC' +Do not forget to import crontab module in your configuration file:: + + from celery.task.schedules import crontab + You must start a single Celery scheduler instance and one or more worker instances in order to handle incoming tasks. The scheduler can be started with:: diff --git a/lemur/auth/views.py b/lemur/auth/views.py index e7f87356..eaed419d 100644 --- a/lemur/auth/views.py +++ b/lemur/auth/views.py @@ -127,6 +127,10 @@ def retrieve_user(user_api_url, access_token): # retrieve information about the current user. r = requests.get(user_api_url, params=user_params, headers=headers) + # Some IDPs, like "Keycloak", require a POST instead of a GET + if r.status_code == 400: + r = requests.post(user_api_url, data=user_params, headers=headers) + profile = r.json() user = user_service.get_by_email(profile["email"]) @@ -434,7 +438,7 @@ class OAuth2(Resource): verify_cert=verify_cert, ) - jwks_url = current_app.config.get("PING_JWKS_URL") + jwks_url = current_app.config.get("OAUTH2_JWKS_URL") error_code = validate_id_token(id_token, args["clientId"], jwks_url) if error_code: return error_code diff --git a/lemur/certificates/cli.py b/lemur/certificates/cli.py index b57ff175..d007e458 100644 --- a/lemur/certificates/cli.py +++ b/lemur/certificates/cli.py @@ -5,29 +5,19 @@ :license: Apache, see LICENSE for more details. .. 
moduleauthor:: Kevin Glisson """ -import sys import multiprocessing -from tabulate import tabulate -from sqlalchemy import or_ - +import sys from flask import current_app - -from flask_script import Manager from flask_principal import Identity, identity_changed - +from flask_script import Manager +from sqlalchemy import or_ +from tabulate import tabulate from lemur import database -from lemur.extensions import sentry -from lemur.extensions import metrics -from lemur.plugins.base import plugins -from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS -from lemur.deployment import service as deployment_service -from lemur.endpoints import service as endpoint_service -from lemur.notifications.messaging import send_rotation_notification -from lemur.domains.models import Domain from lemur.authorities.models import Authority -from lemur.certificates.schemas import CertificateOutputSchema +from lemur.authorities.service import get as authorities_get_by_id from lemur.certificates.models import Certificate +from lemur.certificates.schemas import CertificateOutputSchema from lemur.certificates.service import ( reissue_certificate, get_certificate_primitives, @@ -35,9 +25,16 @@ from lemur.certificates.service import ( get_by_name, get_all_certs, get, + get_all_certs_attached_to_endpoint_without_autorotate, ) - from lemur.certificates.verify import verify_string +from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS +from lemur.deployment import service as deployment_service +from lemur.domains.models import Domain +from lemur.endpoints import service as endpoint_service +from lemur.extensions import sentry, metrics +from lemur.notifications.messaging import send_rotation_notification +from lemur.plugins.base import plugins manager = Manager(usage="Handles all certificate related tasks.") @@ -482,3 +479,45 @@ def check_revoked(): cert.status = "unknown" database.update(cert) + + +@manager.command +def automatically_enable_autorotate(): + """ + This function automatically enables auto-rotation for unexpired certificates that are + attached to an endpoint but do not have autorotate enabled. + + WARNING: This will overwrite the Auto-rotate toggle! 
+ """ + log_data = { + "function": f"{__name__}.{sys._getframe().f_code.co_name}", + "message": "Enabling auto-rotate for certificate" + } + + permitted_authorities = current_app.config.get("ENABLE_AUTO_ROTATE_AUTHORITY", []) + + eligible_certs = get_all_certs_attached_to_endpoint_without_autorotate() + for cert in eligible_certs: + + if cert.authority_id not in permitted_authorities: + continue + + log_data["certificate"] = cert.name + log_data["certificate_id"] = cert.id + log_data["authority_id"] = cert.authority_id + log_data["authority_name"] = authorities_get_by_id(cert.authority_id).name + if cert.destinations: + log_data["destination_names"] = ', '.join([d.label for d in cert.destinations]) + else: + log_data["destination_names"] = "NONE" + current_app.logger.info(log_data) + metrics.send("automatically_enable_autorotate", + "counter", 1, + metric_tags={"certificate": log_data["certificate"], + "certificate_id": log_data["certificate_id"], + "authority_id": log_data["authority_id"], + "authority_name": log_data["authority_name"], + "destination_names": log_data["destination_names"] + }) + cert.rotation = True + database.update(cert) diff --git a/lemur/certificates/models.py b/lemur/certificates/models.py index 2ca88b00..58630ee6 100644 --- a/lemur/certificates/models.py +++ b/lemur/certificates/models.py @@ -321,7 +321,8 @@ class Certificate(db.Model): @hybrid_property def expired(self): - if self.not_after <= arrow.utcnow(): + # can't compare offset-naive and offset-aware datetimes + if arrow.Arrow.fromdatetime(self.not_after) <= arrow.utcnow(): return True @expired.expression diff --git a/lemur/certificates/service.py b/lemur/certificates/service.py index a6bbba30..5d1e6e63 100644 --- a/lemur/certificates/service.py +++ b/lemur/certificates/service.py @@ -118,6 +118,21 @@ def get_all_pending_cleaning_expired(source): ) +def get_all_certs_attached_to_endpoint_without_autorotate(): + """ + Retrieves all certificates that are attached to an endpoint, but that do not have autorotate enabled. 
+ + :return: list of certificates attached to an endpoint without autorotate + """ + return ( + Certificate.query.filter(Certificate.endpoints.any()) + .filter(Certificate.rotation == False) + .filter(Certificate.not_after >= arrow.now()) + .filter(not_(Certificate.replaced.any())) + .all() # noqa + ) + + def get_all_pending_cleaning_expiring_in_days(source, days_to_expire): """ Retrieves all certificates that are available for cleaning, not attached to endpoint, @@ -144,7 +159,9 @@ def get_all_pending_cleaning_issued_since_days(source, days_since_issuance): :param source: the source to search for certificates :return: list of pending certificates """ - not_in_use_window = arrow.now().shift(days=-days_since_issuance).format("YYYY-MM-DD") + not_in_use_window = ( + arrow.now().shift(days=-days_since_issuance).format("YYYY-MM-DD") + ) return ( Certificate.query.filter(Certificate.sources.any(id=source.id)) .filter(not_(Certificate.endpoints.any())) @@ -367,9 +384,11 @@ def render(args): show_expired = args.pop("showExpired") if show_expired != 1: - one_month_old = arrow.now()\ - .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1))\ + one_month_old = ( + arrow.now() + .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1)) .format("YYYY-MM-DD") + ) query = query.filter(Certificate.not_after > one_month_old) time_range = args.pop("time_range") diff --git a/lemur/common/celery.py b/lemur/common/celery.py index 4af33d86..5df470ab 100644 --- a/lemur/common/celery.py +++ b/lemur/common/celery.py @@ -10,27 +10,27 @@ command: celery -A lemur.common.celery worker --loglevel=info -l DEBUG -B import copy import sys import time -from datetime import datetime, timezone, timedelta - from celery import Celery +from celery.app.task import Context from celery.exceptions import SoftTimeLimitExceeded +from celery.signals import task_failure, task_received, task_revoked, task_success +from datetime import datetime, timezone, timedelta from flask import current_app from lemur.authorities.service import get as get_authority +from lemur.certificates import cli as cli_certificate from lemur.common.redis import RedisHandler from lemur.destinations import service as destinations_service +from lemur.dns_providers import cli as cli_dns_providers +from lemur.endpoints import cli as cli_endpoints from lemur.extensions import metrics, sentry from lemur.factory import create_app +from lemur.notifications import cli as cli_notification from lemur.notifications.messaging import send_pending_failure_notification from lemur.pending_certificates import service as pending_certificate_service from lemur.plugins.base import plugins from lemur.sources.cli import clean, sync, validate_sources from lemur.sources.service import add_aws_destination_to_sources -from lemur.certificates import cli as cli_certificate -from lemur.dns_providers import cli as cli_dns_providers -from lemur.notifications import cli as cli_notification -from lemur.endpoints import cli as cli_endpoints - if current_app: flask_app = current_app @@ -67,7 +67,7 @@ def is_task_active(fun, task_id, args): from celery.task.control import inspect if not args: - args = '()' # empty args + args = "()" # empty args i = inspect() active_tasks = i.active() @@ -80,6 +80,37 @@ def is_task_active(fun, task_id, args): return False +def get_celery_request_tags(**kwargs): + request = kwargs.get("request") + sender_hostname = "unknown" + sender = kwargs.get("sender") + if sender: + try: + sender_hostname = sender.hostname + except 
AttributeError: + sender_hostname = vars(sender.request).get("origin", "unknown") + if request and not isinstance( + request, Context + ): # unlike others, task_revoked sends a Context for `request` + task_name = request.name + task_id = request.id + receiver_hostname = request.hostname + else: + task_name = sender.name + task_id = sender.request.id + receiver_hostname = sender.request.hostname + + tags = { + "task_name": task_name, + "task_id": task_id, + "sender_hostname": sender_hostname, + "receiver_hostname": receiver_hostname, + } + if kwargs.get("exception"): + tags["error"] = repr(kwargs["exception"]) + return tags + + @celery.task() def report_celery_last_success_metrics(): """ @@ -89,7 +120,6 @@ def report_celery_last_success_metrics(): report_celery_last_success_metrics should be ran periodically to emit metrics on when a task was last successful. Admins can then alert when tasks are not ran when intended. Admins should also alert when no metrics are emitted from this function. - """ function = f"{__name__}.{sys._getframe().f_code.co_name}" task_id = None @@ -108,15 +138,91 @@ def report_celery_last_success_metrics(): return current_time = int(time.time()) - schedule = current_app.config.get('CELERYBEAT_SCHEDULE') + schedule = current_app.config.get("CELERYBEAT_SCHEDULE") for _, t in schedule.items(): task = t.get("task") last_success = int(red.get(f"{task}.last_success") or 0) - metrics.send(f"{task}.time_since_last_success", 'gauge', current_time - last_success) + metrics.send( + f"{task}.time_since_last_success", "gauge", current_time - last_success + ) red.set( f"{function}.last_success", int(time.time()) ) # Alert if this metric is not seen - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + + +@task_received.connect +def report_number_pending_tasks(**kwargs): + """ + Report the number of pending tasks to our metrics broker every time a task is published. This metric can be used + for autoscaling workers. + https://docs.celeryproject.org/en/latest/userguide/signals.html#task-received + """ + with flask_app.app_context(): + metrics.send( + "celery.new_pending_task", + "TIMER", + 1, + metric_tags=get_celery_request_tags(**kwargs), + ) + + +@task_success.connect +def report_successful_task(**kwargs): + """ + Report a generic success metric as tasks to our metrics broker every time a task finished correctly. + This metric can be used for autoscaling workers. + https://docs.celeryproject.org/en/latest/userguide/signals.html#task-success + """ + with flask_app.app_context(): + tags = get_celery_request_tags(**kwargs) + red.set(f"{tags['task_name']}.last_success", int(time.time())) + metrics.send("celery.successful_task", "TIMER", 1, metric_tags=tags) + + +@task_failure.connect +def report_failed_task(**kwargs): + """ + Report a generic failure metric as tasks to our metrics broker every time a task fails. + This metric can be used for alerting. 
+ https://docs.celeryproject.org/en/latest/userguide/signals.html#task-failure + """ + with flask_app.app_context(): + log_data = { + "function": f"{__name__}.{sys._getframe().f_code.co_name}", + "Message": "Celery Task Failure", + } + + # Add traceback if exception info is in the kwargs + einfo = kwargs.get("einfo") + if einfo: + log_data["traceback"] = einfo.traceback + + error_tags = get_celery_request_tags(**kwargs) + + log_data.update(error_tags) + current_app.logger.error(log_data) + metrics.send("celery.failed_task", "TIMER", 1, metric_tags=error_tags) + + +@task_revoked.connect +def report_revoked_task(**kwargs): + """ + Report a generic failure metric as tasks to our metrics broker every time a task is revoked. + This metric can be used for alerting. + https://docs.celeryproject.org/en/latest/userguide/signals.html#task-revoked + """ + with flask_app.app_context(): + log_data = { + "function": f"{__name__}.{sys._getframe().f_code.co_name}", + "Message": "Celery Task Revoked", + } + + error_tags = get_celery_request_tags(**kwargs) + + log_data.update(error_tags) + current_app.logger.error(log_data) + metrics.send("celery.revoked_task", "TIMER", 1, metric_tags=error_tags) @celery.task(soft_time_limit=600) @@ -217,15 +323,15 @@ def fetch_acme_cert(id): log_data["failed"] = failed log_data["wrong_issuer"] = wrong_issuer current_app.logger.debug(log_data) - metrics.send(f"{function}.resolved", 'gauge', new) - metrics.send(f"{function}.failed", 'gauge', failed) - metrics.send(f"{function}.wrong_issuer", 'gauge', wrong_issuer) + metrics.send(f"{function}.resolved", "gauge", new) + metrics.send(f"{function}.failed", "gauge", failed) + metrics.send(f"{function}.wrong_issuer", "gauge", wrong_issuer) print( "[+] Certificates: New: {new} Failed: {failed} Not using ACME: {wrong_issuer}".format( new=new, failed=failed, wrong_issuer=wrong_issuer ) ) - red.set(f'{function}.last_success', int(time.time())) + return log_data @celery.task() @@ -262,8 +368,8 @@ def fetch_all_pending_acme_certs(): current_app.logger.debug(log_data) fetch_acme_cert.delay(cert.id) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task() @@ -296,8 +402,8 @@ def remove_old_acme_certs(): current_app.logger.debug(log_data) pending_certificate_service.delete(cert) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task() @@ -328,11 +434,11 @@ def clean_all_sources(): current_app.logger.debug(log_data) clean_source.delay(source.label) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data -@celery.task(soft_time_limit=600) +@celery.task(soft_time_limit=3600) def clean_source(source): """ This celery task will clean the specified source. 
This is a destructive operation that will delete unused @@ -366,6 +472,7 @@ def clean_source(source): current_app.logger.error(log_data) sentry.captureException() metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) + return log_data @celery.task() @@ -395,8 +502,8 @@ def sync_all_sources(): current_app.logger.debug(log_data) sync_source.delay(source.label) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=7200) @@ -428,19 +535,23 @@ def sync_source(source): current_app.logger.debug(log_data) try: sync([source]) - metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source}) + metrics.send( + f"{function}.success", "counter", 1, metric_tags={"source": source} + ) except SoftTimeLimitExceeded: log_data["message"] = "Error syncing source: Time limit exceeded." current_app.logger.error(log_data) sentry.captureException() - metrics.send("sync_source_timeout", "counter", 1, metric_tags={"source": source}) + metrics.send( + "sync_source_timeout", "counter", 1, metric_tags={"source": source} + ) metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return log_data["message"] = "Done syncing source" current_app.logger.debug(log_data) - metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source}) - red.set(f'{function}.last_success', int(time.time())) + metrics.send(f"{function}.success", "counter", 1, metric_tags={"source": source}) + return log_data @celery.task() @@ -477,8 +588,8 @@ def sync_source_destination(): log_data["message"] = "completed Syncing AWS destinations and sources" current_app.logger.debug(log_data) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ -515,8 +626,8 @@ def certificate_reissue(): log_data["message"] = "reissuance completed" current_app.logger.debug(log_data) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ -534,7 +645,6 @@ def certificate_rotate(): "function": function, "message": "rotating certificates", "task_id": task_id, - } if task_id and is_task_active(function, task_id, None): @@ -554,8 +664,8 @@ def certificate_rotate(): log_data["message"] = "rotation completed" current_app.logger.debug(log_data) - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ -590,8 +700,8 @@ def endpoints_expire(): metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=600) @@ -626,8 +736,8 @@ def get_all_zones(): metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ 
-662,8 +772,8 @@ def check_revoked(): metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data @celery.task(soft_time_limit=3600) @@ -690,7 +800,9 @@ def notify_expirations(): current_app.logger.debug(log_data) try: - cli_notification.expirations(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])) + cli_notification.expirations( + current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", []) + ) except SoftTimeLimitExceeded: log_data["message"] = "Notify expiring Time limit exceeded." current_app.logger.error(log_data) @@ -698,5 +810,29 @@ def notify_expirations(): metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function}) return - red.set(f'{function}.last_success', int(time.time())) - metrics.send(f"{function}.success", 'counter', 1) + metrics.send(f"{function}.success", "counter", 1) + return log_data + + +@celery.task(soft_time_limit=3600) +def enable_autorotate_for_certs_attached_to_endpoint(): + """ + This celery task automatically enables autorotation for unexpired certificates that are + attached to an endpoint but do not have autorotate enabled. + :return: + """ + function = f"{__name__}.{sys._getframe().f_code.co_name}" + task_id = None + if celery.current_task: + task_id = celery.current_task.request.id + + log_data = { + "function": function, + "task_id": task_id, + "message": "Enabling autorotate to eligible certificates", + } + current_app.logger.debug(log_data) + + cli_certificate.automatically_enable_autorotate() + metrics.send(f"{function}.success", "counter", 1) + return log_data diff --git a/lemur/common/defaults.py b/lemur/common/defaults.py index d563dbd0..b9c88e49 100644 --- a/lemur/common/defaults.py +++ b/lemur/common/defaults.py @@ -2,6 +2,7 @@ import re import unicodedata from cryptography import x509 +from cryptography.hazmat.primitives.serialization import Encoding from flask import current_app from lemur.common.utils import is_selfsigned @@ -71,12 +72,20 @@ def common_name(cert): :return: Common name or None """ try: - return cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)[ - 0 - ].value.strip() + subject_oid = cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME) + if len(subject_oid) > 0: + return subject_oid[0].value.strip() + return None except Exception as e: sentry.captureException() - current_app.logger.error("Unable to get common name! {0}".format(e)) + current_app.logger.error( + { + "message": "Unable to get common name", + "error": e, + "public_key": cert.public_bytes(Encoding.PEM).decode("utf-8") + }, + exc_info=True + ) def organization(cert): diff --git a/lemur/plugins/lemur_acme/powerdns.py b/lemur/plugins/lemur_acme/powerdns.py index a26faaac..a5d02353 100644 --- a/lemur/plugins/lemur_acme/powerdns.py +++ b/lemur/plugins/lemur_acme/powerdns.py @@ -1,11 +1,10 @@ -import time -import requests import json import sys +import time import lemur.common.utils as utils import lemur.dns_providers.util as dnsutil - +import requests from flask import current_app from lemur.extensions import metrics, sentry @@ -17,7 +16,9 @@ REQUIRED_VARIABLES = [ class Zone: - """ This class implements a PowerDNS zone in JSON. """ + """ + This class implements a PowerDNS zone in JSON. + """ def __init__(self, _data): self._data = _data @@ -39,7 +40,9 @@ class Zone: class Record: - """ This class implements a PowerDNS record. 
""" + """ + This class implements a PowerDNS record. + """ def __init__(self, _data): self._data = _data @@ -49,20 +52,30 @@ class Record: return self._data["name"] @property - def disabled(self): - return self._data["disabled"] + def type(self): + return self._data["type"] + + @property + def ttl(self): + return self._data["ttl"] @property def content(self): return self._data["content"] @property - def ttl(self): - return self._data["ttl"] + def disabled(self): + return self._data["disabled"] def get_zones(account_number): - """Retrieve authoritative zones from the PowerDNS API and return a list""" + """ + Retrieve authoritative zones from the PowerDNS API and return a list of zones + + :param account_number: + :raise: Exception + :return: list of Zone Objects + """ _check_conf() server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") path = f"/api/v1/servers/{server_id}/zones" @@ -90,44 +103,41 @@ def get_zones(account_number): def create_txt_record(domain, token, account_number): - """ Create a TXT record for the given domain and token and return a change_id tuple """ + """ + Create a TXT record for the given domain and token and return a change_id tuple + + :param domain: FQDN + :param token: challenge value + :param account_number: + :return: tuple of domain/token + """ _check_conf() - zone_name = _get_zone_name(domain, account_number) - server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") - zone_id = zone_name + "." - domain_id = domain + "." - path = f"/api/v1/servers/{server_id}/zones/{zone_id}" - payload = { - "rrsets": [ - { - "name": domain_id, - "type": "TXT", - "ttl": 300, - "changetype": "REPLACE", - "records": [ - { - "content": f"\"{token}\"", - "disabled": False - } - ], - "comments": [] - } - ] - } + function = sys._getframe().f_code.co_name log_data = { "function": function, "fqdn": domain, "token": token, } + + # Create new record + domain_id = domain + "." + records = [Record({'name': domain_id, 'content': f"\"{token}\"", 'disabled': False})] + + # Get current records + cur_records = _get_txt_records(domain) + for record in cur_records: + if record.content != token: + records.append(record) + try: - _patch(path, payload) - log_data["message"] = "TXT record successfully created" + _patch_txt_records(domain, account_number, records) + log_data["message"] = "TXT record(s) successfully created" current_app.logger.debug(log_data) except Exception as e: sentry.captureException() log_data["Exception"] = e - log_data["message"] = "Unable to create TXT record" + log_data["message"] = "Unable to create TXT record(s)" current_app.logger.debug(log_data) change_id = (domain, token) @@ -136,8 +146,11 @@ def create_txt_record(domain, token, account_number): def wait_for_dns_change(change_id, account_number=None): """ - Checks the authoritative DNS Server to see if changes have propagated to DNS - Retries and waits until successful. + Checks the authoritative DNS Server to see if changes have propagated. 
+ + :param change_id: tuple of domain/token + :param account_number: + :return: """ _check_conf() domain, token = change_id @@ -171,53 +184,115 @@ def wait_for_dns_change(change_id, account_number=None): def delete_txt_record(change_id, account_number, domain, token): - """ Delete the TXT record for the given domain and token """ + """ + Delete the TXT record for the given domain and token + + :param change_id: tuple of domain/token + :param account_number: + :param domain: FQDN + :param token: challenge to delete + :return: + """ _check_conf() - zone_name = _get_zone_name(domain, account_number) - server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") - zone_id = zone_name + "." - domain_id = domain + "." - path = f"/api/v1/servers/{server_id}/zones/{zone_id}" - payload = { - "rrsets": [ - { - "name": domain_id, - "type": "TXT", - "ttl": 300, - "changetype": "DELETE", - "records": [ - { - "content": f"\"{token}\"", - "disabled": False - } - ], - "comments": [] - } - ] - } + function = sys._getframe().f_code.co_name log_data = { "function": function, "fqdn": domain, - "token": token + "token": token, } - try: - _patch(path, payload) - log_data["message"] = "TXT record successfully deleted" - current_app.logger.debug(log_data) - except Exception as e: - sentry.captureException() - log_data["Exception"] = e - log_data["message"] = "Unable to delete TXT record" + + """ + Get existing TXT records matching the domain from DNS + The token to be deleted should already exist + There may be other records with different tokens as well + """ + cur_records = _get_txt_records(domain) + found = False + new_records = [] + for record in cur_records: + if record.content == f"\"{token}\"": + found = True + else: + new_records.append(record) + + # Since the matching token is not in DNS, there is nothing to delete + if not found: + log_data["message"] = "Unable to delete TXT record: Token not found in existing TXT records" current_app.logger.debug(log_data) + return + + # The record to delete has been found AND there are other tokens set on the same domain + # Since we only want to delete one token value from the RRSet, we need to use the Patch command to + # overwrite the current RRSet with the existing records. + elif new_records: + try: + _patch_txt_records(domain, account_number, new_records) + log_data["message"] = "TXT record successfully deleted" + current_app.logger.debug(log_data) + except Exception as e: + sentry.captureException() + log_data["Exception"] = e + log_data["message"] = "Unable to delete TXT record: patching exception" + current_app.logger.debug(log_data) + + # The record to delete has been found AND there are no other token values set on the same domain + # Use the Delete command to delete the whole RRSet. + else: + zone_name = _get_zone_name(domain, account_number) + server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") + zone_id = zone_name + "." + domain_id = domain + "." 
+ path = f"/api/v1/servers/{server_id}/zones/{zone_id}" + payload = { + "rrsets": [ + { + "name": domain_id, + "type": "TXT", + "ttl": 300, + "changetype": "DELETE", + "records": [ + { + "content": f"\"{token}\"", + "disabled": False + } + ], + "comments": [] + } + ] + } + function = sys._getframe().f_code.co_name + log_data = { + "function": function, + "fqdn": domain, + "token": token + } + try: + _patch(path, payload) + log_data["message"] = "TXT record successfully deleted" + current_app.logger.debug(log_data) + except Exception as e: + sentry.captureException() + log_data["Exception"] = e + log_data["message"] = "Unable to delete TXT record" + current_app.logger.debug(log_data) def _check_conf(): + """ + Verifies required configuration variables are set + + :return: + """ utils.validate_conf(current_app, REQUIRED_VARIABLES) def _generate_header(): - """Generate a PowerDNS API header and return it as a dictionary""" + """ + Generate a PowerDNS API header and return it as a dictionary + + :return: Dict of header parameters + """ api_key_name = current_app.config.get("ACME_POWERDNS_APIKEYNAME") api_key = current_app.config.get("ACME_POWERDNS_APIKEY") headers = {api_key_name: api_key} @@ -225,7 +300,13 @@ def _generate_header(): def _get_zone_name(domain, account_number): - """Get most specific matching zone for the given domain and return as a String""" + """ + Get most specific matching zone for the given domain and return as a String + + :param domain: FQDN + :param account_number: + :return: FQDN of domain + """ zones = get_zones(account_number) zone_name = "" for z in zones: @@ -243,8 +324,47 @@ def _get_zone_name(domain, account_number): return zone_name +def _get_txt_records(domain): + """ + Retrieve TXT records for a given domain and return list of Record Objects + + :param domain: FQDN + :return: list of Record objects + """ + server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") + + path = f"/api/v1/servers/{server_id}/search-data?q={domain}&max=100&object_type=record" + function = sys._getframe().f_code.co_name + log_data = { + "function": function + } + try: + records = _get(path) + log_data["message"] = "Retrieved TXT Records Successfully" + current_app.logger.debug(log_data) + + except Exception as e: + sentry.captureException() + log_data["Exception"] = e + log_data["message"] = "Failed to Retrieve TXT Records" + current_app.logger.debug(log_data) + return [] + + txt_records = [] + for record in records: + cur_record = Record(record) + txt_records.append(cur_record) + return txt_records + + def _get(path, params=None): - """ Execute a GET request on the given URL (base_uri + path) and return response as JSON object """ + """ + Execute a GET request on the given URL (base_uri + path) and return response as JSON object + + :param path: Relative URL path + :param params: additional parameters + :return: json response + """ base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN") verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True) resp = requests.get( @@ -257,8 +377,54 @@ def _get(path, params=None): return resp.json() +def _patch_txt_records(domain, account_number, records): + """ + Send Patch request to PowerDNS Server + + :param domain: FQDN + :param account_number: + :param records: List of Record objects + :return: + """ + domain_id = domain + "." 
+ + # Create records + txt_records = [] + for record in records: + txt_records.append( + {'content': record.content, 'disabled': record.disabled} + ) + + # Create RRSet + payload = { + "rrsets": [ + { + "name": domain_id, + "type": "TXT", + "ttl": 300, + "changetype": "REPLACE", + "records": txt_records, + "comments": [] + } + ] + } + + # Create Txt Records + server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost") + zone_name = _get_zone_name(domain, account_number) + zone_id = zone_name + "." + path = f"/api/v1/servers/{server_id}/zones/{zone_id}" + _patch(path, payload) + + def _patch(path, payload): - """ Execute a Patch request on the given URL (base_uri + path) with given payload """ + """ + Execute a Patch request on the given URL (base_uri + path) with given payload + + :param path: + :param payload: + :return: + """ base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN") verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True) resp = requests.patch( diff --git a/lemur/plugins/lemur_acme/route53.py b/lemur/plugins/lemur_acme/route53.py index 55da5161..aaccb57e 100644 --- a/lemur/plugins/lemur_acme/route53.py +++ b/lemur/plugins/lemur_acme/route53.py @@ -35,9 +35,10 @@ def get_zones(client=None): zones = [] for page in paginator.paginate(): for zone in page["HostedZones"]: - zones.append( - zone["Name"][:-1] - ) # We need [:-1] to strip out the trailing dot. + if not zone["Config"]["PrivateZone"]: + zones.append( + zone["Name"][:-1] + ) # We need [:-1] to strip out the trailing dot. return zones diff --git a/lemur/plugins/lemur_acme/tests/test_acme.py b/lemur/plugins/lemur_acme/tests/test_acme.py index b2c32eec..bec7be2b 100644 --- a/lemur/plugins/lemur_acme/tests/test_acme.py +++ b/lemur/plugins/lemur_acme/tests/test_acme.py @@ -1,11 +1,10 @@ import unittest +from unittest.mock import patch, Mock from cryptography.x509 import DNSName -from requests.models import Response - -from mock import MagicMock, Mock, patch - from lemur.plugins.lemur_acme import plugin, ultradns +from mock import MagicMock +from requests.models import Response class TestAcme(unittest.TestCase): @@ -57,7 +56,7 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.plugin.len", return_value=1) @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_dns_challenges") def test_start_dns_challenge( - self, mock_get_dns_challenges, mock_len, mock_app, mock_acme + self, mock_get_dns_challenges, mock_len, mock_app, mock_acme ): assert mock_len mock_order = Mock() @@ -88,7 +87,7 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change") @patch("time.sleep") def test_complete_dns_challenge_success( - self, mock_sleep, mock_wait_for_dns_change, mock_current_app, mock_acme + self, mock_sleep, mock_wait_for_dns_change, mock_current_app, mock_acme ): mock_dns_provider = Mock() mock_dns_provider.wait_for_dns_change = Mock(return_value=True) @@ -112,7 +111,7 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.plugin.current_app") @patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change") def test_complete_dns_challenge_fail( - self, mock_wait_for_dns_change, mock_current_app, mock_acme + self, mock_wait_for_dns_change, mock_current_app, mock_acme ): mock_dns_provider = Mock() mock_dns_provider.wait_for_dns_change = Mock(return_value=True) @@ -140,12 +139,12 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_dns_challenges") 
@patch("lemur.plugins.lemur_acme.plugin.current_app") def test_request_certificate( - self, - mock_current_app, - mock_get_dns_challenges, - mock_jose, - mock_crypto, - mock_acme, + self, + mock_current_app, + mock_get_dns_challenges, + mock_jose, + mock_crypto, + mock_acme, ): mock_cert_response = Mock() mock_cert_response.body = "123" @@ -182,7 +181,7 @@ class TestAcme(unittest.TestCase): assert result_client assert result_registration - @patch("lemur.plugins.lemur_acme.plugin.current_app") + @patch('lemur.plugins.lemur_acme.plugin.current_app') def test_get_domains_single(self, mock_current_app): options = {"common_name": "test.netflix.net"} result = self.acme.get_domains(options) @@ -288,14 +287,14 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations") @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate") def test_get_ordered_certificate( - self, - mock_request_certificate, - mock_finalize_authorizations, - mock_get_authorizations, - mock_dns_provider_service, - mock_authorization_service, - mock_current_app, - mock_acme, + self, + mock_request_certificate, + mock_finalize_authorizations, + mock_get_authorizations, + mock_dns_provider_service, + mock_authorization_service, + mock_current_app, + mock_acme, ): mock_client = Mock() mock_acme.return_value = (mock_client, "") @@ -319,14 +318,14 @@ class TestAcme(unittest.TestCase): @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations") @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate") def test_get_ordered_certificates( - self, - mock_request_certificate, - mock_finalize_authorizations, - mock_get_authorizations, - mock_dns_provider_service, - mock_authorization_service, - mock_current_app, - mock_acme, + self, + mock_request_certificate, + mock_finalize_authorizations, + mock_get_authorizations, + mock_dns_provider_service, + mock_authorization_service, + mock_current_app, + mock_acme, ): mock_client = Mock() mock_acme.return_value = (mock_client, "") diff --git a/lemur/plugins/lemur_acme/tests/test_powerdns.py b/lemur/plugins/lemur_acme/tests/test_powerdns.py index c8b0a11e..714cc938 100644 --- a/lemur/plugins/lemur_acme/tests/test_powerdns.py +++ b/lemur/plugins/lemur_acme/tests/test_powerdns.py @@ -1,5 +1,5 @@ import unittest -from mock import Mock, patch +from unittest.mock import patch, Mock from lemur.plugins.lemur_acme import plugin, powerdns @@ -48,13 +48,14 @@ class TestPowerdns(unittest.TestCase): self.assertEqual(result, zone) @patch("lemur.plugins.lemur_acme.powerdns.current_app") - def test_create_txt_record(self, mock_current_app): + def test_create_txt_record_write_only(self, mock_current_app): domain = "_acme_challenge.test.example.com" zone = "test.example.com" token = "ABCDEFGHIJ" account_number = "1234567890" change_id = (domain, token) powerdns._check_conf = Mock() + powerdns._get_txt_records = Mock(return_value=[]) powerdns._get_zone_name = Mock(return_value=zone) mock_current_app.logger.debug = Mock() mock_current_app.config.get = Mock(return_value="localhost") @@ -63,24 +64,74 @@ class TestPowerdns(unittest.TestCase): "function": "create_txt_record", "fqdn": domain, "token": token, - "message": "TXT record successfully created" + "message": "TXT record(s) successfully created" } result = powerdns.create_txt_record(domain, token, account_number) mock_current_app.logger.debug.assert_called_with(log_data) self.assertEqual(result, change_id) + 
@patch("lemur.plugins.lemur_acme.powerdns.current_app") + def test_create_txt_record_append(self, mock_current_app): + domain = "_acme_challenge.test.example.com" + zone = "test.example.com" + token = "ABCDEFGHIJ" + account_number = "1234567890" + change_id = (domain, token) + powerdns._check_conf = Mock() + cur_token = "123456" + cur_records = [powerdns.Record({'name': domain, 'content': f"\"{cur_token}\"", 'disabled': False})] + powerdns._get_txt_records = Mock(return_value=cur_records) + powerdns._get_zone_name = Mock(return_value=zone) + mock_current_app.logger.debug = Mock() + mock_current_app.config.get = Mock(return_value="localhost") + powerdns._patch = Mock() + log_data = { + "function": "create_txt_record", + "fqdn": domain, + "token": token, + "message": "TXT record(s) successfully created" + } + expected_path = f"/api/v1/servers/localhost/zones/test.example.com." + expected_payload = { + "rrsets": [ + { + "name": domain + ".", + "type": "TXT", + "ttl": 300, + "changetype": "REPLACE", + "records": [ + { + "content": f"\"{token}\"", + "disabled": False + }, + { + "content": f"\"{cur_token}\"", + "disabled": False + } + ], + "comments": [] + } + ] + } + + result = powerdns.create_txt_record(domain, token, account_number) + mock_current_app.logger.debug.assert_called_with(log_data) + powerdns._patch.assert_called_with(expected_path, expected_payload) + self.assertEqual(result, change_id) + @patch("lemur.plugins.lemur_acme.powerdns.dnsutil") @patch("lemur.plugins.lemur_acme.powerdns.current_app") @patch("lemur.extensions.metrics") @patch("time.sleep") def test_wait_for_dns_change(self, mock_sleep, mock_metrics, mock_current_app, mock_dnsutil): domain = "_acme-challenge.test.example.com" - token = "ABCDEFG" + token1 = "ABCDEFG" + token2 = "HIJKLMN" zone_name = "test.example.com" nameserver = "1.1.1.1" - change_id = (domain, token) + change_id = (domain, token1) powerdns._check_conf = Mock() - mock_records = (token,) + mock_records = (token2, token1) mock_current_app.config.get = Mock(return_value=1) powerdns._get_zone_name = Mock(return_value=zone_name) mock_dnsutil.get_authoritative_nameserver = Mock(return_value=nameserver) @@ -114,7 +165,7 @@ class TestPowerdns(unittest.TestCase): "function": "delete_txt_record", "fqdn": domain, "token": token, - "message": "TXT record successfully deleted" + "message": "Unable to delete TXT record: Token not found in existing TXT records" } powerdns.delete_txt_record(change_id, account_number, domain, token) mock_current_app.logger.debug.assert_called_with(log_data) diff --git a/lemur/plugins/lemur_aws/iam.py b/lemur/plugins/lemur_aws/iam.py index 13590ddd..8d80e020 100644 --- a/lemur/plugins/lemur_aws/iam.py +++ b/lemur/plugins/lemur_aws/iam.py @@ -24,6 +24,12 @@ def retry_throttled(exception): if exception.response["Error"]["Code"] == "NoSuchEntity": return False + # No need to retry deletion requests if there is a DeleteConflict error. + # This error indicates that the certificate is still attached to an entity + # and cannot be deleted. 
+ if exception.response["Error"]["Code"] == "DeleteConflict": + return False + metrics.send("iam_retry", "counter", 1, metric_tags={"exception": str(exception)}) return True diff --git a/lemur/plugins/lemur_aws/plugin.py b/lemur/plugins/lemur_aws/plugin.py index 7bb7a3a2..8692348a 100644 --- a/lemur/plugins/lemur_aws/plugin.py +++ b/lemur/plugins/lemur_aws/plugin.py @@ -216,22 +216,24 @@ class AWSSourcePlugin(SourcePlugin): for region in regions: elbs = elb.get_all_elbs(account_number=account_number, region=region) - current_app.logger.info( - "Describing classic load balancers in {0}-{1}".format( - account_number, region - ) - ) + current_app.logger.info({ + "message": "Describing classic load balancers", + "account_number": account_number, + "region": region, + "number_of_load_balancers": len(elbs) + }) for e in elbs: endpoints.extend(get_elb_endpoints(account_number, region, e)) # fetch advanced ELBs elbs_v2 = elb.get_all_elbs_v2(account_number=account_number, region=region) - current_app.logger.info( - "Describing advanced load balancers in {0}-{1}".format( - account_number, region - ) - ) + current_app.logger.info({ + "message": "Describing advanced load balancers", + "account_number": account_number, + "region": region, + "number_of_load_balancers": len(elbs_v2) + }) for e in elbs_v2: endpoints.extend(get_elb_endpoints_v2(account_number, region, e)) diff --git a/lemur/plugins/lemur_digicert/tests/test_digicert.py b/lemur/plugins/lemur_digicert/tests/test_digicert.py index 1e9ebca4..8bfd1dcf 100644 --- a/lemur/plugins/lemur_digicert/tests/test_digicert.py +++ b/lemur/plugins/lemur_digicert/tests/test_digicert.py @@ -1,4 +1,5 @@ import json +from unittest.mock import patch, Mock import arrow import pytest @@ -6,7 +7,6 @@ from cryptography import x509 from freezegun import freeze_time from lemur.plugins.lemur_digicert import plugin from lemur.tests.vectors import CSR_STR -from mock import Mock, patch def config_mock(*args): diff --git a/lemur/sources/cli.py b/lemur/sources/cli.py index 0d537500..c415b567 100644 --- a/lemur/sources/cli.py +++ b/lemur/sources/cli.py @@ -58,6 +58,13 @@ def execute_clean(plugin, certificate, source): try: plugin.clean(certificate, source.options) certificate.sources.remove(source) + + # If we want to remove the source from the certificate, we also need to clear any equivalent destinations to + # prevent Lemur from re-uploading the certificate. + for destination in certificate.destinations: + if destination.label == source.label: + certificate.destinations.remove(destination) + certificate_service.database.update(certificate) return SUCCESS_METRIC_STATUS except Exception as e: diff --git a/lemur/sources/service.py b/lemur/sources/service.py index f4783313..fafa6f5a 100644 --- a/lemur/sources/service.py +++ b/lemur/sources/service.py @@ -123,15 +123,19 @@ def sync_endpoints(source): "acct": s.get_option("accountNumber", source.options)}) if not endpoint["certificate"]: - current_app.logger.error( - "Certificate Not Found. 
Name: {0} Endpoint: {1}".format( - certificate_name, endpoint["name"] - ) - ) + current_app.logger.error({ + "message": "Certificate Not Found", + "certificate_name": certificate_name, + "endpoint_name": endpoint["name"], + "dns_name": endpoint.get("dnsname"), + "account": s.get_option("accountNumber", source.options), + }) + metrics.send("endpoint.certificate.not.found", "counter", 1, metric_tags={"cert": certificate_name, "endpoint": endpoint["name"], - "acct": s.get_option("accountNumber", source.options)}) + "acct": s.get_option("accountNumber", source.options), + "dnsname": endpoint.get("dnsname")}) continue policy = endpoint.pop("policy") @@ -193,6 +197,11 @@ def sync_certificates(source, user): s = plugins.get(source.plugin_name) certificates = s.get_certificates(source.options) + # emitting the count of certificates on the source + metrics.send("sync_certificates_count", + "gauge", len(certificates), + metric_tags={"source": source.label}) + for certificate in certificates: exists, updated_by_hash = find_cert(certificate) diff --git a/lemur/tests/test_certificates.py b/lemur/tests/test_certificates.py index adafa605..41584cb3 100644 --- a/lemur/tests/test_certificates.py +++ b/lemur/tests/test_certificates.py @@ -9,7 +9,8 @@ from cryptography import x509 from cryptography.hazmat.backends import default_backend from marshmallow import ValidationError from freezegun import freeze_time -from mock import patch +# from mock import patch +from unittest.mock import patch from lemur.certificates.service import create_csr from lemur.certificates.views import * # noqa @@ -906,12 +907,12 @@ def test_certificate_get_body(client): assert response_body["serial"] == "211983098819107449768450703123665283596" assert response_body["serialHex"] == "9F7A75B39DAE4C3F9524C68B06DA6A0C" assert response_body["distinguishedName"] == ( - "CN=LemurTrust Unittests Class 1 CA 2018," - "O=LemurTrust Enterprises Ltd," - "OU=Unittesting Operations Center," - "C=EE," + "L=Earth," "ST=N/A," - "L=Earth" + "C=EE," + "OU=Unittesting Operations Center," + "O=LemurTrust Enterprises Ltd," + "CN=LemurTrust Unittests Class 1 CA 2018" ) diff --git a/package.json b/package.json index 9b899176..1a54eccc 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,7 @@ "url": "git://github.com/netflix/lemur.git" }, "dependencies": { - "bower": "^1.8.2", + "bower": "^1.8.8", "browser-sync": "^2.26.7", "del": "^2.2.2", "gulp-autoprefixer": "^3.1.1", @@ -17,10 +17,10 @@ "gulp-flatten": "^0.3.1", "gulp-foreach": "0.1.0", "gulp-if": "^2.0.2", - "gulp-imagemin": "^3.1.1", + "gulp-imagemin": "^7.1.0", "gulp-inject": "~4.1.0", "gulp-jshint": "^2.0.4", - "gulp-less": "^3.0.3", + "gulp-less": "^4.0.1", "gulp-load-plugins": "^1.4.0", "gulp-minify-css": "^1.2.4", "gulp-minify-html": "~1.0.6", @@ -29,7 +29,7 @@ "gulp-notify": "^2.2.0", "gulp-plumber": "^1.1.0", "gulp-print": "^2.0.1", - "gulp-protractor": "3.0.0", + "gulp-protractor": "^4.1.1", "gulp-replace": "~0.5.3", "gulp-replace-task": "~0.11.0", "gulp-rev": "^7.1.2", @@ -41,7 +41,7 @@ "gulp-util": "^3.0.1", "http-proxy": "~1.16.2", "jshint-stylish": "^2.2.1", - "karma": "~1.3.0", + "karma": "^4.4.1", "karma-jasmine": "^1.1.0", "main-bower-files": "^2.13.1", "merge-stream": "^1.0.1", @@ -60,7 +60,7 @@ }, "devDependencies": { "gulp": "^3.9.1", - "jshint": "^2.8.0", + "jshint": "^2.11.0", "karma-chrome-launcher": "^2.0.0" } } diff --git a/requirements-dev.txt b/requirements-dev.txt index 224789f6..785d3f29 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -4,39 +4,41 @@ # # 
pip-compile --no-index --output-file=requirements-dev.txt requirements-dev.in # -aspy.yaml==1.3.0 # via pre-commit -bleach==3.1.1 # via readme-renderer -certifi==2019.11.28 # via requests +appdirs==1.4.3 # via virtualenv +bleach==3.1.4 # via readme-renderer +certifi==2020.4.5.1 # via requests cffi==1.14.0 # via cryptography -cfgv==2.0.1 # via pre-commit +cfgv==3.1.0 # via pre-commit chardet==3.0.4 # via requests -cryptography==2.8 # via secretstorage -docutils==0.15.2 # via readme-renderer -flake8==3.5.0 -identify==1.4.9 # via pre-commit -idna==2.8 # via requests -invoke==1.3.0 -jeepney==0.4.2 # via secretstorage -keyring==21.0.0 # via twine +cryptography==2.9.2 # via secretstorage +distlib==0.3.0 # via virtualenv +docutils==0.16 # via readme-renderer +filelock==3.0.12 # via virtualenv +flake8==3.5.0 # via -r requirements-dev.in +identify==1.4.14 # via pre-commit +idna==2.9 # via requests +invoke==1.4.1 # via -r requirements-dev.in +jeepney==0.4.3 # via keyring, secretstorage +keyring==21.2.0 # via twine mccabe==0.6.1 # via flake8 -nodeenv==1.3.3 +nodeenv==1.3.5 # via -r requirements-dev.in, pre-commit pkginfo==1.5.0.1 # via twine -pre-commit==1.21.0 +pre-commit==2.4.0 # via -r requirements-dev.in pycodestyle==2.3.1 # via flake8 -pycparser==2.19 # via cffi +pycparser==2.20 # via cffi pyflakes==1.6.0 # via flake8 -pygments==2.5.2 # via readme-renderer -pyyaml==5.2 -readme-renderer==24.0 # via twine +pygments==2.6.1 # via readme-renderer +pyyaml==5.3.1 # via -r requirements-dev.in, pre-commit +readme-renderer==25.0 # via twine requests-toolbelt==0.9.1 # via twine -requests==2.22.0 # via requests-toolbelt, twine +requests==2.23.0 # via requests-toolbelt, twine secretstorage==3.1.2 # via keyring -six==1.13.0 # via bleach, cfgv, cryptography, pre-commit, readme-renderer +six==1.14.0 # via bleach, cryptography, readme-renderer, virtualenv toml==0.10.0 # via pre-commit -tqdm==4.41.1 # via twine -twine==3.1.1 -urllib3==1.25.7 # via requests -virtualenv==16.7.9 # via pre-commit +tqdm==4.45.0 # via twine +twine==3.1.1 # via -r requirements-dev.in +urllib3==1.25.8 # via requests +virtualenv==20.0.17 # via pre-commit webencodings==0.5.1 # via bleach # The following packages are considered to be unsafe in a requirements file: diff --git a/requirements-docs.txt b/requirements-docs.txt index 893965ca..16d97413 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -4,111 +4,108 @@ # # pip-compile --no-index --output-file=requirements-docs.txt requirements-docs.in # -acme==1.0.0 +acme==1.4.0 # via -r requirements.txt alabaster==0.7.12 # via sphinx -alembic-autogenerate-enums==0.0.2 -alembic==1.3.2 -amqp==2.5.2 -aniso8601==8.0.0 -arrow==0.15.5 -asyncpool==1.0 +alembic-autogenerate-enums==0.0.2 # via -r requirements.txt +alembic==1.4.2 # via -r requirements.txt, flask-migrate +amqp==2.5.2 # via -r requirements.txt, kombu +aniso8601==8.0.0 # via -r requirements.txt, flask-restful +arrow==0.15.6 # via -r requirements.txt +asyncpool==1.0 # via -r requirements.txt babel==2.8.0 # via sphinx -bcrypt==3.1.7 -billiard==3.6.1.0 -blinker==1.4 -boto3==1.10.46 -botocore==1.13.46 -celery[redis]==4.4.0 -certifi==2019.11.28 -certsrv==2.1.1 -cffi==1.13.2 -chardet==3.0.4 -click==7.0 -cloudflare==2.3.1 -cryptography==2.8 -dnspython3==1.15.0 -dnspython==1.15.0 -docutils==0.15.2 -dyn==1.8.1 -flask-bcrypt==0.7.1 -flask-cors==3.0.8 -flask-mail==0.9.1 -flask-migrate==2.5.2 -flask-principal==0.4.0 -flask-replicated==1.3 -flask-restful==0.3.7 -flask-script==2.0.6 -flask-sqlalchemy==2.4.1 -flask==1.1.1 -future==0.18.2 
-gunicorn==20.0.4 -hvac==0.9.6 -idna==2.8 +bcrypt==3.1.7 # via -r requirements.txt, flask-bcrypt, paramiko +billiard==3.6.3.0 # via -r requirements.txt, celery +blinker==1.4 # via -r requirements.txt, flask-mail, flask-principal, raven +boto3==1.13.11 # via -r requirements.txt +botocore==1.16.11 # via -r requirements.txt, boto3, s3transfer +celery[redis]==4.4.2 # via -r requirements.txt +certifi==2020.4.5.1 # via -r requirements.txt, requests +certsrv==2.1.1 # via -r requirements.txt +cffi==1.14.0 # via -r requirements.txt, bcrypt, cryptography, pynacl +chardet==3.0.4 # via -r requirements.txt, requests +click==7.1.1 # via -r requirements.txt, flask +cloudflare==2.7.1 # via -r requirements.txt +cryptography==2.9.2 # via -r requirements.txt, acme, josepy, paramiko, pyopenssl, requests +dnspython3==1.15.0 # via -r requirements.txt +dnspython==1.15.0 # via -r requirements.txt, dnspython3 +docutils==0.15.2 # via -r requirements.txt, botocore, sphinx +dyn==1.8.1 # via -r requirements.txt +flask-bcrypt==0.7.1 # via -r requirements.txt +flask-cors==3.0.8 # via -r requirements.txt +flask-mail==0.9.1 # via -r requirements.txt +flask-migrate==2.5.3 # via -r requirements.txt +flask-principal==0.4.0 # via -r requirements.txt +flask-replicated==1.3 # via -r requirements.txt +flask-restful==0.3.8 # via -r requirements.txt +flask-script==2.0.6 # via -r requirements.txt +flask-sqlalchemy==2.4.1 # via -r requirements.txt, flask-migrate +flask==1.1.2 # via -r requirements.txt, flask-bcrypt, flask-cors, flask-mail, flask-migrate, flask-principal, flask-restful, flask-script, flask-sqlalchemy, raven +future==0.18.2 # via -r requirements.txt, cloudflare +gunicorn==20.0.4 # via -r requirements.txt +hvac==0.10.1 # via -r requirements.txt +idna==2.9 # via -r requirements.txt, requests imagesize==1.2.0 # via sphinx -importlib-metadata==1.3.0 -inflection==0.3.1 -itsdangerous==1.1.0 -javaobj-py3==0.4.0.1 -jinja2==2.10.3 -jmespath==0.9.4 -josepy==1.2.0 -jsonlines==1.2.0 -kombu==4.6.7 -lockfile==0.12.2 -logmatic-python==0.1.7 -mako==1.1.0 -markupsafe==1.1.1 -marshmallow-sqlalchemy==0.21.0 -marshmallow==2.20.4 -mock==3.0.5 -more-itertools==8.0.2 -ndg-httpsclient==0.5.1 -packaging==19.2 # via sphinx -paramiko==2.7.1 -pem==19.3.0 -psycopg2==2.8.4 -pyasn1-modules==0.2.7 -pyasn1==0.4.8 -pycparser==2.19 -pycryptodomex==3.9.4 -pygments==2.5.2 # via sphinx -pyjks==19.0.0 -pyjwt==1.7.1 -pynacl==1.3.0 -pyopenssl==19.1.0 -pyparsing==2.4.6 # via packaging -pyrfc3339==1.1 -python-dateutil==2.8.1 -python-editor==1.0.4 -python-json-logger==0.1.11 -pytz==2019.3 -pyyaml==5.2 -raven[flask]==6.10.0 -redis==3.3.11 -requests-toolbelt==0.9.1 -requests[security]==2.22.0 -retrying==1.3.3 -s3transfer==0.2.1 -six==1.13.0 +inflection==0.4.0 # via -r requirements.txt +itsdangerous==1.1.0 # via -r requirements.txt, flask +javaobj-py3==0.4.0.1 # via -r requirements.txt, pyjks +jinja2==2.11.2 # via -r requirements.txt, flask, sphinx +jmespath==0.9.5 # via -r requirements.txt, boto3, botocore +josepy==1.3.0 # via -r requirements.txt, acme +jsonlines==1.2.0 # via -r requirements.txt, cloudflare +kombu==4.6.8 # via -r requirements.txt, celery +lockfile==0.12.2 # via -r requirements.txt +logmatic-python==0.1.7 # via -r requirements.txt +mako==1.1.2 # via -r requirements.txt, alembic +markupsafe==1.1.1 # via -r requirements.txt, jinja2, mako +marshmallow-sqlalchemy==0.23.0 # via -r requirements.txt +marshmallow==2.20.4 # via -r requirements.txt, marshmallow-sqlalchemy +ndg-httpsclient==0.5.1 # via -r requirements.txt +packaging==20.3 # via sphinx 
+paramiko==2.7.1 # via -r requirements.txt +pem==20.1.0 # via -r requirements.txt +psycopg2==2.8.5 # via -r requirements.txt +pyasn1-modules==0.2.8 # via -r requirements.txt, pyjks, python-ldap +pyasn1==0.4.8 # via -r requirements.txt, ndg-httpsclient, pyasn1-modules, pyjks, python-ldap +pycparser==2.20 # via -r requirements.txt, cffi +pycryptodomex==3.9.7 # via -r requirements.txt, pyjks +pygments==2.6.1 # via sphinx +pyjks==20.0.0 # via -r requirements.txt +pyjwt==1.7.1 # via -r requirements.txt +pynacl==1.3.0 # via -r requirements.txt, paramiko +pyopenssl==19.1.0 # via -r requirements.txt, acme, josepy, ndg-httpsclient, requests +pyparsing==2.4.7 # via packaging +pyrfc3339==1.1 # via -r requirements.txt, acme +python-dateutil==2.8.1 # via -r requirements.txt, alembic, arrow, botocore +python-editor==1.0.4 # via -r requirements.txt, alembic +python-json-logger==0.1.11 # via -r requirements.txt, logmatic-python +python-ldap==3.2.0 # via -r requirements.txt +pytz==2019.3 # via -r requirements.txt, acme, babel, celery, flask-restful, pyrfc3339 +pyyaml==5.3.1 # via -r requirements.txt, cloudflare +raven[flask]==6.10.0 # via -r requirements.txt +redis==3.5.2 # via -r requirements.txt, celery +requests-toolbelt==0.9.1 # via -r requirements.txt, acme +requests[security]==2.23.0 # via -r requirements.txt, acme, certsrv, cloudflare, hvac, requests-toolbelt, sphinx +retrying==1.3.3 # via -r requirements.txt +s3transfer==0.3.3 # via -r requirements.txt, boto3 +six==1.14.0 # via -r requirements.txt, acme, bcrypt, cryptography, flask-cors, flask-restful, hvac, josepy, jsonlines, packaging, pynacl, pyopenssl, python-dateutil, retrying, sphinxcontrib-httpdomain, sqlalchemy-utils snowballstemmer==2.0.0 # via sphinx -sphinx-rtd-theme==0.4.3 -sphinx==2.3.1 -sphinxcontrib-applehelp==1.0.1 # via sphinx -sphinxcontrib-devhelp==1.0.1 # via sphinx -sphinxcontrib-htmlhelp==1.0.2 # via sphinx -sphinxcontrib-httpdomain==1.7.0 +sphinx-rtd-theme==0.4.3 # via -r requirements-docs.in +sphinx==3.0.3 # via -r requirements-docs.in, sphinx-rtd-theme, sphinxcontrib-httpdomain +sphinxcontrib-applehelp==1.0.2 # via sphinx +sphinxcontrib-devhelp==1.0.2 # via sphinx +sphinxcontrib-htmlhelp==1.0.3 # via sphinx +sphinxcontrib-httpdomain==1.7.0 # via -r requirements-docs.in sphinxcontrib-jsmath==1.0.1 # via sphinx -sphinxcontrib-qthelp==1.0.2 # via sphinx -sphinxcontrib-serializinghtml==1.1.3 # via sphinx -sqlalchemy-utils==0.36.1 -sqlalchemy==1.3.12 -tabulate==0.8.6 -twofish==0.3.0 -urllib3==1.25.7 -vine==1.3.0 -werkzeug==0.16.0 -xmltodict==0.12.0 -zipp==0.6.0 +sphinxcontrib-qthelp==1.0.3 # via sphinx +sphinxcontrib-serializinghtml==1.1.4 # via sphinx +sqlalchemy-utils==0.36.5 # via -r requirements.txt +sqlalchemy==1.3.16 # via -r requirements.txt, alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils +tabulate==0.8.7 # via -r requirements.txt +twofish==0.3.0 # via -r requirements.txt, pyjks +urllib3==1.25.8 # via -r requirements.txt, botocore, requests +vine==1.3.0 # via -r requirements.txt, amqp, celery +werkzeug==1.0.1 # via -r requirements.txt, flask +xmltodict==0.12.0 # via -r requirements.txt # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements-tests.txt b/requirements-tests.txt index 293bd350..79340e51 100644 --- a/requirements-tests.txt +++ b/requirements-tests.txt @@ -6,85 +6,87 @@ # appdirs==1.4.3 # via black attrs==19.3.0 # via black, jsonschema, pytest -aws-sam-translator==1.19.1 # via cfn-lint -aws-xray-sdk==2.4.3 # via moto -bandit==1.6.2 
-black==19.10b0
-boto3==1.10.46 # via aws-sam-translator, moto
+aws-sam-translator==1.22.0 # via cfn-lint
+aws-xray-sdk==2.5.0 # via moto
+bandit==1.6.2 # via -r requirements-tests.in
+black==19.10b0 # via -r requirements-tests.in
+boto3==1.13.11 # via aws-sam-translator, moto
 boto==2.49.0 # via moto
-botocore==1.13.46 # via aws-xray-sdk, boto3, moto, s3transfer
-certifi==2019.11.28 # via requests
-cffi==1.13.2 # via cryptography
-cfn-lint==0.26.2 # via moto
+botocore==1.16.11 # via aws-xray-sdk, boto3, moto, s3transfer
+certifi==2020.4.5.1 # via requests
+cffi==1.14.0 # via cryptography
+cfn-lint==0.29.5 # via moto
 chardet==3.0.4 # via requests
-click==7.0 # via black, flask
-coverage==5.0.1
-cryptography==2.8 # via moto, sshpubkeys
-docker==4.1.0 # via moto
+click==7.1.1 # via black, flask
+coverage==5.1 # via -r requirements-tests.in
+cryptography==2.9.2 # via moto, sshpubkeys
+decorator==4.4.2 # via networkx
+docker==4.2.0 # via moto
 docutils==0.15.2 # via botocore
 ecdsa==0.15 # via python-jose, sshpubkeys
-factory-boy==2.12.0
-faker==3.0.0
-fakeredis==1.1.0
-flask==1.1.1 # via pytest-flask
-freezegun==0.3.12
+factory-boy==2.12.0 # via -r requirements-tests.in
+faker==4.1.0 # via -r requirements-tests.in, factory-boy
+fakeredis==1.4.1 # via -r requirements-tests.in
+flask==1.1.2 # via pytest-flask
+freezegun==0.3.15 # via -r requirements-tests.in
 future==0.18.2 # via aws-xray-sdk
-gitdb2==2.0.6 # via gitpython
-gitpython==3.0.5 # via bandit
+gitdb==4.0.4 # via gitpython
+gitpython==3.1.1 # via bandit
 idna==2.8 # via moto, requests
-importlib-metadata==1.3.0 # via jsonschema, pluggy, pytest
+importlib-metadata==1.6.0 # via jsonpickle
 itsdangerous==1.1.0 # via flask
-jinja2==2.10.3 # via flask, moto
-jmespath==0.9.4 # via boto3, botocore
+jinja2==2.11.2 # via flask, moto
+jmespath==0.9.5 # via boto3, botocore
 jsondiff==1.1.2 # via moto
-jsonpatch==1.24 # via cfn-lint
-jsonpickle==1.2 # via aws-xray-sdk
+jsonpatch==1.25 # via cfn-lint
+jsonpickle==1.4 # via aws-xray-sdk
 jsonpointer==2.0 # via jsonpatch
 jsonschema==3.2.0 # via aws-sam-translator, cfn-lint
 markupsafe==1.1.1 # via jinja2
-mock==3.0.5 # via moto
-more-itertools==8.0.2 # via pytest, zipp
-moto==1.3.14
-nose==1.3.7
-packaging==19.2 # via pytest
-pathspec==0.7.0 # via black
-pbr==5.4.4 # via stevedore
+mock==4.0.2 # via moto
+more-itertools==8.2.0 # via pytest
+moto==1.3.14 # via -r requirements-tests.in
+networkx==2.4 # via cfn-lint
+nose==1.3.7 # via -r requirements-tests.in
+packaging==20.3 # via pytest
+pathspec==0.8.0 # via black
+pbr==5.4.5 # via stevedore
 pluggy==0.13.1 # via pytest
 py==1.8.1 # via pytest
 pyasn1==0.4.8 # via python-jose, rsa
-pycparser==2.19 # via cffi
-pyflakes==2.1.1
-pyparsing==2.4.6 # via packaging
-pyrsistent==0.15.6 # via jsonschema
-pytest-flask==0.15.0
-pytest-mock==1.13.0
-pytest==5.3.2
+pycparser==2.20 # via cffi
+pyflakes==2.2.0 # via -r requirements-tests.in
+pyparsing==2.4.7 # via packaging
+pyrsistent==0.16.0 # via jsonschema
+pytest-flask==1.0.0 # via -r requirements-tests.in
+pytest-mock==3.1.0 # via -r requirements-tests.in
+pytest==5.4.2 # via -r requirements-tests.in, pytest-flask, pytest-mock
 python-dateutil==2.8.1 # via botocore, faker, freezegun, moto
 python-jose==3.1.0 # via moto
 pytz==2019.3 # via moto
-pyyaml==5.2
-redis==3.3.11 # via fakeredis
-regex==2019.12.20 # via black
-requests-mock==1.7.0
-requests==2.22.0 # via docker, moto, requests-mock, responses
-responses==0.10.9 # via moto
+pyyaml==5.3.1 # via -r requirements-tests.in, bandit, cfn-lint, moto
+redis==3.5.2 # via fakeredis
+regex==2020.4.4 # via black
+requests-mock==1.8.0 # via -r requirements-tests.in
+requests==2.23.0 # via docker, moto, requests-mock, responses
+responses==0.10.12 # via moto
 rsa==4.0 # via python-jose
-s3transfer==0.2.1 # via boto3
-six==1.13.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, ecdsa, faker, fakeredis, freezegun, jsonschema, mock, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client
-smmap2==2.0.5 # via gitdb2
+s3transfer==0.3.3 # via boto3
+six==1.14.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, ecdsa, fakeredis, freezegun, jsonschema, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client
+smmap==3.0.2 # via gitdb
 sortedcontainers==2.1.0 # via fakeredis
 sshpubkeys==3.1.0 # via moto
-stevedore==1.31.0 # via bandit
+stevedore==1.32.0 # via bandit
 text-unidecode==1.3 # via faker
 toml==0.10.0 # via black
-typed-ast==1.4.0 # via black
-urllib3==1.25.7 # via botocore, requests
-wcwidth==0.1.8 # via pytest
+typed-ast==1.4.1 # via black
+urllib3==1.25.8 # via botocore, requests
+wcwidth==0.1.9 # via pytest
 websocket-client==0.57.0 # via docker
-werkzeug==0.16.0 # via flask, moto, pytest-flask
-wrapt==1.11.2 # via aws-xray-sdk
+werkzeug==1.0.1 # via flask, moto, pytest-flask
+wrapt==1.12.1 # via aws-xray-sdk
 xmltodict==0.12.0 # via moto
-zipp==0.6.0 # via importlib-metadata
+zipp==3.1.0 # via importlib-metadata
 
 # The following packages are considered to be unsafe in a requirements file:
 # setuptools
diff --git a/requirements.txt b/requirements.txt
index 639c9377..315f39b8 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,96 +4,92 @@
 #
 # pip-compile --no-index --output-file=requirements.txt requirements.in
 #
-acme==1.0.0
-alembic-autogenerate-enums==0.0.2
-alembic==1.3.2 # via flask-migrate
+acme==1.4.0 # via -r requirements.in
+alembic-autogenerate-enums==0.0.2 # via -r requirements.in
+alembic==1.4.2 # via flask-migrate
 amqp==2.5.2 # via kombu
 aniso8601==8.0.0 # via flask-restful
-arrow==0.15.5
-asyncpool==1.0
+arrow==0.15.6 # via -r requirements.in
+asyncpool==1.0 # via -r requirements.in
 bcrypt==3.1.7 # via flask-bcrypt, paramiko
-billiard==3.6.1.0 # via celery
+billiard==3.6.3.0 # via celery
 blinker==1.4 # via flask-mail, flask-principal, raven
-boto3==1.10.46
-botocore==1.13.46
-celery[redis]==4.4.0
-certifi==2019.11.28
-certsrv==2.1.1
-cffi==1.13.2 # via bcrypt, cryptography, pynacl
+boto3==1.13.11 # via -r requirements.in
+botocore==1.16.11 # via -r requirements.in, boto3, s3transfer
+celery[redis]==4.4.2 # via -r requirements.in
+certifi==2020.4.5.1 # via -r requirements.in, requests
+certsrv==2.1.1 # via -r requirements.in
+cffi==1.14.0 # via bcrypt, cryptography, pynacl
 chardet==3.0.4 # via requests
-click==7.0 # via flask
-cloudflare==2.3.1
-cryptography==2.8
-dnspython3==1.15.0
+click==7.1.1 # via flask
+cloudflare==2.7.1 # via -r requirements.in
+cryptography==2.9.2 # via -r requirements.in, acme, josepy, paramiko, pyopenssl, requests
+dnspython3==1.15.0 # via -r requirements.in
 dnspython==1.15.0 # via dnspython3
 docutils==0.15.2 # via botocore
-dyn==1.8.1
-flask-bcrypt==0.7.1
-flask-cors==3.0.8
-flask-mail==0.9.1
-flask-migrate==2.5.2
-flask-principal==0.4.0
-flask-replicated==1.3
-flask-restful==0.3.7
-flask-script==2.0.6
-flask-sqlalchemy==2.4.1
-flask==1.1.1
-future==0.18.2
-gunicorn==20.0.4
-hvac==0.9.6
-idna==2.8 # via requests
-importlib-metadata==1.3.0 # via kombu
-inflection==0.3.1
+dyn==1.8.1 # via -r requirements.in
+flask-bcrypt==0.7.1 # via -r requirements.in
+flask-cors==3.0.8 # via -r requirements.in
+flask-mail==0.9.1 # via -r requirements.in
+flask-migrate==2.5.3 # via -r requirements.in
+flask-principal==0.4.0 # via -r requirements.in
+flask-replicated==1.3 # via -r requirements.in
+flask-restful==0.3.8 # via -r requirements.in
+flask-script==2.0.6 # via -r requirements.in
+flask-sqlalchemy==2.4.1 # via -r requirements.in, flask-migrate
+flask==1.1.2 # via -r requirements.in, flask-bcrypt, flask-cors, flask-mail, flask-migrate, flask-principal, flask-restful, flask-script, flask-sqlalchemy, raven
+future==0.18.2 # via -r requirements.in, cloudflare
+gunicorn==20.0.4 # via -r requirements.in
+hvac==0.10.1 # via -r requirements.in
+idna==2.9 # via requests
+inflection==0.4.0 # via -r requirements.in
 itsdangerous==1.1.0 # via flask
 javaobj-py3==0.4.0.1 # via pyjks
-jinja2==2.10.3
-jmespath==0.9.4 # via boto3, botocore
-josepy==1.2.0 # via acme
+jinja2==2.11.2 # via -r requirements.in, flask
+jmespath==0.9.5 # via boto3, botocore
+josepy==1.3.0 # via acme
 jsonlines==1.2.0 # via cloudflare
-kombu==4.6.7 # via celery
-lockfile==0.12.2
-logmatic-python==0.1.7
-mako==1.1.0 # via alembic
+kombu==4.6.8 # via celery
+lockfile==0.12.2 # via -r requirements.in
+logmatic-python==0.1.7 # via -r requirements.in
+mako==1.1.2 # via alembic
 markupsafe==1.1.1 # via jinja2, mako
-marshmallow-sqlalchemy==0.21.0
-marshmallow==2.20.4
-mock==3.0.5 # via acme
-more-itertools==8.0.2 # via zipp
-ndg-httpsclient==0.5.1
-paramiko==2.7.1
-pem==19.3.0
-psycopg2==2.8.4
-pyasn1-modules==0.2.7 # via pyjks, python-ldap
+marshmallow-sqlalchemy==0.23.0 # via -r requirements.in
+marshmallow==2.20.4 # via -r requirements.in, marshmallow-sqlalchemy
+ndg-httpsclient==0.5.1 # via -r requirements.in
+paramiko==2.7.1 # via -r requirements.in
+pem==20.1.0 # via -r requirements.in
+psycopg2==2.8.5 # via -r requirements.in
+pyasn1-modules==0.2.8 # via pyjks, python-ldap
 pyasn1==0.4.8 # via ndg-httpsclient, pyasn1-modules, pyjks, python-ldap
-pycparser==2.19 # via cffi
-pycryptodomex==3.9.4 # via pyjks
-pyjks==19.0.0
-pyjwt==1.7.1
+pycparser==2.20 # via cffi
+pycryptodomex==3.9.7 # via pyjks
+pyjks==20.0.0 # via -r requirements.in
+pyjwt==1.7.1 # via -r requirements.in
 pynacl==1.3.0 # via paramiko
-pyopenssl==19.1.0
+pyopenssl==19.1.0 # via -r requirements.in, acme, josepy, ndg-httpsclient, requests
 pyrfc3339==1.1 # via acme
 python-dateutil==2.8.1 # via alembic, arrow, botocore
 python-editor==1.0.4 # via alembic
 python-json-logger==0.1.11 # via logmatic-python
-python-ldap==3.2.0
+python-ldap==3.2.0 # via -r requirements.in
 pytz==2019.3 # via acme, celery, flask-restful, pyrfc3339
-pyyaml==5.2
-raven[flask]==6.10.0
-redis==3.3.11
+pyyaml==5.3.1 # via -r requirements.in, cloudflare
+raven[flask]==6.10.0 # via -r requirements.in
+redis==3.5.2 # via -r requirements.in, celery
 requests-toolbelt==0.9.1 # via acme
-requests[security]==2.22.0
-retrying==1.3.3
-s3transfer==0.2.1 # via boto3
-six==1.13.0
-sqlalchemy-utils==0.36.1
-sqlalchemy==1.3.12 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils
-tabulate==0.8.6
+requests[security]==2.23.0 # via -r requirements.in, acme, certsrv, cloudflare, hvac, requests-toolbelt
+retrying==1.3.3 # via -r requirements.in
+s3transfer==0.3.3 # via boto3
+six==1.14.0 # via -r requirements.in, acme, bcrypt, cryptography, flask-cors, flask-restful, hvac, josepy, jsonlines, pynacl, pyopenssl, python-dateutil, retrying, sqlalchemy-utils
+sqlalchemy-utils==0.36.5 # via -r requirements.in
+sqlalchemy==1.3.16 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils
+tabulate==0.8.7 # via -r requirements.in
 twofish==0.3.0 # via pyjks
-urllib3==1.25.7 # via botocore, requests
+urllib3==1.25.8 # via botocore, requests
 vine==1.3.0 # via amqp, celery
-werkzeug==0.16.0 # via flask
-xmltodict==0.12.0
-zipp==0.6.0 # via importlib-metadata
+werkzeug==1.0.1 # via flask
+xmltodict==0.12.0 # via -r requirements.in
 
 # The following packages are considered to be unsafe in a requirements file:
 # setuptools
diff --git a/setup.py b/setup.py
index fa5a23bc..4ce03d70 100644
--- a/setup.py
+++ b/setup.py
@@ -45,16 +45,20 @@ with open(os.path.join(ROOT, 'lemur', '__about__.py')) as f:
     exec(f.read(), about)  # nosec: about file is benign
 
 install_requires_g = parse_requirements("requirements.txt", session=PipSession())
-install_requires = [str(ir.req) for ir in install_requires_g]
-
 tests_require_g = parse_requirements("requirements-tests.txt", session=PipSession())
-tests_require = [str(ir.req) for ir in tests_require_g]
-
 docs_require_g = parse_requirements("requirements-docs.txt", session=PipSession())
-docs_require = [str(ir.req) for ir in docs_require_g]
-
 dev_requires_g = parse_requirements("requirements-dev.txt", session=PipSession())
-dev_requires = [str(ir.req) for ir in dev_requires_g]
+
+if tuple(map(int, pip.__version__.split('.'))) >= (20, 1):
+    install_requires = [str(ir.requirement) for ir in install_requires_g]
+    tests_require = [str(ir.requirement) for ir in tests_require_g]
+    docs_require = [str(ir.requirement) for ir in docs_require_g]
+    dev_requires = [str(ir.requirement) for ir in dev_requires_g]
+else:
+    install_requires = [str(ir.req) for ir in install_requires_g]
+    tests_require = [str(ir.req) for ir in tests_require_g]
+    docs_require = [str(ir.req) for ir in docs_require_g]
+    dev_requires = [str(ir.req) for ir in dev_requires_g]
 
 
 class SmartInstall(install):