Merge branch 'master' into master

commit cc4fc66c93
Author: Hossein Shafagh (committed via GitHub)
Date: 2020-05-22 09:57:46 -07:00
24 changed files with 926 additions and 471 deletions

View File

@@ -11,12 +11,12 @@
     "angular": "1.4.9",
     "json3": "~3.3",
     "es5-shim": "~4.5.0",
-    "bootstrap": "~3.3.6",
     "angular-bootstrap": "~1.1.1",
     "angular-animate": "~1.4.9",
     "restangular": "~1.5.1",
     "ng-table": "~0.8.3",
     "moment": "~2.11.1",
+    "bootstrap": "~3.4.1",
     "angular-loading-bar": "~0.8.0",
     "angular-moment": "~0.10.3",
     "moment-range": "~2.1.0",
@@ -24,7 +24,7 @@
     "angularjs-toaster": "~1.0.0",
     "angular-chart.js": "~0.8.8",
     "ngletteravatar": "~4.0.0",
-    "bootswatch": "~3.3.6",
+    "bootswatch": "~3.4.1",
     "fontawesome": "~4.5.0",
     "satellizer": "~0.13.4",
     "angular-ui-router": "~0.2.15",

View File

@@ -390,6 +390,10 @@ Here are the Celery configuration variables that should be set::
     CELERY_IMPORTS = ('lemur.common.celery')
     CELERY_TIMEZONE = 'UTC'
 
+Do not forget to import the crontab module in your configuration file::
+
+    from celery.task.schedules import crontab
+
 You must start a single Celery scheduler instance and one or more worker instances in order to handle incoming tasks.
 The scheduler can be started with::
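For readers wiring this up, a minimal configuration sketch follows. The schedule entry and its every-minute cadence are illustrative assumptions; only the variable names, the crontab import, and the task module path come from this commit::

    from celery.task.schedules import crontab

    CELERY_IMPORTS = ('lemur.common.celery')
    CELERY_TIMEZONE = 'UTC'

    CELERYBEAT_SCHEDULE = {
        'report_celery_last_success_metrics': {
            'task': 'lemur.common.celery.report_celery_last_success_metrics',
            # assumption: emit the task-freshness metric every minute
            'schedule': crontab(minute='*'),
        },
    }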

View File

@@ -127,6 +127,10 @@ def retrieve_user(user_api_url, access_token):
     # retrieve information about the current user.
     r = requests.get(user_api_url, params=user_params, headers=headers)
 
+    # Some IDPs, like "Keycloak", require a POST instead of a GET
+    if r.status_code == 400:
+        r = requests.post(user_api_url, data=user_params, headers=headers)
+
     profile = r.json()
     user = user_service.get_by_email(profile["email"])
@@ -434,7 +438,7 @@ class OAuth2(Resource):
             verify_cert=verify_cert,
         )
 
-        jwks_url = current_app.config.get("PING_JWKS_URL")
+        jwks_url = current_app.config.get("OAUTH2_JWKS_URL")
         error_code = validate_id_token(id_token, args["clientId"], jwks_url)
         if error_code:
             return error_code

View File

@@ -5,29 +5,19 @@
     :license: Apache, see LICENSE for more details.
     .. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
 """
-import sys
 import multiprocessing
-from tabulate import tabulate
+import sys
-from sqlalchemy import or_
 from flask import current_app
-from flask_script import Manager
 from flask_principal import Identity, identity_changed
+from flask_script import Manager
+from sqlalchemy import or_
+from tabulate import tabulate
 
 from lemur import database
-from lemur.extensions import sentry
-from lemur.extensions import metrics
-from lemur.plugins.base import plugins
-from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
-from lemur.deployment import service as deployment_service
-from lemur.endpoints import service as endpoint_service
-from lemur.notifications.messaging import send_rotation_notification
-from lemur.domains.models import Domain
 from lemur.authorities.models import Authority
-from lemur.certificates.schemas import CertificateOutputSchema
+from lemur.authorities.service import get as authorities_get_by_id
 from lemur.certificates.models import Certificate
+from lemur.certificates.schemas import CertificateOutputSchema
 from lemur.certificates.service import (
     reissue_certificate,
     get_certificate_primitives,
@@ -35,9 +25,16 @@ from lemur.certificates.service import (
     get_by_name,
     get_all_certs,
     get,
+    get_all_certs_attached_to_endpoint_without_autorotate,
 )
 from lemur.certificates.verify import verify_string
+from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
+from lemur.deployment import service as deployment_service
+from lemur.domains.models import Domain
+from lemur.endpoints import service as endpoint_service
+from lemur.extensions import sentry, metrics
+from lemur.notifications.messaging import send_rotation_notification
+from lemur.plugins.base import plugins
 
 manager = Manager(usage="Handles all certificate related tasks.")
@@ -482,3 +479,45 @@ def check_revoked():
             cert.status = "unknown"
 
         database.update(cert)
+
+
+@manager.command
+def automatically_enable_autorotate():
+    """
+    This function automatically enables auto-rotation for unexpired certificates that are
+    attached to an endpoint but do not have autorotate enabled.
+
+    WARNING: This will overwrite the Auto-rotate toggle!
+    """
+    log_data = {
+        "function": f"{__name__}.{sys._getframe().f_code.co_name}",
+        "message": "Enabling auto-rotate for certificate"
+    }
+
+    permitted_authorities = current_app.config.get("ENABLE_AUTO_ROTATE_AUTHORITY", [])
+
+    eligible_certs = get_all_certs_attached_to_endpoint_without_autorotate()
+    for cert in eligible_certs:
+
+        if cert.authority_id not in permitted_authorities:
+            continue
+
+        log_data["certificate"] = cert.name
+        log_data["certificate_id"] = cert.id
+        log_data["authority_id"] = cert.authority_id
+        log_data["authority_name"] = authorities_get_by_id(cert.authority_id).name
+        if cert.destinations:
+            log_data["destination_names"] = ', '.join([d.label for d in cert.destinations])
+        else:
+            log_data["destination_names"] = "NONE"
+        current_app.logger.info(log_data)
+        metrics.send("automatically_enable_autorotate",
+                     "counter", 1,
+                     metric_tags={"certificate": log_data["certificate"],
+                                  "certificate_id": log_data["certificate_id"],
+                                  "authority_id": log_data["authority_id"],
+                                  "authority_name": log_data["authority_name"],
+                                  "destination_names": log_data["destination_names"]
+                                  })
+        cert.rotation = True
+        database.update(cert)

View File

@@ -321,7 +321,8 @@ class Certificate(db.Model):
     @hybrid_property
     def expired(self):
-        if self.not_after <= arrow.utcnow():
+        # can't compare offset-naive and offset-aware datetimes
+        if arrow.Arrow.fromdatetime(self.not_after) <= arrow.utcnow():
             return True
 
     @expired.expression

View File

@@ -118,6 +118,21 @@ def get_all_pending_cleaning_expired(source):
     )
 
 
+def get_all_certs_attached_to_endpoint_without_autorotate():
+    """
+    Retrieves all certificates that are attached to an endpoint, but that do not have autorotate enabled.
+
+    :return: list of certificates attached to an endpoint without autorotate
+    """
+    return (
+        Certificate.query.filter(Certificate.endpoints.any())
+        .filter(Certificate.rotation == False)
+        .filter(Certificate.not_after >= arrow.now())
+        .filter(not_(Certificate.replaced.any()))
+        .all()  # noqa
+    )
+
+
 def get_all_pending_cleaning_expiring_in_days(source, days_to_expire):
     """
     Retrieves all certificates that are available for cleaning, not attached to endpoint,
@@ -144,7 +159,9 @@ def get_all_pending_cleaning_issued_since_days(source, days_since_issuance):
     :param source: the source to search for certificates
     :return: list of pending certificates
     """
-    not_in_use_window = arrow.now().shift(days=-days_since_issuance).format("YYYY-MM-DD")
+    not_in_use_window = (
+        arrow.now().shift(days=-days_since_issuance).format("YYYY-MM-DD")
+    )
     return (
         Certificate.query.filter(Certificate.sources.any(id=source.id))
         .filter(not_(Certificate.endpoints.any()))
@@ -367,9 +384,11 @@ def render(args):
     show_expired = args.pop("showExpired")
     if show_expired != 1:
-        one_month_old = arrow.now()\
-            .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1))\
-            .format("YYYY-MM-DD")
+        one_month_old = (
+            arrow.now()
+            .shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1))
+            .format("YYYY-MM-DD")
+        )
         query = query.filter(Certificate.not_after > one_month_old)
 
     time_range = args.pop("time_range")

View File

@@ -10,27 +10,27 @@ command: celery -A lemur.common.celery worker --loglevel=info -l DEBUG -B
 import copy
 import sys
 import time
+from datetime import datetime, timezone, timedelta
 
 from celery import Celery
+from celery.app.task import Context
 from celery.exceptions import SoftTimeLimitExceeded
+from celery.signals import task_failure, task_received, task_revoked, task_success
-from datetime import datetime, timezone, timedelta
 from flask import current_app
 
 from lemur.authorities.service import get as get_authority
+from lemur.certificates import cli as cli_certificate
 from lemur.common.redis import RedisHandler
 from lemur.destinations import service as destinations_service
+from lemur.dns_providers import cli as cli_dns_providers
+from lemur.endpoints import cli as cli_endpoints
 from lemur.extensions import metrics, sentry
 from lemur.factory import create_app
+from lemur.notifications import cli as cli_notification
 from lemur.notifications.messaging import send_pending_failure_notification
 from lemur.pending_certificates import service as pending_certificate_service
 from lemur.plugins.base import plugins
 from lemur.sources.cli import clean, sync, validate_sources
 from lemur.sources.service import add_aws_destination_to_sources
-from lemur.certificates import cli as cli_certificate
-from lemur.dns_providers import cli as cli_dns_providers
-from lemur.notifications import cli as cli_notification
-from lemur.endpoints import cli as cli_endpoints
 
 if current_app:
     flask_app = current_app
@@ -67,7 +67,7 @@ def is_task_active(fun, task_id, args):
     from celery.task.control import inspect
 
     if not args:
-        args = '()'  # empty args
+        args = "()"  # empty args
 
     i = inspect()
     active_tasks = i.active()
@@ -80,6 +80,37 @@ def is_task_active(fun, task_id, args):
     return False
 
 
+def get_celery_request_tags(**kwargs):
+    request = kwargs.get("request")
+    sender_hostname = "unknown"
+    sender = kwargs.get("sender")
+    if sender:
+        try:
+            sender_hostname = sender.hostname
+        except AttributeError:
+            sender_hostname = vars(sender.request).get("origin", "unknown")
+    if request and not isinstance(
+        request, Context
+    ):  # unlike others, task_revoked sends a Context for `request`
+        task_name = request.name
+        task_id = request.id
+        receiver_hostname = request.hostname
+    else:
+        task_name = sender.name
+        task_id = sender.request.id
+        receiver_hostname = sender.request.hostname
+
+    tags = {
+        "task_name": task_name,
+        "task_id": task_id,
+        "sender_hostname": sender_hostname,
+        "receiver_hostname": receiver_hostname,
+    }
+    if kwargs.get("exception"):
+        tags["error"] = repr(kwargs["exception"])
+    return tags
+
+
 @celery.task()
 def report_celery_last_success_metrics():
     """
@@ -89,7 +120,6 @@ def report_celery_last_success_metrics():
     report_celery_last_success_metrics should be ran periodically to emit metrics on when a task was last successful.
     Admins can then alert when tasks are not ran when intended. Admins should also alert when no metrics are emitted
     from this function.
-
     """
     function = f"{__name__}.{sys._getframe().f_code.co_name}"
     task_id = None
@@ -108,15 +138,91 @@ def report_celery_last_success_metrics():
         return
 
     current_time = int(time.time())
-    schedule = current_app.config.get('CELERYBEAT_SCHEDULE')
+    schedule = current_app.config.get("CELERYBEAT_SCHEDULE")
     for _, t in schedule.items():
         task = t.get("task")
         last_success = int(red.get(f"{task}.last_success") or 0)
-        metrics.send(f"{task}.time_since_last_success", 'gauge', current_time - last_success)
+        metrics.send(
+            f"{task}.time_since_last_success", "gauge", current_time - last_success
+        )
     red.set(
         f"{function}.last_success", int(time.time())
     )  # Alert if this metric is not seen
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+
+
+@task_received.connect
+def report_number_pending_tasks(**kwargs):
+    """
+    Report the number of pending tasks to our metrics broker every time a task is published. This metric can be used
+    for autoscaling workers.
+    https://docs.celeryproject.org/en/latest/userguide/signals.html#task-received
+    """
+    with flask_app.app_context():
+        metrics.send(
+            "celery.new_pending_task",
+            "TIMER",
+            1,
+            metric_tags=get_celery_request_tags(**kwargs),
+        )
+
+
+@task_success.connect
+def report_successful_task(**kwargs):
+    """
+    Report a generic success metric as tasks to our metrics broker every time a task finished correctly.
+    This metric can be used for autoscaling workers.
+    https://docs.celeryproject.org/en/latest/userguide/signals.html#task-success
+    """
+    with flask_app.app_context():
+        tags = get_celery_request_tags(**kwargs)
+        red.set(f"{tags['task_name']}.last_success", int(time.time()))
+        metrics.send("celery.successful_task", "TIMER", 1, metric_tags=tags)
+
+
+@task_failure.connect
+def report_failed_task(**kwargs):
+    """
+    Report a generic failure metric as tasks to our metrics broker every time a task fails.
+    This metric can be used for alerting.
+    https://docs.celeryproject.org/en/latest/userguide/signals.html#task-failure
+    """
+    with flask_app.app_context():
+        log_data = {
+            "function": f"{__name__}.{sys._getframe().f_code.co_name}",
+            "Message": "Celery Task Failure",
+        }
+
+        # Add traceback if exception info is in the kwargs
+        einfo = kwargs.get("einfo")
+        if einfo:
+            log_data["traceback"] = einfo.traceback
+
+        error_tags = get_celery_request_tags(**kwargs)
+
+        log_data.update(error_tags)
+        current_app.logger.error(log_data)
+        metrics.send("celery.failed_task", "TIMER", 1, metric_tags=error_tags)
+
+
+@task_revoked.connect
+def report_revoked_task(**kwargs):
+    """
+    Report a generic failure metric as tasks to our metrics broker every time a task is revoked.
+    This metric can be used for alerting.
+    https://docs.celeryproject.org/en/latest/userguide/signals.html#task-revoked
+    """
+    with flask_app.app_context():
+        log_data = {
+            "function": f"{__name__}.{sys._getframe().f_code.co_name}",
+            "Message": "Celery Task Revoked",
+        }
+
+        error_tags = get_celery_request_tags(**kwargs)
+
+        log_data.update(error_tags)
+        current_app.logger.error(log_data)
+        metrics.send("celery.revoked_task", "TIMER", 1, metric_tags=error_tags)
 
 
 @celery.task(soft_time_limit=600)
@@ -217,15 +323,15 @@ def fetch_acme_cert(id):
     log_data["failed"] = failed
     log_data["wrong_issuer"] = wrong_issuer
     current_app.logger.debug(log_data)
-    metrics.send(f"{function}.resolved", 'gauge', new)
-    metrics.send(f"{function}.failed", 'gauge', failed)
-    metrics.send(f"{function}.wrong_issuer", 'gauge', wrong_issuer)
+    metrics.send(f"{function}.resolved", "gauge", new)
+    metrics.send(f"{function}.failed", "gauge", failed)
+    metrics.send(f"{function}.wrong_issuer", "gauge", wrong_issuer)
     print(
         "[+] Certificates: New: {new} Failed: {failed} Not using ACME: {wrong_issuer}".format(
             new=new, failed=failed, wrong_issuer=wrong_issuer
         )
     )
-    red.set(f'{function}.last_success', int(time.time()))
+    return log_data
 
 
 @celery.task()
@@ -262,8 +368,8 @@ def fetch_all_pending_acme_certs():
             current_app.logger.debug(log_data)
             fetch_acme_cert.delay(cert.id)
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
 @celery.task()
@@ -296,8 +402,8 @@ def remove_old_acme_certs():
             current_app.logger.debug(log_data)
             pending_certificate_service.delete(cert)
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
 @celery.task()
@@ -328,11 +434,11 @@ def clean_all_sources():
         current_app.logger.debug(log_data)
         clean_source.delay(source.label)
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
-@celery.task(soft_time_limit=600)
+@celery.task(soft_time_limit=3600)
 def clean_source(source):
     """
     This celery task will clean the specified source. This is a destructive operation that will delete unused
@@ -366,6 +472,7 @@ def clean_source(source):
         current_app.logger.error(log_data)
         sentry.captureException()
         metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
+    return log_data
 
 
 @celery.task()
@@ -395,8 +502,8 @@ def sync_all_sources():
         current_app.logger.debug(log_data)
         sync_source.delay(source.label)
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
 @celery.task(soft_time_limit=7200)
@@ -428,19 +535,23 @@ def sync_source(source):
     current_app.logger.debug(log_data)
     try:
         sync([source])
-        metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source})
+        metrics.send(
+            f"{function}.success", "counter", 1, metric_tags={"source": source}
+        )
     except SoftTimeLimitExceeded:
         log_data["message"] = "Error syncing source: Time limit exceeded."
         current_app.logger.error(log_data)
         sentry.captureException()
-        metrics.send("sync_source_timeout", "counter", 1, metric_tags={"source": source})
+        metrics.send(
+            "sync_source_timeout", "counter", 1, metric_tags={"source": source}
+        )
         metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
         return
 
     log_data["message"] = "Done syncing source"
     current_app.logger.debug(log_data)
-    metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source})
-    red.set(f'{function}.last_success', int(time.time()))
+    metrics.send(f"{function}.success", "counter", 1, metric_tags={"source": source})
+    return log_data
 
 
 @celery.task()
@@ -477,8 +588,8 @@ def sync_source_destination():
     log_data["message"] = "completed Syncing AWS destinations and sources"
     current_app.logger.debug(log_data)
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
 @celery.task(soft_time_limit=3600)
@@ -515,8 +626,8 @@ def certificate_reissue():
     log_data["message"] = "reissuance completed"
     current_app.logger.debug(log_data)
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
 @celery.task(soft_time_limit=3600)
@@ -534,7 +645,6 @@ def certificate_rotate():
         "function": function,
         "message": "rotating certificates",
         "task_id": task_id,
-
     }
 
     if task_id and is_task_active(function, task_id, None):
@@ -554,8 +664,8 @@ def certificate_rotate():
     log_data["message"] = "rotation completed"
     current_app.logger.debug(log_data)
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
 @celery.task(soft_time_limit=3600)
@@ -590,8 +700,8 @@ def endpoints_expire():
         metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
         return
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
 @celery.task(soft_time_limit=600)
@@ -626,8 +736,8 @@ def get_all_zones():
         metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
         return
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
 @celery.task(soft_time_limit=3600)
@@ -662,8 +772,8 @@ def check_revoked():
        metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
        return
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
 
 
 @celery.task(soft_time_limit=3600)
@@ -690,7 +800,9 @@ def notify_expirations():
     current_app.logger.debug(log_data)
     try:
-        cli_notification.expirations(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", []))
+        cli_notification.expirations(
+            current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])
+        )
     except SoftTimeLimitExceeded:
         log_data["message"] = "Notify expiring Time limit exceeded."
         current_app.logger.error(log_data)
@@ -698,5 +810,29 @@ def notify_expirations():
         metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
         return
 
-    red.set(f'{function}.last_success', int(time.time()))
-    metrics.send(f"{function}.success", 'counter', 1)
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
+
+
+@celery.task(soft_time_limit=3600)
+def enable_autorotate_for_certs_attached_to_endpoint():
+    """
+    This celery task automatically enables autorotation for unexpired certificates that are
+    attached to an endpoint but do not have autorotate enabled.
+    :return:
+    """
+    function = f"{__name__}.{sys._getframe().f_code.co_name}"
+    task_id = None
+    if celery.current_task:
+        task_id = celery.current_task.request.id
+
+    log_data = {
+        "function": function,
+        "task_id": task_id,
+        "message": "Enabling autorotate to eligible certificates",
+    }
+    current_app.logger.debug(log_data)
+
+    cli_certificate.automatically_enable_autorotate()
+    metrics.send(f"{function}.success", "counter", 1)
+    return log_data
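Operators who want this new task to run automatically could add it to the CELERYBEAT_SCHEDULE referenced elsewhere in this commit. A sketch follows; the once-a-day cadence is an assumption, only the task path (lemur.common.celery) comes from this diff::

    from celery.task.schedules import crontab

    CELERYBEAT_SCHEDULE = {
        'enable_autorotate_for_certs_attached_to_endpoint': {
            'task': 'lemur.common.celery.enable_autorotate_for_certs_attached_to_endpoint',
            # assumption: enabling autorotate once a day is sufficient
            'schedule': crontab(hour=6, minute=30),
        },
    }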

View File

@@ -2,6 +2,7 @@ import re
 import unicodedata
 
 from cryptography import x509
+from cryptography.hazmat.primitives.serialization import Encoding
 from flask import current_app
 
 from lemur.common.utils import is_selfsigned
@@ -71,12 +72,20 @@ def common_name(cert):
     :return: Common name or None
     """
     try:
-        return cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)[
-            0
-        ].value.strip()
+        subject_oid = cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)
+        if len(subject_oid) > 0:
+            return subject_oid[0].value.strip()
+        return None
     except Exception as e:
         sentry.captureException()
-        current_app.logger.error("Unable to get common name! {0}".format(e))
+        current_app.logger.error(
+            {
+                "message": "Unable to get common name",
+                "error": e,
+                "public_key": cert.public_bytes(Encoding.PEM).decode("utf-8")
+            },
+            exc_info=True
+        )
 
 
 def organization(cert):

View File

@@ -1,11 +1,10 @@
-import time
-import requests
 import json
 import sys
+import time
 
 import lemur.common.utils as utils
 import lemur.dns_providers.util as dnsutil
+import requests
 from flask import current_app
 
 from lemur.extensions import metrics, sentry
@@ -17,7 +16,9 @@ REQUIRED_VARIABLES = [
 
 
 class Zone:
-    """ This class implements a PowerDNS zone in JSON. """
+    """
+    This class implements a PowerDNS zone in JSON.
+    """
 
     def __init__(self, _data):
         self._data = _data
@@ -39,7 +40,9 @@
 
 
 class Record:
-    """ This class implements a PowerDNS record. """
+    """
+    This class implements a PowerDNS record.
+    """
 
     def __init__(self, _data):
         self._data = _data
@@ -49,20 +52,30 @@ class Record:
         return self._data["name"]
 
     @property
-    def disabled(self):
-        return self._data["disabled"]
+    def type(self):
+        return self._data["type"]
+
+    @property
+    def ttl(self):
+        return self._data["ttl"]
 
     @property
     def content(self):
         return self._data["content"]
 
     @property
-    def ttl(self):
-        return self._data["ttl"]
+    def disabled(self):
+        return self._data["disabled"]
 
 
 def get_zones(account_number):
-    """Retrieve authoritative zones from the PowerDNS API and return a list"""
+    """
+    Retrieve authoritative zones from the PowerDNS API and return a list of zones
+
+    :param account_number:
+    :raise: Exception
+    :return: list of Zone Objects
+    """
     _check_conf()
     server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
     path = f"/api/v1/servers/{server_id}/zones"
@@ -90,44 +103,41 @@ def get_zones(account_number):
 
 
 def create_txt_record(domain, token, account_number):
-    """ Create a TXT record for the given domain and token and return a change_id tuple """
+    """
+    Create a TXT record for the given domain and token and return a change_id tuple
+
+    :param domain: FQDN
+    :param token: challenge value
+    :param account_number:
+    :return: tuple of domain/token
+    """
     _check_conf()
-    zone_name = _get_zone_name(domain, account_number)
-    server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
-    zone_id = zone_name + "."
-    domain_id = domain + "."
-    path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
-    payload = {
-        "rrsets": [
-            {
-                "name": domain_id,
-                "type": "TXT",
-                "ttl": 300,
-                "changetype": "REPLACE",
-                "records": [
-                    {
-                        "content": f"\"{token}\"",
-                        "disabled": False
-                    }
-                ],
-                "comments": []
-            }
-        ]
-    }
     function = sys._getframe().f_code.co_name
     log_data = {
         "function": function,
         "fqdn": domain,
         "token": token,
     }
+
+    # Create new record
+    domain_id = domain + "."
+    records = [Record({'name': domain_id, 'content': f"\"{token}\"", 'disabled': False})]
+
+    # Get current records
+    cur_records = _get_txt_records(domain)
+    for record in cur_records:
+        if record.content != token:
+            records.append(record)
+
     try:
-        _patch(path, payload)
-        log_data["message"] = "TXT record successfully created"
+        _patch_txt_records(domain, account_number, records)
+        log_data["message"] = "TXT record(s) successfully created"
         current_app.logger.debug(log_data)
     except Exception as e:
         sentry.captureException()
         log_data["Exception"] = e
-        log_data["message"] = "Unable to create TXT record"
+        log_data["message"] = "Unable to create TXT record(s)"
         current_app.logger.debug(log_data)
 
     change_id = (domain, token)
@@ -136,8 +146,11 @@ def create_txt_record(domain, token, account_number):
 
 def wait_for_dns_change(change_id, account_number=None):
     """
-    Checks the authoritative DNS Server to see if changes have propagated to DNS
+    Checks the authoritative DNS Server to see if changes have propagated.
+    Retries and waits until successful.
+
+    :param change_id: tuple of domain/token
+    :param account_number:
+    :return:
     """
     _check_conf()
     domain, token = change_id
@@ -171,53 +184,115 @@
 
 
 def delete_txt_record(change_id, account_number, domain, token):
-    """ Delete the TXT record for the given domain and token """
+    """
+    Delete the TXT record for the given domain and token
+
+    :param change_id: tuple of domain/token
+    :param account_number:
+    :param domain: FQDN
+    :param token: challenge to delete
+    :return:
+    """
     _check_conf()
-    zone_name = _get_zone_name(domain, account_number)
-    server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
-    zone_id = zone_name + "."
-    domain_id = domain + "."
-    path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
-    payload = {
-        "rrsets": [
-            {
-                "name": domain_id,
-                "type": "TXT",
-                "ttl": 300,
-                "changetype": "DELETE",
-                "records": [
-                    {
-                        "content": f"\"{token}\"",
-                        "disabled": False
-                    }
-                ],
-                "comments": []
-            }
-        ]
-    }
     function = sys._getframe().f_code.co_name
     log_data = {
         "function": function,
         "fqdn": domain,
-        "token": token
+        "token": token,
     }
-    try:
-        _patch(path, payload)
-        log_data["message"] = "TXT record successfully deleted"
-        current_app.logger.debug(log_data)
-    except Exception as e:
-        sentry.captureException()
-        log_data["Exception"] = e
-        log_data["message"] = "Unable to delete TXT record"
-        current_app.logger.debug(log_data)
+
+    """
+    Get existing TXT records matching the domain from DNS
+    The token to be deleted should already exist
+    There may be other records with different tokens as well
+    """
+    cur_records = _get_txt_records(domain)
+    found = False
+    new_records = []
+    for record in cur_records:
+        if record.content == f"\"{token}\"":
+            found = True
+        else:
+            new_records.append(record)
+
+    # Since the matching token is not in DNS, there is nothing to delete
+    if not found:
+        log_data["message"] = "Unable to delete TXT record: Token not found in existing TXT records"
+        current_app.logger.debug(log_data)
+        return
+
+    # The record to delete has been found AND there are other tokens set on the same domain
+    # Since we only want to delete one token value from the RRSet, we need to use the Patch command to
+    # overwrite the current RRSet with the existing records.
+    elif new_records:
+        try:
+            _patch_txt_records(domain, account_number, new_records)
+            log_data["message"] = "TXT record successfully deleted"
+            current_app.logger.debug(log_data)
+        except Exception as e:
+            sentry.captureException()
+            log_data["Exception"] = e
+            log_data["message"] = "Unable to delete TXT record: patching exception"
+            current_app.logger.debug(log_data)
+
+    # The record to delete has been found AND there are no other token values set on the same domain
+    # Use the Delete command to delete the whole RRSet.
+    else:
+        zone_name = _get_zone_name(domain, account_number)
+        server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
+        zone_id = zone_name + "."
+        domain_id = domain + "."
+        path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
+        payload = {
+            "rrsets": [
+                {
+                    "name": domain_id,
+                    "type": "TXT",
+                    "ttl": 300,
+                    "changetype": "DELETE",
+                    "records": [
+                        {
+                            "content": f"\"{token}\"",
+                            "disabled": False
+                        }
+                    ],
+                    "comments": []
+                }
+            ]
+        }
+        function = sys._getframe().f_code.co_name
+        log_data = {
+            "function": function,
+            "fqdn": domain,
+            "token": token
+        }
+        try:
+            _patch(path, payload)
+            log_data["message"] = "TXT record successfully deleted"
+            current_app.logger.debug(log_data)
+        except Exception as e:
+            sentry.captureException()
+            log_data["Exception"] = e
+            log_data["message"] = "Unable to delete TXT record"
+            current_app.logger.debug(log_data)
 
 
 def _check_conf():
+    """
+    Verifies required configuration variables are set
+
+    :return:
+    """
     utils.validate_conf(current_app, REQUIRED_VARIABLES)
 
 
 def _generate_header():
-    """Generate a PowerDNS API header and return it as a dictionary"""
+    """
+    Generate a PowerDNS API header and return it as a dictionary
+
+    :return: Dict of header parameters
+    """
     api_key_name = current_app.config.get("ACME_POWERDNS_APIKEYNAME")
     api_key = current_app.config.get("ACME_POWERDNS_APIKEY")
     headers = {api_key_name: api_key}
@@ -225,7 +300,13 @@ def _generate_header():
 
 
 def _get_zone_name(domain, account_number):
-    """Get most specific matching zone for the given domain and return as a String"""
+    """
+    Get most specific matching zone for the given domain and return as a String
+
+    :param domain: FQDN
+    :param account_number:
+    :return: FQDN of domain
+    """
     zones = get_zones(account_number)
     zone_name = ""
     for z in zones:
@@ -243,8 +324,47 @@ def _get_zone_name(domain, account_number):
     return zone_name
 
 
+def _get_txt_records(domain):
+    """
+    Retrieve TXT records for a given domain and return list of Record Objects
+
+    :param domain: FQDN
+    :return: list of Record objects
+    """
+    server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
+
+    path = f"/api/v1/servers/{server_id}/search-data?q={domain}&max=100&object_type=record"
+    function = sys._getframe().f_code.co_name
+    log_data = {
+        "function": function
+    }
+    try:
+        records = _get(path)
+        log_data["message"] = "Retrieved TXT Records Successfully"
+        current_app.logger.debug(log_data)
+
+    except Exception as e:
+        sentry.captureException()
+        log_data["Exception"] = e
+        log_data["message"] = "Failed to Retrieve TXT Records"
+        current_app.logger.debug(log_data)
+        return []
+
+    txt_records = []
+    for record in records:
+        cur_record = Record(record)
+        txt_records.append(cur_record)
+    return txt_records
+
+
 def _get(path, params=None):
-    """ Execute a GET request on the given URL (base_uri + path) and return response as JSON object """
+    """
+    Execute a GET request on the given URL (base_uri + path) and return response as JSON object
+
+    :param path: Relative URL path
+    :param params: additional parameters
+    :return: json response
+    """
     base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN")
     verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True)
     resp = requests.get(
@@ -257,8 +377,54 @@ def _get(path, params=None):
     return resp.json()
 
 
+def _patch_txt_records(domain, account_number, records):
+    """
+    Send Patch request to PowerDNS Server
+
+    :param domain: FQDN
+    :param account_number:
+    :param records: List of Record objects
+    :return:
+    """
+    domain_id = domain + "."
+
+    # Create records
+    txt_records = []
+    for record in records:
+        txt_records.append(
+            {'content': record.content, 'disabled': record.disabled}
+        )
+
+    # Create RRSet
+    payload = {
+        "rrsets": [
+            {
+                "name": domain_id,
+                "type": "TXT",
+                "ttl": 300,
+                "changetype": "REPLACE",
+                "records": txt_records,
+                "comments": []
+            }
+        ]
+    }
+
+    # Create Txt Records
+    server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
+    zone_name = _get_zone_name(domain, account_number)
+    zone_id = zone_name + "."
+    path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
+    _patch(path, payload)
+
+
 def _patch(path, payload):
-    """ Execute a Patch request on the given URL (base_uri + path) with given payload """
+    """
+    Execute a Patch request on the given URL (base_uri + path) with given payload
+
+    :param path:
+    :param payload:
+    :return:
+    """
     base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN")
     verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True)
     resp = requests.patch(

View File

@@ -35,9 +35,10 @@ def get_zones(client=None):
     zones = []
     for page in paginator.paginate():
         for zone in page["HostedZones"]:
-            zones.append(
-                zone["Name"][:-1]
-            )  # We need [:-1] to strip out the trailing dot.
+            if not zone["Config"]["PrivateZone"]:
+                zones.append(
+                    zone["Name"][:-1]
+                )  # We need [:-1] to strip out the trailing dot.
     return zones

View File

@@ -1,11 +1,10 @@
 import unittest
+from unittest.mock import patch, Mock
 
 from cryptography.x509 import DNSName
-from requests.models import Response
-from mock import MagicMock, Mock, patch
 
 from lemur.plugins.lemur_acme import plugin, ultradns
+from mock import MagicMock
+from requests.models import Response
 
 
 class TestAcme(unittest.TestCase):
@@ -57,7 +56,7 @@ class TestAcme(unittest.TestCase):
     @patch("lemur.plugins.lemur_acme.plugin.len", return_value=1)
     @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_dns_challenges")
     def test_start_dns_challenge(
        self, mock_get_dns_challenges, mock_len, mock_app, mock_acme
     ):
         assert mock_len
         mock_order = Mock()
@@ -88,7 +87,7 @@ class TestAcme(unittest.TestCase):
     @patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change")
     @patch("time.sleep")
     def test_complete_dns_challenge_success(
        self, mock_sleep, mock_wait_for_dns_change, mock_current_app, mock_acme
     ):
         mock_dns_provider = Mock()
         mock_dns_provider.wait_for_dns_change = Mock(return_value=True)
@@ -112,7 +111,7 @@ class TestAcme(unittest.TestCase):
     @patch("lemur.plugins.lemur_acme.plugin.current_app")
     @patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change")
     def test_complete_dns_challenge_fail(
        self, mock_wait_for_dns_change, mock_current_app, mock_acme
     ):
         mock_dns_provider = Mock()
         mock_dns_provider.wait_for_dns_change = Mock(return_value=True)
@@ -140,12 +139,12 @@ class TestAcme(unittest.TestCase):
     @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_dns_challenges")
     @patch("lemur.plugins.lemur_acme.plugin.current_app")
     def test_request_certificate(
        self,
        mock_current_app,
        mock_get_dns_challenges,
        mock_jose,
        mock_crypto,
        mock_acme,
     ):
         mock_cert_response = Mock()
         mock_cert_response.body = "123"
@@ -182,7 +181,7 @@ class TestAcme(unittest.TestCase):
         assert result_client
         assert result_registration
 
-    @patch("lemur.plugins.lemur_acme.plugin.current_app")
+    @patch('lemur.plugins.lemur_acme.plugin.current_app')
     def test_get_domains_single(self, mock_current_app):
         options = {"common_name": "test.netflix.net"}
         result = self.acme.get_domains(options)
@@ -288,14 +287,14 @@ class TestAcme(unittest.TestCase):
     @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations")
     @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate")
     def test_get_ordered_certificate(
        self,
        mock_request_certificate,
        mock_finalize_authorizations,
        mock_get_authorizations,
        mock_dns_provider_service,
        mock_authorization_service,
        mock_current_app,
        mock_acme,
     ):
         mock_client = Mock()
         mock_acme.return_value = (mock_client, "")
@@ -319,14 +318,14 @@ class TestAcme(unittest.TestCase):
     @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations")
     @patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate")
     def test_get_ordered_certificates(
        self,
        mock_request_certificate,
        mock_finalize_authorizations,
        mock_get_authorizations,
        mock_dns_provider_service,
        mock_authorization_service,
        mock_current_app,
        mock_acme,
     ):
         mock_client = Mock()
         mock_acme.return_value = (mock_client, "")

View File

@@ -1,5 +1,5 @@
 import unittest
-from mock import Mock, patch
+from unittest.mock import patch, Mock
 
 from lemur.plugins.lemur_acme import plugin, powerdns
@@ -48,13 +48,14 @@ class TestPowerdns(unittest.TestCase):
         self.assertEqual(result, zone)
 
     @patch("lemur.plugins.lemur_acme.powerdns.current_app")
-    def test_create_txt_record(self, mock_current_app):
+    def test_create_txt_record_write_only(self, mock_current_app):
         domain = "_acme_challenge.test.example.com"
         zone = "test.example.com"
         token = "ABCDEFGHIJ"
         account_number = "1234567890"
         change_id = (domain, token)
         powerdns._check_conf = Mock()
+        powerdns._get_txt_records = Mock(return_value=[])
         powerdns._get_zone_name = Mock(return_value=zone)
         mock_current_app.logger.debug = Mock()
         mock_current_app.config.get = Mock(return_value="localhost")
@@ -63,24 +64,74 @@ class TestPowerdns(unittest.TestCase):
             "function": "create_txt_record",
             "fqdn": domain,
             "token": token,
-            "message": "TXT record successfully created"
+            "message": "TXT record(s) successfully created"
         }
         result = powerdns.create_txt_record(domain, token, account_number)
         mock_current_app.logger.debug.assert_called_with(log_data)
         self.assertEqual(result, change_id)
 
+    @patch("lemur.plugins.lemur_acme.powerdns.current_app")
+    def test_create_txt_record_append(self, mock_current_app):
+        domain = "_acme_challenge.test.example.com"
+        zone = "test.example.com"
+        token = "ABCDEFGHIJ"
+        account_number = "1234567890"
+        change_id = (domain, token)
+        powerdns._check_conf = Mock()
+        cur_token = "123456"
+        cur_records = [powerdns.Record({'name': domain, 'content': f"\"{cur_token}\"", 'disabled': False})]
+        powerdns._get_txt_records = Mock(return_value=cur_records)
+        powerdns._get_zone_name = Mock(return_value=zone)
+        mock_current_app.logger.debug = Mock()
+        mock_current_app.config.get = Mock(return_value="localhost")
+        powerdns._patch = Mock()
+        log_data = {
+            "function": "create_txt_record",
+            "fqdn": domain,
+            "token": token,
+            "message": "TXT record(s) successfully created"
+        }
+        expected_path = f"/api/v1/servers/localhost/zones/test.example.com."
+        expected_payload = {
+            "rrsets": [
+                {
+                    "name": domain + ".",
+                    "type": "TXT",
+                    "ttl": 300,
+                    "changetype": "REPLACE",
+                    "records": [
+                        {
+                            "content": f"\"{token}\"",
+                            "disabled": False
+                        },
+                        {
+                            "content": f"\"{cur_token}\"",
+                            "disabled": False
+                        }
+                    ],
+                    "comments": []
+                }
+            ]
+        }
+        result = powerdns.create_txt_record(domain, token, account_number)
+        mock_current_app.logger.debug.assert_called_with(log_data)
+        powerdns._patch.assert_called_with(expected_path, expected_payload)
+        self.assertEqual(result, change_id)
+
     @patch("lemur.plugins.lemur_acme.powerdns.dnsutil")
     @patch("lemur.plugins.lemur_acme.powerdns.current_app")
     @patch("lemur.extensions.metrics")
     @patch("time.sleep")
     def test_wait_for_dns_change(self, mock_sleep, mock_metrics, mock_current_app, mock_dnsutil):
         domain = "_acme-challenge.test.example.com"
-        token = "ABCDEFG"
+        token1 = "ABCDEFG"
+        token2 = "HIJKLMN"
         zone_name = "test.example.com"
         nameserver = "1.1.1.1"
-        change_id = (domain, token)
+        change_id = (domain, token1)
         powerdns._check_conf = Mock()
-        mock_records = (token,)
+        mock_records = (token2, token1)
         mock_current_app.config.get = Mock(return_value=1)
         powerdns._get_zone_name = Mock(return_value=zone_name)
         mock_dnsutil.get_authoritative_nameserver = Mock(return_value=nameserver)
@@ -114,7 +165,7 @@ class TestPowerdns(unittest.TestCase):
             "function": "delete_txt_record",
             "fqdn": domain,
             "token": token,
-            "message": "TXT record successfully deleted"
+            "message": "Unable to delete TXT record: Token not found in existing TXT records"
         }
         powerdns.delete_txt_record(change_id, account_number, domain, token)
         mock_current_app.logger.debug.assert_called_with(log_data)

View File

@@ -24,6 +24,12 @@ def retry_throttled(exception):
         if exception.response["Error"]["Code"] == "NoSuchEntity":
             return False
 
+        # No need to retry deletion requests if there is a DeleteConflict error.
+        # This error indicates that the certificate is still attached to an entity
+        # and cannot be deleted.
+        if exception.response["Error"]["Code"] == "DeleteConflict":
+            return False
+
     metrics.send("iam_retry", "counter", 1, metric_tags={"exception": str(exception)})
     return True

View File

@@ -216,22 +216,24 @@ class AWSSourcePlugin(SourcePlugin):
         for region in regions:
             elbs = elb.get_all_elbs(account_number=account_number, region=region)
-            current_app.logger.info(
-                "Describing classic load balancers in {0}-{1}".format(
-                    account_number, region
-                )
-            )
+            current_app.logger.info({
+                "message": "Describing classic load balancers",
+                "account_number": account_number,
+                "region": region,
+                "number_of_load_balancers": len(elbs)
+            })
 
             for e in elbs:
                 endpoints.extend(get_elb_endpoints(account_number, region, e))
 
             # fetch advanced ELBs
             elbs_v2 = elb.get_all_elbs_v2(account_number=account_number, region=region)
-            current_app.logger.info(
-                "Describing advanced load balancers in {0}-{1}".format(
-                    account_number, region
-                )
-            )
+            current_app.logger.info({
+                "message": "Describing advanced load balancers",
+                "account_number": account_number,
+                "region": region,
+                "number_of_load_balancers": len(elbs_v2)
+            })
 
             for e in elbs_v2:
                 endpoints.extend(get_elb_endpoints_v2(account_number, region, e))

View File

@@ -1,4 +1,5 @@
 import json
+from unittest.mock import patch, Mock
 
 import arrow
 import pytest
@@ -6,7 +7,6 @@
 from freezegun import freeze_time
 from lemur.plugins.lemur_digicert import plugin
 from lemur.tests.vectors import CSR_STR
-from mock import Mock, patch
 
 
 def config_mock(*args):

View File

@@ -58,6 +58,13 @@ def execute_clean(plugin, certificate, source):
     try:
         plugin.clean(certificate, source.options)
         certificate.sources.remove(source)
+
+        # If we want to remove the source from the certificate, we also need to clear any equivalent destinations to
+        # prevent Lemur from re-uploading the certificate.
+        for destination in certificate.destinations:
+            if destination.label == source.label:
+                certificate.destinations.remove(destination)
+
         certificate_service.database.update(certificate)
         return SUCCESS_METRIC_STATUS
     except Exception as e:

View File

@@ -123,15 +123,19 @@ def sync_endpoints(source):
                          "acct": s.get_option("accountNumber", source.options)})
 
         if not endpoint["certificate"]:
-            current_app.logger.error(
-                "Certificate Not Found. Name: {0} Endpoint: {1}".format(
-                    certificate_name, endpoint["name"]
-                )
-            )
+            current_app.logger.error({
+                "message": "Certificate Not Found",
+                "certificate_name": certificate_name,
+                "endpoint_name": endpoint["name"],
+                "dns_name": endpoint.get("dnsname"),
+                "account": s.get_option("accountNumber", source.options),
+            })
+
             metrics.send("endpoint.certificate.not.found",
                          "counter", 1,
                          metric_tags={"cert": certificate_name, "endpoint": endpoint["name"],
-                                      "acct": s.get_option("accountNumber", source.options)})
+                                      "acct": s.get_option("accountNumber", source.options),
+                                      "dnsname": endpoint.get("dnsname")})
             continue
 
         policy = endpoint.pop("policy")
@@ -193,6 +197,11 @@ def sync_certificates(source, user):
     s = plugins.get(source.plugin_name)
     certificates = s.get_certificates(source.options)
 
+    # emitting the count of certificates on the source
+    metrics.send("sync_certificates_count",
+                 "gauge", len(certificates),
+                 metric_tags={"source": source.label})
+
     for certificate in certificates:
         exists, updated_by_hash = find_cert(certificate)

View File

@@ -9,7 +9,8 @@ from cryptography import x509
 from cryptography.hazmat.backends import default_backend
 from marshmallow import ValidationError
 from freezegun import freeze_time
-from mock import patch
+# from mock import patch
+from unittest.mock import patch
 
 from lemur.certificates.service import create_csr
 from lemur.certificates.views import *  # noqa
@@ -906,12 +907,12 @@ def test_certificate_get_body(client):
     assert response_body["serial"] == "211983098819107449768450703123665283596"
     assert response_body["serialHex"] == "9F7A75B39DAE4C3F9524C68B06DA6A0C"
     assert response_body["distinguishedName"] == (
-        "CN=LemurTrust Unittests Class 1 CA 2018,"
-        "O=LemurTrust Enterprises Ltd,"
-        "OU=Unittesting Operations Center,"
-        "C=EE,"
+        "L=Earth,"
         "ST=N/A,"
-        "L=Earth"
+        "C=EE,"
+        "OU=Unittesting Operations Center,"
+        "O=LemurTrust Enterprises Ltd,"
+        "CN=LemurTrust Unittests Class 1 CA 2018"
     )

View File

@ -6,7 +6,7 @@
"url": "git://github.com/netflix/lemur.git" "url": "git://github.com/netflix/lemur.git"
}, },
"dependencies": { "dependencies": {
"bower": "^1.8.2", "bower": "^1.8.8",
"browser-sync": "^2.26.7", "browser-sync": "^2.26.7",
"del": "^2.2.2", "del": "^2.2.2",
"gulp-autoprefixer": "^3.1.1", "gulp-autoprefixer": "^3.1.1",
@@ -17,10 +17,10 @@
"gulp-flatten": "^0.3.1", "gulp-flatten": "^0.3.1",
"gulp-foreach": "0.1.0", "gulp-foreach": "0.1.0",
"gulp-if": "^2.0.2", "gulp-if": "^2.0.2",
"gulp-imagemin": "^3.1.1", "gulp-imagemin": "^7.1.0",
"gulp-inject": "~4.1.0", "gulp-inject": "~4.1.0",
"gulp-jshint": "^2.0.4", "gulp-jshint": "^2.0.4",
"gulp-less": "^3.0.3", "gulp-less": "^4.0.1",
"gulp-load-plugins": "^1.4.0", "gulp-load-plugins": "^1.4.0",
"gulp-minify-css": "^1.2.4", "gulp-minify-css": "^1.2.4",
"gulp-minify-html": "~1.0.6", "gulp-minify-html": "~1.0.6",
@@ -29,7 +29,7 @@
"gulp-notify": "^2.2.0", "gulp-notify": "^2.2.0",
"gulp-plumber": "^1.1.0", "gulp-plumber": "^1.1.0",
"gulp-print": "^2.0.1", "gulp-print": "^2.0.1",
"gulp-protractor": "3.0.0", "gulp-protractor": "^4.1.1",
"gulp-replace": "~0.5.3", "gulp-replace": "~0.5.3",
"gulp-replace-task": "~0.11.0", "gulp-replace-task": "~0.11.0",
"gulp-rev": "^7.1.2", "gulp-rev": "^7.1.2",
@@ -41,7 +41,7 @@
"gulp-util": "^3.0.1", "gulp-util": "^3.0.1",
"http-proxy": "~1.16.2", "http-proxy": "~1.16.2",
"jshint-stylish": "^2.2.1", "jshint-stylish": "^2.2.1",
"karma": "~1.3.0", "karma": "^4.4.1",
"karma-jasmine": "^1.1.0", "karma-jasmine": "^1.1.0",
"main-bower-files": "^2.13.1", "main-bower-files": "^2.13.1",
"merge-stream": "^1.0.1", "merge-stream": "^1.0.1",
@@ -60,7 +60,7 @@
}, },
"devDependencies": { "devDependencies": {
"gulp": "^3.9.1", "gulp": "^3.9.1",
"jshint": "^2.8.0", "jshint": "^2.11.0",
"karma-chrome-launcher": "^2.0.0" "karma-chrome-launcher": "^2.0.0"
} }
} }

View File

@@ -4,39 +4,41 @@
# #
# pip-compile --no-index --output-file=requirements-dev.txt requirements-dev.in # pip-compile --no-index --output-file=requirements-dev.txt requirements-dev.in
# #
aspy.yaml==1.3.0 # via pre-commit appdirs==1.4.3 # via virtualenv
bleach==3.1.1 # via readme-renderer bleach==3.1.4 # via readme-renderer
certifi==2019.11.28 # via requests certifi==2020.4.5.1 # via requests
cffi==1.14.0 # via cryptography cffi==1.14.0 # via cryptography
cfgv==2.0.1 # via pre-commit cfgv==3.1.0 # via pre-commit
chardet==3.0.4 # via requests chardet==3.0.4 # via requests
cryptography==2.8 # via secretstorage cryptography==2.9.2 # via secretstorage
docutils==0.15.2 # via readme-renderer distlib==0.3.0 # via virtualenv
flake8==3.5.0 docutils==0.16 # via readme-renderer
identify==1.4.9 # via pre-commit filelock==3.0.12 # via virtualenv
idna==2.8 # via requests flake8==3.5.0 # via -r requirements-dev.in
invoke==1.3.0 identify==1.4.14 # via pre-commit
jeepney==0.4.2 # via secretstorage idna==2.9 # via requests
keyring==21.0.0 # via twine invoke==1.4.1 # via -r requirements-dev.in
jeepney==0.4.3 # via keyring, secretstorage
keyring==21.2.0 # via twine
mccabe==0.6.1 # via flake8 mccabe==0.6.1 # via flake8
nodeenv==1.3.3 nodeenv==1.3.5 # via -r requirements-dev.in, pre-commit
pkginfo==1.5.0.1 # via twine pkginfo==1.5.0.1 # via twine
pre-commit==1.21.0 pre-commit==2.4.0 # via -r requirements-dev.in
pycodestyle==2.3.1 # via flake8 pycodestyle==2.3.1 # via flake8
pycparser==2.19 # via cffi pycparser==2.20 # via cffi
pyflakes==1.6.0 # via flake8 pyflakes==1.6.0 # via flake8
pygments==2.5.2 # via readme-renderer pygments==2.6.1 # via readme-renderer
pyyaml==5.2 pyyaml==5.3.1 # via -r requirements-dev.in, pre-commit
readme-renderer==24.0 # via twine readme-renderer==25.0 # via twine
requests-toolbelt==0.9.1 # via twine requests-toolbelt==0.9.1 # via twine
requests==2.22.0 # via requests-toolbelt, twine requests==2.23.0 # via requests-toolbelt, twine
secretstorage==3.1.2 # via keyring secretstorage==3.1.2 # via keyring
six==1.13.0 # via bleach, cfgv, cryptography, pre-commit, readme-renderer six==1.14.0 # via bleach, cryptography, readme-renderer, virtualenv
toml==0.10.0 # via pre-commit toml==0.10.0 # via pre-commit
tqdm==4.41.1 # via twine tqdm==4.45.0 # via twine
twine==3.1.1 twine==3.1.1 # via -r requirements-dev.in
urllib3==1.25.7 # via requests urllib3==1.25.8 # via requests
virtualenv==16.7.9 # via pre-commit virtualenv==20.0.17 # via pre-commit
webencodings==0.5.1 # via bleach webencodings==0.5.1 # via bleach
# The following packages are considered to be unsafe in a requirements file: # The following packages are considered to be unsafe in a requirements file:

View File

@@ -4,111 +4,108 @@
# #
# pip-compile --no-index --output-file=requirements-docs.txt requirements-docs.in # pip-compile --no-index --output-file=requirements-docs.txt requirements-docs.in
# #
acme==1.0.0 acme==1.4.0 # via -r requirements.txt
alabaster==0.7.12 # via sphinx alabaster==0.7.12 # via sphinx
alembic-autogenerate-enums==0.0.2 alembic-autogenerate-enums==0.0.2 # via -r requirements.txt
alembic==1.3.2 alembic==1.4.2 # via -r requirements.txt, flask-migrate
amqp==2.5.2 amqp==2.5.2 # via -r requirements.txt, kombu
aniso8601==8.0.0 aniso8601==8.0.0 # via -r requirements.txt, flask-restful
arrow==0.15.5 arrow==0.15.6 # via -r requirements.txt
asyncpool==1.0 asyncpool==1.0 # via -r requirements.txt
babel==2.8.0 # via sphinx babel==2.8.0 # via sphinx
bcrypt==3.1.7 bcrypt==3.1.7 # via -r requirements.txt, flask-bcrypt, paramiko
billiard==3.6.1.0 billiard==3.6.3.0 # via -r requirements.txt, celery
blinker==1.4 blinker==1.4 # via -r requirements.txt, flask-mail, flask-principal, raven
boto3==1.10.46 boto3==1.13.11 # via -r requirements.txt
botocore==1.13.46 botocore==1.16.11 # via -r requirements.txt, boto3, s3transfer
celery[redis]==4.4.0 celery[redis]==4.4.2 # via -r requirements.txt
certifi==2019.11.28 certifi==2020.4.5.1 # via -r requirements.txt, requests
certsrv==2.1.1 certsrv==2.1.1 # via -r requirements.txt
cffi==1.13.2 cffi==1.14.0 # via -r requirements.txt, bcrypt, cryptography, pynacl
chardet==3.0.4 chardet==3.0.4 # via -r requirements.txt, requests
click==7.0 click==7.1.1 # via -r requirements.txt, flask
cloudflare==2.3.1 cloudflare==2.7.1 # via -r requirements.txt
cryptography==2.8 cryptography==2.9.2 # via -r requirements.txt, acme, josepy, paramiko, pyopenssl, requests
dnspython3==1.15.0 dnspython3==1.15.0 # via -r requirements.txt
dnspython==1.15.0 dnspython==1.15.0 # via -r requirements.txt, dnspython3
docutils==0.15.2 docutils==0.15.2 # via -r requirements.txt, botocore, sphinx
dyn==1.8.1 dyn==1.8.1 # via -r requirements.txt
flask-bcrypt==0.7.1 flask-bcrypt==0.7.1 # via -r requirements.txt
flask-cors==3.0.8 flask-cors==3.0.8 # via -r requirements.txt
flask-mail==0.9.1 flask-mail==0.9.1 # via -r requirements.txt
flask-migrate==2.5.2 flask-migrate==2.5.3 # via -r requirements.txt
flask-principal==0.4.0 flask-principal==0.4.0 # via -r requirements.txt
flask-replicated==1.3 flask-replicated==1.3 # via -r requirements.txt
flask-restful==0.3.7 flask-restful==0.3.8 # via -r requirements.txt
flask-script==2.0.6 flask-script==2.0.6 # via -r requirements.txt
flask-sqlalchemy==2.4.1 flask-sqlalchemy==2.4.1 # via -r requirements.txt, flask-migrate
flask==1.1.1 flask==1.1.2 # via -r requirements.txt, flask-bcrypt, flask-cors, flask-mail, flask-migrate, flask-principal, flask-restful, flask-script, flask-sqlalchemy, raven
future==0.18.2 future==0.18.2 # via -r requirements.txt, cloudflare
gunicorn==20.0.4 gunicorn==20.0.4 # via -r requirements.txt
hvac==0.9.6 hvac==0.10.1 # via -r requirements.txt
idna==2.8 idna==2.9 # via -r requirements.txt, requests
imagesize==1.2.0 # via sphinx imagesize==1.2.0 # via sphinx
importlib-metadata==1.3.0 inflection==0.4.0 # via -r requirements.txt
inflection==0.3.1 itsdangerous==1.1.0 # via -r requirements.txt, flask
itsdangerous==1.1.0 javaobj-py3==0.4.0.1 # via -r requirements.txt, pyjks
javaobj-py3==0.4.0.1 jinja2==2.11.2 # via -r requirements.txt, flask, sphinx
jinja2==2.10.3 jmespath==0.9.5 # via -r requirements.txt, boto3, botocore
jmespath==0.9.4 josepy==1.3.0 # via -r requirements.txt, acme
josepy==1.2.0 jsonlines==1.2.0 # via -r requirements.txt, cloudflare
jsonlines==1.2.0 kombu==4.6.8 # via -r requirements.txt, celery
kombu==4.6.7 lockfile==0.12.2 # via -r requirements.txt
lockfile==0.12.2 logmatic-python==0.1.7 # via -r requirements.txt
logmatic-python==0.1.7 mako==1.1.2 # via -r requirements.txt, alembic
mako==1.1.0 markupsafe==1.1.1 # via -r requirements.txt, jinja2, mako
markupsafe==1.1.1 marshmallow-sqlalchemy==0.23.0 # via -r requirements.txt
marshmallow-sqlalchemy==0.21.0 marshmallow==2.20.4 # via -r requirements.txt, marshmallow-sqlalchemy
marshmallow==2.20.4 ndg-httpsclient==0.5.1 # via -r requirements.txt
mock==3.0.5 packaging==20.3 # via sphinx
more-itertools==8.0.2 paramiko==2.7.1 # via -r requirements.txt
ndg-httpsclient==0.5.1 pem==20.1.0 # via -r requirements.txt
packaging==19.2 # via sphinx psycopg2==2.8.5 # via -r requirements.txt
paramiko==2.7.1 pyasn1-modules==0.2.8 # via -r requirements.txt, pyjks, python-ldap
pem==19.3.0 pyasn1==0.4.8 # via -r requirements.txt, ndg-httpsclient, pyasn1-modules, pyjks, python-ldap
psycopg2==2.8.4 pycparser==2.20 # via -r requirements.txt, cffi
pyasn1-modules==0.2.7 pycryptodomex==3.9.7 # via -r requirements.txt, pyjks
pyasn1==0.4.8 pygments==2.6.1 # via sphinx
pycparser==2.19 pyjks==20.0.0 # via -r requirements.txt
pycryptodomex==3.9.4 pyjwt==1.7.1 # via -r requirements.txt
pygments==2.5.2 # via sphinx pynacl==1.3.0 # via -r requirements.txt, paramiko
pyjks==19.0.0 pyopenssl==19.1.0 # via -r requirements.txt, acme, josepy, ndg-httpsclient, requests
pyjwt==1.7.1 pyparsing==2.4.7 # via packaging
pynacl==1.3.0 pyrfc3339==1.1 # via -r requirements.txt, acme
pyopenssl==19.1.0 python-dateutil==2.8.1 # via -r requirements.txt, alembic, arrow, botocore
pyparsing==2.4.6 # via packaging python-editor==1.0.4 # via -r requirements.txt, alembic
pyrfc3339==1.1 python-json-logger==0.1.11 # via -r requirements.txt, logmatic-python
python-dateutil==2.8.1 python-ldap==3.2.0 # via -r requirements.txt
python-editor==1.0.4 pytz==2019.3 # via -r requirements.txt, acme, babel, celery, flask-restful, pyrfc3339
python-json-logger==0.1.11 pyyaml==5.3.1 # via -r requirements.txt, cloudflare
pytz==2019.3 raven[flask]==6.10.0 # via -r requirements.txt
pyyaml==5.2 redis==3.5.2 # via -r requirements.txt, celery
raven[flask]==6.10.0 requests-toolbelt==0.9.1 # via -r requirements.txt, acme
redis==3.3.11 requests[security]==2.23.0 # via -r requirements.txt, acme, certsrv, cloudflare, hvac, requests-toolbelt, sphinx
requests-toolbelt==0.9.1 retrying==1.3.3 # via -r requirements.txt
requests[security]==2.22.0 s3transfer==0.3.3 # via -r requirements.txt, boto3
retrying==1.3.3 six==1.14.0 # via -r requirements.txt, acme, bcrypt, cryptography, flask-cors, flask-restful, hvac, josepy, jsonlines, packaging, pynacl, pyopenssl, python-dateutil, retrying, sphinxcontrib-httpdomain, sqlalchemy-utils
s3transfer==0.2.1
six==1.13.0
snowballstemmer==2.0.0 # via sphinx snowballstemmer==2.0.0 # via sphinx
sphinx-rtd-theme==0.4.3 sphinx-rtd-theme==0.4.3 # via -r requirements-docs.in
sphinx==2.3.1 sphinx==3.0.3 # via -r requirements-docs.in, sphinx-rtd-theme, sphinxcontrib-httpdomain
sphinxcontrib-applehelp==1.0.1 # via sphinx sphinxcontrib-applehelp==1.0.2 # via sphinx
sphinxcontrib-devhelp==1.0.1 # via sphinx sphinxcontrib-devhelp==1.0.2 # via sphinx
sphinxcontrib-htmlhelp==1.0.2 # via sphinx sphinxcontrib-htmlhelp==1.0.3 # via sphinx
sphinxcontrib-httpdomain==1.7.0 sphinxcontrib-httpdomain==1.7.0 # via -r requirements-docs.in
sphinxcontrib-jsmath==1.0.1 # via sphinx sphinxcontrib-jsmath==1.0.1 # via sphinx
sphinxcontrib-qthelp==1.0.2 # via sphinx sphinxcontrib-qthelp==1.0.3 # via sphinx
sphinxcontrib-serializinghtml==1.1.3 # via sphinx sphinxcontrib-serializinghtml==1.1.4 # via sphinx
sqlalchemy-utils==0.36.1 sqlalchemy-utils==0.36.5 # via -r requirements.txt
sqlalchemy==1.3.12 sqlalchemy==1.3.16 # via -r requirements.txt, alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils
tabulate==0.8.6 tabulate==0.8.7 # via -r requirements.txt
twofish==0.3.0 twofish==0.3.0 # via -r requirements.txt, pyjks
urllib3==1.25.7 urllib3==1.25.8 # via -r requirements.txt, botocore, requests
vine==1.3.0 vine==1.3.0 # via -r requirements.txt, amqp, celery
werkzeug==0.16.0 werkzeug==1.0.1 # via -r requirements.txt, flask
xmltodict==0.12.0 xmltodict==0.12.0 # via -r requirements.txt
zipp==0.6.0
# The following packages are considered to be unsafe in a requirements file: # The following packages are considered to be unsafe in a requirements file:
# setuptools # setuptools

View File

@@ -6,85 +6,87 @@
# #
appdirs==1.4.3 # via black appdirs==1.4.3 # via black
attrs==19.3.0 # via black, jsonschema, pytest attrs==19.3.0 # via black, jsonschema, pytest
aws-sam-translator==1.19.1 # via cfn-lint aws-sam-translator==1.22.0 # via cfn-lint
aws-xray-sdk==2.4.3 # via moto aws-xray-sdk==2.5.0 # via moto
bandit==1.6.2 bandit==1.6.2 # via -r requirements-tests.in
black==19.10b0 black==19.10b0 # via -r requirements-tests.in
boto3==1.10.46 # via aws-sam-translator, moto boto3==1.13.11 # via aws-sam-translator, moto
boto==2.49.0 # via moto boto==2.49.0 # via moto
botocore==1.13.46 # via aws-xray-sdk, boto3, moto, s3transfer botocore==1.16.11 # via aws-xray-sdk, boto3, moto, s3transfer
certifi==2019.11.28 # via requests certifi==2020.4.5.1 # via requests
cffi==1.13.2 # via cryptography cffi==1.14.0 # via cryptography
cfn-lint==0.26.2 # via moto cfn-lint==0.29.5 # via moto
chardet==3.0.4 # via requests chardet==3.0.4 # via requests
click==7.0 # via black, flask click==7.1.1 # via black, flask
coverage==5.0.1 coverage==5.1 # via -r requirements-tests.in
cryptography==2.8 # via moto, sshpubkeys cryptography==2.9.2 # via moto, sshpubkeys
docker==4.1.0 # via moto decorator==4.4.2 # via networkx
docker==4.2.0 # via moto
docutils==0.15.2 # via botocore docutils==0.15.2 # via botocore
ecdsa==0.15 # via python-jose, sshpubkeys ecdsa==0.15 # via python-jose, sshpubkeys
factory-boy==2.12.0 factory-boy==2.12.0 # via -r requirements-tests.in
faker==3.0.0 faker==4.1.0 # via -r requirements-tests.in, factory-boy
fakeredis==1.1.0 fakeredis==1.4.1 # via -r requirements-tests.in
flask==1.1.1 # via pytest-flask flask==1.1.2 # via pytest-flask
freezegun==0.3.12 freezegun==0.3.15 # via -r requirements-tests.in
future==0.18.2 # via aws-xray-sdk future==0.18.2 # via aws-xray-sdk
gitdb2==2.0.6 # via gitpython gitdb==4.0.4 # via gitpython
gitpython==3.0.5 # via bandit gitpython==3.1.1 # via bandit
idna==2.8 # via moto, requests idna==2.8 # via moto, requests
importlib-metadata==1.3.0 # via jsonschema, pluggy, pytest importlib-metadata==1.6.0 # via jsonpickle
itsdangerous==1.1.0 # via flask itsdangerous==1.1.0 # via flask
jinja2==2.10.3 # via flask, moto jinja2==2.11.2 # via flask, moto
jmespath==0.9.4 # via boto3, botocore jmespath==0.9.5 # via boto3, botocore
jsondiff==1.1.2 # via moto jsondiff==1.1.2 # via moto
jsonpatch==1.24 # via cfn-lint jsonpatch==1.25 # via cfn-lint
jsonpickle==1.2 # via aws-xray-sdk jsonpickle==1.4 # via aws-xray-sdk
jsonpointer==2.0 # via jsonpatch jsonpointer==2.0 # via jsonpatch
jsonschema==3.2.0 # via aws-sam-translator, cfn-lint jsonschema==3.2.0 # via aws-sam-translator, cfn-lint
markupsafe==1.1.1 # via jinja2 markupsafe==1.1.1 # via jinja2
mock==3.0.5 # via moto mock==4.0.2 # via moto
more-itertools==8.0.2 # via pytest, zipp more-itertools==8.2.0 # via pytest
moto==1.3.14 moto==1.3.14 # via -r requirements-tests.in
nose==1.3.7 networkx==2.4 # via cfn-lint
packaging==19.2 # via pytest nose==1.3.7 # via -r requirements-tests.in
pathspec==0.7.0 # via black packaging==20.3 # via pytest
pbr==5.4.4 # via stevedore pathspec==0.8.0 # via black
pbr==5.4.5 # via stevedore
pluggy==0.13.1 # via pytest pluggy==0.13.1 # via pytest
py==1.8.1 # via pytest py==1.8.1 # via pytest
pyasn1==0.4.8 # via python-jose, rsa pyasn1==0.4.8 # via python-jose, rsa
pycparser==2.19 # via cffi pycparser==2.20 # via cffi
pyflakes==2.1.1 pyflakes==2.2.0 # via -r requirements-tests.in
pyparsing==2.4.6 # via packaging pyparsing==2.4.7 # via packaging
pyrsistent==0.15.6 # via jsonschema pyrsistent==0.16.0 # via jsonschema
pytest-flask==0.15.0 pytest-flask==1.0.0 # via -r requirements-tests.in
pytest-mock==1.13.0 pytest-mock==3.1.0 # via -r requirements-tests.in
pytest==5.3.2 pytest==5.4.2 # via -r requirements-tests.in, pytest-flask, pytest-mock
python-dateutil==2.8.1 # via botocore, faker, freezegun, moto python-dateutil==2.8.1 # via botocore, faker, freezegun, moto
python-jose==3.1.0 # via moto python-jose==3.1.0 # via moto
pytz==2019.3 # via moto pytz==2019.3 # via moto
pyyaml==5.2 pyyaml==5.3.1 # via -r requirements-tests.in, bandit, cfn-lint, moto
redis==3.3.11 # via fakeredis redis==3.5.2 # via fakeredis
regex==2019.12.20 # via black regex==2020.4.4 # via black
requests-mock==1.7.0 requests-mock==1.8.0 # via -r requirements-tests.in
requests==2.22.0 # via docker, moto, requests-mock, responses requests==2.23.0 # via docker, moto, requests-mock, responses
responses==0.10.9 # via moto responses==0.10.12 # via moto
rsa==4.0 # via python-jose rsa==4.0 # via python-jose
s3transfer==0.2.1 # via boto3 s3transfer==0.3.3 # via boto3
six==1.13.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, ecdsa, faker, fakeredis, freezegun, jsonschema, mock, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client six==1.14.0 # via aws-sam-translator, bandit, cfn-lint, cryptography, docker, ecdsa, fakeredis, freezegun, jsonschema, moto, packaging, pyrsistent, python-dateutil, python-jose, requests-mock, responses, stevedore, websocket-client
smmap2==2.0.5 # via gitdb2 smmap==3.0.2 # via gitdb
sortedcontainers==2.1.0 # via fakeredis sortedcontainers==2.1.0 # via fakeredis
sshpubkeys==3.1.0 # via moto sshpubkeys==3.1.0 # via moto
stevedore==1.31.0 # via bandit stevedore==1.32.0 # via bandit
text-unidecode==1.3 # via faker text-unidecode==1.3 # via faker
toml==0.10.0 # via black toml==0.10.0 # via black
typed-ast==1.4.0 # via black typed-ast==1.4.1 # via black
urllib3==1.25.7 # via botocore, requests urllib3==1.25.8 # via botocore, requests
wcwidth==0.1.8 # via pytest wcwidth==0.1.9 # via pytest
websocket-client==0.57.0 # via docker websocket-client==0.57.0 # via docker
werkzeug==0.16.0 # via flask, moto, pytest-flask werkzeug==1.0.1 # via flask, moto, pytest-flask
wrapt==1.11.2 # via aws-xray-sdk wrapt==1.12.1 # via aws-xray-sdk
xmltodict==0.12.0 # via moto xmltodict==0.12.0 # via moto
zipp==0.6.0 # via importlib-metadata zipp==3.1.0 # via importlib-metadata
# The following packages are considered to be unsafe in a requirements file: # The following packages are considered to be unsafe in a requirements file:
# setuptools # setuptools

View File

@@ -4,96 +4,92 @@
# #
# pip-compile --no-index --output-file=requirements.txt requirements.in # pip-compile --no-index --output-file=requirements.txt requirements.in
# #
acme==1.0.0 acme==1.4.0 # via -r requirements.in
alembic-autogenerate-enums==0.0.2 alembic-autogenerate-enums==0.0.2 # via -r requirements.in
alembic==1.3.2 # via flask-migrate alembic==1.4.2 # via flask-migrate
amqp==2.5.2 # via kombu amqp==2.5.2 # via kombu
aniso8601==8.0.0 # via flask-restful aniso8601==8.0.0 # via flask-restful
arrow==0.15.5 arrow==0.15.6 # via -r requirements.in
asyncpool==1.0 asyncpool==1.0 # via -r requirements.in
bcrypt==3.1.7 # via flask-bcrypt, paramiko bcrypt==3.1.7 # via flask-bcrypt, paramiko
billiard==3.6.1.0 # via celery billiard==3.6.3.0 # via celery
blinker==1.4 # via flask-mail, flask-principal, raven blinker==1.4 # via flask-mail, flask-principal, raven
boto3==1.10.46 boto3==1.13.11 # via -r requirements.in
botocore==1.13.46 botocore==1.16.11 # via -r requirements.in, boto3, s3transfer
celery[redis]==4.4.0 celery[redis]==4.4.2 # via -r requirements.in
certifi==2019.11.28 certifi==2020.4.5.1 # via -r requirements.in, requests
certsrv==2.1.1 certsrv==2.1.1 # via -r requirements.in
cffi==1.13.2 # via bcrypt, cryptography, pynacl cffi==1.14.0 # via bcrypt, cryptography, pynacl
chardet==3.0.4 # via requests chardet==3.0.4 # via requests
click==7.0 # via flask click==7.1.1 # via flask
cloudflare==2.3.1 cloudflare==2.7.1 # via -r requirements.in
cryptography==2.8 cryptography==2.9.2 # via -r requirements.in, acme, josepy, paramiko, pyopenssl, requests
dnspython3==1.15.0 dnspython3==1.15.0 # via -r requirements.in
dnspython==1.15.0 # via dnspython3 dnspython==1.15.0 # via dnspython3
docutils==0.15.2 # via botocore docutils==0.15.2 # via botocore
dyn==1.8.1 dyn==1.8.1 # via -r requirements.in
flask-bcrypt==0.7.1 flask-bcrypt==0.7.1 # via -r requirements.in
flask-cors==3.0.8 flask-cors==3.0.8 # via -r requirements.in
flask-mail==0.9.1 flask-mail==0.9.1 # via -r requirements.in
flask-migrate==2.5.2 flask-migrate==2.5.3 # via -r requirements.in
flask-principal==0.4.0 flask-principal==0.4.0 # via -r requirements.in
flask-replicated==1.3 flask-replicated==1.3 # via -r requirements.in
flask-restful==0.3.7 flask-restful==0.3.8 # via -r requirements.in
flask-script==2.0.6 flask-script==2.0.6 # via -r requirements.in
flask-sqlalchemy==2.4.1 flask-sqlalchemy==2.4.1 # via -r requirements.in, flask-migrate
flask==1.1.1 flask==1.1.2 # via -r requirements.in, flask-bcrypt, flask-cors, flask-mail, flask-migrate, flask-principal, flask-restful, flask-script, flask-sqlalchemy, raven
future==0.18.2 future==0.18.2 # via -r requirements.in, cloudflare
gunicorn==20.0.4 gunicorn==20.0.4 # via -r requirements.in
hvac==0.9.6 hvac==0.10.1 # via -r requirements.in
idna==2.8 # via requests idna==2.9 # via requests
importlib-metadata==1.3.0 # via kombu inflection==0.4.0 # via -r requirements.in
inflection==0.3.1
itsdangerous==1.1.0 # via flask itsdangerous==1.1.0 # via flask
javaobj-py3==0.4.0.1 # via pyjks javaobj-py3==0.4.0.1 # via pyjks
jinja2==2.10.3 jinja2==2.11.2 # via -r requirements.in, flask
jmespath==0.9.4 # via boto3, botocore jmespath==0.9.5 # via boto3, botocore
josepy==1.2.0 # via acme josepy==1.3.0 # via acme
jsonlines==1.2.0 # via cloudflare jsonlines==1.2.0 # via cloudflare
kombu==4.6.7 # via celery kombu==4.6.8 # via celery
lockfile==0.12.2 lockfile==0.12.2 # via -r requirements.in
logmatic-python==0.1.7 logmatic-python==0.1.7 # via -r requirements.in
mako==1.1.0 # via alembic mako==1.1.2 # via alembic
markupsafe==1.1.1 # via jinja2, mako markupsafe==1.1.1 # via jinja2, mako
marshmallow-sqlalchemy==0.21.0 marshmallow-sqlalchemy==0.23.0 # via -r requirements.in
marshmallow==2.20.4 marshmallow==2.20.4 # via -r requirements.in, marshmallow-sqlalchemy
mock==3.0.5 # via acme ndg-httpsclient==0.5.1 # via -r requirements.in
more-itertools==8.0.2 # via zipp paramiko==2.7.1 # via -r requirements.in
ndg-httpsclient==0.5.1 pem==20.1.0 # via -r requirements.in
paramiko==2.7.1 psycopg2==2.8.5 # via -r requirements.in
pem==19.3.0 pyasn1-modules==0.2.8 # via pyjks, python-ldap
psycopg2==2.8.4
pyasn1-modules==0.2.7 # via pyjks, python-ldap
pyasn1==0.4.8 # via ndg-httpsclient, pyasn1-modules, pyjks, python-ldap pyasn1==0.4.8 # via ndg-httpsclient, pyasn1-modules, pyjks, python-ldap
pycparser==2.19 # via cffi pycparser==2.20 # via cffi
pycryptodomex==3.9.4 # via pyjks pycryptodomex==3.9.7 # via pyjks
pyjks==19.0.0 pyjks==20.0.0 # via -r requirements.in
pyjwt==1.7.1 pyjwt==1.7.1 # via -r requirements.in
pynacl==1.3.0 # via paramiko pynacl==1.3.0 # via paramiko
pyopenssl==19.1.0 pyopenssl==19.1.0 # via -r requirements.in, acme, josepy, ndg-httpsclient, requests
pyrfc3339==1.1 # via acme pyrfc3339==1.1 # via acme
python-dateutil==2.8.1 # via alembic, arrow, botocore python-dateutil==2.8.1 # via alembic, arrow, botocore
python-editor==1.0.4 # via alembic python-editor==1.0.4 # via alembic
python-json-logger==0.1.11 # via logmatic-python python-json-logger==0.1.11 # via logmatic-python
python-ldap==3.2.0 python-ldap==3.2.0 # via -r requirements.in
pytz==2019.3 # via acme, celery, flask-restful, pyrfc3339 pytz==2019.3 # via acme, celery, flask-restful, pyrfc3339
pyyaml==5.2 pyyaml==5.3.1 # via -r requirements.in, cloudflare
raven[flask]==6.10.0 raven[flask]==6.10.0 # via -r requirements.in
redis==3.3.11 redis==3.5.2 # via -r requirements.in, celery
requests-toolbelt==0.9.1 # via acme requests-toolbelt==0.9.1 # via acme
requests[security]==2.22.0 requests[security]==2.23.0 # via -r requirements.in, acme, certsrv, cloudflare, hvac, requests-toolbelt
retrying==1.3.3 retrying==1.3.3 # via -r requirements.in
s3transfer==0.2.1 # via boto3 s3transfer==0.3.3 # via boto3
six==1.13.0 six==1.14.0 # via -r requirements.in, acme, bcrypt, cryptography, flask-cors, flask-restful, hvac, josepy, jsonlines, pynacl, pyopenssl, python-dateutil, retrying, sqlalchemy-utils
sqlalchemy-utils==0.36.1 sqlalchemy-utils==0.36.5 # via -r requirements.in
sqlalchemy==1.3.12 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils sqlalchemy==1.3.16 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils
tabulate==0.8.6 tabulate==0.8.7 # via -r requirements.in
twofish==0.3.0 # via pyjks twofish==0.3.0 # via pyjks
urllib3==1.25.7 # via botocore, requests urllib3==1.25.8 # via botocore, requests
vine==1.3.0 # via amqp, celery vine==1.3.0 # via amqp, celery
werkzeug==0.16.0 # via flask werkzeug==1.0.1 # via flask
xmltodict==0.12.0 xmltodict==0.12.0 # via -r requirements.in
zipp==0.6.0 # via importlib-metadata
# The following packages are considered to be unsafe in a requirements file: # The following packages are considered to be unsafe in a requirements file:
# setuptools # setuptools

View File

@@ -45,16 +45,20 @@ with open(os.path.join(ROOT, 'lemur', '__about__.py')) as f:
     exec(f.read(), about)  # nosec: about file is benign

 install_requires_g = parse_requirements("requirements.txt", session=PipSession())
-install_requires = [str(ir.req) for ir in install_requires_g]
 tests_require_g = parse_requirements("requirements-tests.txt", session=PipSession())
-tests_require = [str(ir.req) for ir in tests_require_g]
 docs_require_g = parse_requirements("requirements-docs.txt", session=PipSession())
-docs_require = [str(ir.req) for ir in docs_require_g]
 dev_requires_g = parse_requirements("requirements-dev.txt", session=PipSession())
-dev_requires = [str(ir.req) for ir in dev_requires_g]
+
+if tuple(map(int, pip.__version__.split('.'))) >= (20, 1):
+    install_requires = [str(ir.requirement) for ir in install_requires_g]
+    tests_require = [str(ir.requirement) for ir in tests_require_g]
+    docs_require = [str(ir.requirement) for ir in docs_require_g]
+    dev_requires = [str(ir.requirement) for ir in dev_requires_g]
+else:
+    install_requires = [str(ir.req) for ir in install_requires_g]
+    tests_require = [str(ir.req) for ir in tests_require_g]
+    docs_require = [str(ir.req) for ir in docs_require_g]
+    dev_requires = [str(ir.req) for ir in dev_requires_g]

 class SmartInstall(install):
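The version check above exists because pip 20.1 changed `parse_requirements` to yield `ParsedRequirement` objects, which expose the requirement as a plain string in `.requirement`, whereas older pips yielded `InstallRequirement` objects with a `.req` attribute. As an illustration only (not what the commit does), the comparison could also lean on `packaging`, which copes with pre-release version strings that would make the `int()` split raise:

    import pip
    from packaging.version import Version

    # Version() understands strings like "20.1b1" that int() cannot parse.
    USE_PARSED_REQUIREMENT = Version(pip.__version__) >= Version("20.1")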