Merge branch 'master' into master
@@ -127,6 +127,10 @@ def retrieve_user(user_api_url, access_token):

    # retrieve information about the current user.
    r = requests.get(user_api_url, params=user_params, headers=headers)
    # Some IDPs, like "Keycloak", require a POST instead of a GET
    if r.status_code == 400:
        r = requests.post(user_api_url, data=user_params, headers=headers)

    profile = r.json()

    user = user_service.get_by_email(profile["email"])
@@ -434,7 +438,7 @@ class OAuth2(Resource):
            verify_cert=verify_cert,
        )

        jwks_url = current_app.config.get("PING_JWKS_URL")
        jwks_url = current_app.config.get("OAUTH2_JWKS_URL")
        error_code = validate_id_token(id_token, args["clientId"], jwks_url)
        if error_code:
            return error_code
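The functional change in this hunk is the rename of the JWKS configuration key from the Ping-specific PING_JWKS_URL to the generic OAUTH2_JWKS_URL. An illustrative entry for the Lemur configuration file (the URL is a placeholder):

    OAUTH2_JWKS_URL = "https://idp.example.com/oauth2/v1/keys"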
@@ -25,7 +25,7 @@ class Authorization(db.Model):
        return plugins.get(self.plugin_name)

    def __repr__(self):
        return "Authorization(id={id})".format(label=self.id)
        return "Authorization(id={id})".format(id=self.id)

    def __init__(self, account_number, domains, dns_provider_type, options=None):
        self.account_number = account_number
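The __repr__ change above fixes a latent KeyError: str.format only substitutes keyword arguments whose names appear in the template, so passing label= for an {id} placeholder fails as soon as repr() is called. A minimal reproduction (illustrative, not part of the change):

    "Authorization(id={id})".format(label=42)  # KeyError: 'id'
    "Authorization(id={id})".format(id=42)     # 'Authorization(id=42)'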
@@ -5,39 +5,36 @@
    :license: Apache, see LICENSE for more details.
    .. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
"""
import sys
import multiprocessing
from tabulate import tabulate
from sqlalchemy import or_

import sys
from flask import current_app

from flask_script import Manager
from flask_principal import Identity, identity_changed

from flask_script import Manager
from sqlalchemy import or_
from tabulate import tabulate

from lemur import database
from lemur.extensions import sentry
from lemur.extensions import metrics
from lemur.plugins.base import plugins
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
from lemur.deployment import service as deployment_service
from lemur.endpoints import service as endpoint_service
from lemur.notifications.messaging import send_rotation_notification
from lemur.domains.models import Domain
from lemur.authorities.models import Authority
from lemur.certificates.schemas import CertificateOutputSchema
from lemur.authorities.service import get as authorities_get_by_id
from lemur.certificates.models import Certificate
from lemur.certificates.schemas import CertificateOutputSchema
from lemur.certificates.service import (
    reissue_certificate,
    get_certificate_primitives,
    get_all_pending_reissue,
    get_by_name,
    get_all_certs,
    get_all_valid_certs,
    get,
    get_all_certs_attached_to_endpoint_without_autorotate,
)

from lemur.certificates.verify import verify_string
from lemur.constants import SUCCESS_METRIC_STATUS, FAILURE_METRIC_STATUS
from lemur.deployment import service as deployment_service
from lemur.domains.models import Domain
from lemur.endpoints import service as endpoint_service
from lemur.extensions import sentry, metrics
from lemur.notifications.messaging import send_rotation_notification
from lemur.plugins.base import plugins

manager = Manager(usage="Handles all certificate related tasks.")
@ -213,6 +210,10 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c
|
||||
|
||||
status = FAILURE_METRIC_STATUS
|
||||
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
}
|
||||
|
||||
try:
|
||||
old_cert = validate_certificate(old_certificate_name)
|
||||
new_cert = validate_certificate(new_certificate_name)
|
||||
@ -222,26 +223,43 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c
|
||||
print(
|
||||
f"[+] Rotating endpoint: {endpoint.name} to certificate {new_cert.name}"
|
||||
)
|
||||
log_data["message"] = "Rotating endpoint"
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
log_data["certificate"] = new_cert.name
|
||||
request_rotation(endpoint, new_cert, message, commit)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
elif old_cert and new_cert:
|
||||
print(f"[+] Rotating all endpoints from {old_cert.name} to {new_cert.name}")
|
||||
|
||||
log_data["message"] = "Rotating all endpoints"
|
||||
log_data["certificate"] = new_cert.name
|
||||
log_data["certificate_old"] = old_cert.name
|
||||
log_data["message"] = "Rotating endpoint from old to new cert"
|
||||
for endpoint in old_cert.endpoints:
|
||||
print(f"[+] Rotating {endpoint.name}")
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
request_rotation(endpoint, new_cert, message, commit)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
else:
|
||||
print("[+] Rotating all endpoints that have new certificates available")
|
||||
log_data["message"] = "Rotating all endpoints that have new certificates available"
|
||||
for endpoint in endpoint_service.get_all_pending_rotation():
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
if len(endpoint.certificate.replaced) == 1:
|
||||
print(
|
||||
f"[+] Rotating {endpoint.name} to {endpoint.certificate.replaced[0].name}"
|
||||
)
|
||||
log_data["certificate"] = endpoint.certificate.replaced[0].name
|
||||
request_rotation(
|
||||
endpoint, endpoint.certificate.replaced[0], message, commit
|
||||
)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
else:
|
||||
log_data["message"] = "Failed to rotate endpoint due to Multiple replacement certificates found"
|
||||
print(log_data)
|
||||
metrics.send(
|
||||
"endpoint_rotation",
|
||||
"counter",
|
||||
@ -289,6 +307,178 @@ def rotate(endpoint_name, new_certificate_name, old_certificate_name, message, c
|
||||
)
|
||||
|
||||
|
||||
def request_rotation_region(endpoint, new_cert, message, commit, log_data, region):
|
||||
if region in endpoint.dnsname:
|
||||
log_data["message"] = "Rotating endpoint in region"
|
||||
request_rotation(endpoint, new_cert, message, commit)
|
||||
else:
|
||||
log_data["message"] = "Skipping rotation, region mismatch"
|
||||
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
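request_rotation_region treats the region argument as a plain substring match against the endpoint's DNS name, so the region only has to appear somewhere in dnsname. Illustrative values (not from the change itself):

    "us-east-1" in "myapp.us-east-1.elb.amazonaws.com"  # True  -> rotated
    "us-east-1" in "myapp.eu-west-1.elb.amazonaws.com"  # False -> skipped, region mismatch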
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-e",
|
||||
"--endpoint",
|
||||
dest="endpoint_name",
|
||||
help="Name of the endpoint you wish to rotate.",
|
||||
)
|
||||
@manager.option(
|
||||
"-n",
|
||||
"--new-certificate",
|
||||
dest="new_certificate_name",
|
||||
help="Name of the certificate you wish to rotate to.",
|
||||
)
|
||||
@manager.option(
|
||||
"-o",
|
||||
"--old-certificate",
|
||||
dest="old_certificate_name",
|
||||
help="Name of the certificate you wish to rotate.",
|
||||
)
|
||||
@manager.option(
|
||||
"-a",
|
||||
"--notify",
|
||||
dest="message",
|
||||
action="store_true",
|
||||
help="Send a rotation notification to the certificates owner.",
|
||||
)
|
||||
@manager.option(
|
||||
"-c",
|
||||
"--commit",
|
||||
dest="commit",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Persist changes.",
|
||||
)
|
||||
@manager.option(
|
||||
"-r",
|
||||
"--region",
|
||||
dest="region",
|
||||
required=True,
|
||||
help="Region in which to rotate the endpoint.",
|
||||
)
|
||||
def rotate_region(endpoint_name, new_certificate_name, old_certificate_name, message, commit, region):
|
||||
"""
|
||||
Rotates an endpoint in a defined region if it has not already been replaced. If it has
been replaced, will use the replacement certificate for the rotation.
|
||||
:param old_certificate_name: Name of the certificate you wish to rotate.
|
||||
:param new_certificate_name: Name of the certificate you wish to rotate to.
|
||||
:param endpoint_name: Name of the endpoint you wish to rotate.
|
||||
:param message: Send a rotation notification to the certificates owner.
|
||||
:param commit: Persist changes.
|
||||
:param region: Region in which to rotate the endpoint.
|
||||
"""
|
||||
if commit:
|
||||
print("[!] Running in COMMIT mode.")
|
||||
|
||||
print("[+] Starting endpoint rotation.")
|
||||
status = FAILURE_METRIC_STATUS
|
||||
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"region": region,
|
||||
}
|
||||
|
||||
try:
|
||||
old_cert = validate_certificate(old_certificate_name)
|
||||
new_cert = validate_certificate(new_certificate_name)
|
||||
endpoint = validate_endpoint(endpoint_name)
|
||||
|
||||
if endpoint and new_cert:
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
log_data["certificate"] = new_cert.name
|
||||
request_rotation_region(endpoint, new_cert, message, commit, log_data, region)
|
||||
|
||||
elif old_cert and new_cert:
|
||||
log_data["certificate"] = new_cert.name
|
||||
log_data["certificate_old"] = old_cert.name
|
||||
log_data["message"] = "Rotating endpoint from old to new cert"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
for endpoint in old_cert.endpoints:
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
request_rotation_region(endpoint, new_cert, message, commit, log_data, region)
|
||||
|
||||
else:
|
||||
log_data["message"] = "Rotating all endpoints that have new certificates available"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
all_pending_rotation_endpoints = endpoint_service.get_all_pending_rotation()
|
||||
for endpoint in all_pending_rotation_endpoints:
|
||||
log_data["endpoint"] = endpoint.dnsname
|
||||
if region not in endpoint.dnsname:
|
||||
log_data["message"] = "Skipping rotation, region mismatch"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
metrics.send(
|
||||
"endpoint_rotation_region_skipped",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"region": region,
|
||||
"old_certificate_name": str(old_cert),
|
||||
"new_certificate_name": str(endpoint.certificate.replaced[0].name),
|
||||
"endpoint_name": str(endpoint.dnsname),
|
||||
},
|
||||
)
|
||||
|
||||
if len(endpoint.certificate.replaced) == 1:
|
||||
log_data["certificate"] = endpoint.certificate.replaced[0].name
|
||||
log_data["message"] = "Rotating all endpoints in region"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
request_rotation(endpoint, endpoint.certificate.replaced[0], message, commit)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
else:
|
||||
status = FAILURE_METRIC_STATUS
|
||||
log_data["message"] = "Failed to rotate endpoint due to Multiple replacement certificates found"
|
||||
print(log_data)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
metrics.send(
|
||||
"endpoint_rotation_region",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"status": FAILURE_METRIC_STATUS,
|
||||
"old_certificate_name": str(old_cert),
|
||||
"new_certificate_name": str(endpoint.certificate.replaced[0].name),
|
||||
"endpoint_name": str(endpoint.dnsname),
|
||||
"message": str(message),
|
||||
"region": str(region),
|
||||
},
|
||||
)
|
||||
status = SUCCESS_METRIC_STATUS
|
||||
print("[+] Done!")
|
||||
|
||||
except Exception as e:
|
||||
sentry.captureException(
|
||||
extra={
|
||||
"old_certificate_name": str(old_certificate_name),
|
||||
"new_certificate_name": str(new_certificate_name),
|
||||
"endpoint": str(endpoint_name),
|
||||
"message": str(message),
|
||||
"region": str(region),
|
||||
}
|
||||
)
|
||||
|
||||
metrics.send(
|
||||
"endpoint_rotation_region_job",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"status": status,
|
||||
"old_certificate_name": str(old_certificate_name),
|
||||
"new_certificate_name": str(new_certificate_name),
|
||||
"endpoint_name": str(endpoint_name),
|
||||
"message": str(message),
|
||||
"endpoint": str(globals().get("endpoint")),
|
||||
"region": str(region),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@manager.option(
|
||||
"-o",
|
||||
"--old-certificate",
|
||||
@ -467,7 +657,14 @@ def check_revoked():
|
||||
encounters an issue with verification it marks the certificate status
|
||||
as `unknown`.
|
||||
"""
|
||||
for cert in get_all_certs():
|
||||
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"message": "Checking for revoked Certificates"
|
||||
}
|
||||
|
||||
certs = get_all_valid_certs(current_app.config.get("SUPPORTED_REVOCATION_AUTHORITY_PLUGINS", []))
|
||||
for cert in certs:
|
||||
try:
|
||||
if cert.chain:
|
||||
status = verify_string(cert.body, cert.chain)
|
||||
@ -476,9 +673,65 @@ def check_revoked():
|
||||
|
||||
cert.status = "valid" if status else "revoked"
|
||||
|
||||
if cert.status == "revoked":
|
||||
log_data["valid"] = cert.status
|
||||
log_data["certificate_name"] = cert.name
|
||||
log_data["certificate_id"] = cert.id
|
||||
metrics.send(
|
||||
"certificate_revoked",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"status": log_data["valid"],
|
||||
"certificate_name": log_data["certificate_name"],
|
||||
"certificate_id": log_data["certificate_id"]},
|
||||
)
|
||||
current_app.logger.info(log_data)
|
||||
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
current_app.logger.exception(e)
|
||||
cert.status = "unknown"
|
||||
|
||||
database.update(cert)
|
||||
|
||||
|
||||
@manager.command
|
||||
def automatically_enable_autorotate():
|
||||
"""
|
||||
This function automatically enables auto-rotation for unexpired certificates that are
|
||||
attached to an endpoint but do not have autorotate enabled.
|
||||
|
||||
WARNING: This will overwrite the Auto-rotate toggle!
|
||||
"""
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"message": "Enabling auto-rotate for certificate"
|
||||
}
|
||||
|
||||
permitted_authorities = current_app.config.get("ENABLE_AUTO_ROTATE_AUTHORITY", [])
|
||||
|
||||
eligible_certs = get_all_certs_attached_to_endpoint_without_autorotate()
|
||||
for cert in eligible_certs:
|
||||
|
||||
if cert.authority_id not in permitted_authorities:
|
||||
continue
|
||||
|
||||
log_data["certificate"] = cert.name
|
||||
log_data["certificate_id"] = cert.id
|
||||
log_data["authority_id"] = cert.authority_id
|
||||
log_data["authority_name"] = authorities_get_by_id(cert.authority_id).name
|
||||
if cert.destinations:
|
||||
log_data["destination_names"] = ', '.join([d.label for d in cert.destinations])
|
||||
else:
|
||||
log_data["destination_names"] = "NONE"
|
||||
current_app.logger.info(log_data)
|
||||
metrics.send("automatically_enable_autorotate",
|
||||
"counter", 1,
|
||||
metric_tags={"certificate": log_data["certificate"],
|
||||
"certificate_id": log_data["certificate_id"],
|
||||
"authority_id": log_data["authority_id"],
|
||||
"authority_name": log_data["authority_name"],
|
||||
"destination_names": log_data["destination_names"]
|
||||
})
|
||||
cert.rotation = True
|
||||
database.update(cert)
|
||||
|
@@ -321,7 +321,8 @@ class Certificate(db.Model):

    @hybrid_property
    def expired(self):
        if self.not_after <= arrow.utcnow():
        # can't compare offset-naive and offset-aware datetimes
        if arrow.Arrow.fromdatetime(self.not_after) <= arrow.utcnow():
            return True

    @expired.expression
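The added comment points at the underlying bug: not_after is stored as a naive datetime while arrow.utcnow() is timezone-aware, and comparing the two raises the "can't compare offset-naive and offset-aware datetimes" TypeError. Converting through arrow.Arrow.fromdatetime makes both sides comparable. A minimal sketch (illustrative, not part of the change):

    import datetime
    import arrow

    naive = datetime.datetime(2021, 3, 17)
    # naive <= arrow.utcnow()                          # raises TypeError
    arrow.Arrow.fromdatetime(naive) <= arrow.utcnow()  # works; the naive value is treated as UTC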
@@ -445,6 +446,9 @@ def update_destinations(target, value, initiator):
    """
    destination_plugin = plugins.get(value.plugin_name)
    status = FAILURE_METRIC_STATUS

    if target.expired:
        return
    try:
        if target.private_key or not destination_plugin.requires_key:
            destination_plugin.upload(
@@ -146,7 +146,8 @@ class CertificateInputSchema(CertificateCreationSchema):
                data["extensions"]["subAltNames"] = {"names": []}
            elif not data["extensions"]["subAltNames"].get("names"):
                data["extensions"]["subAltNames"]["names"] = []
            data["extensions"]["subAltNames"]["names"] += csr_sans

            data["extensions"]["subAltNames"]["names"] = csr_sans
        return missing.convert_validity_years(data)
@ -20,6 +20,7 @@ from lemur.common.utils import generate_private_key, truthiness
|
||||
from lemur.destinations.models import Destination
|
||||
from lemur.domains.models import Domain
|
||||
from lemur.extensions import metrics, sentry, signals
|
||||
from lemur.models import certificate_associations
|
||||
from lemur.notifications.models import Notification
|
||||
from lemur.pending_certificates.models import PendingCertificate
|
||||
from lemur.plugins.base import plugins
|
||||
@ -102,13 +103,35 @@ def get_all_certs():
|
||||
return Certificate.query.all()
|
||||
|
||||
|
||||
def get_all_pending_cleaning(source):
|
||||
def get_all_valid_certs(authority_plugin_name):
|
||||
"""
|
||||
Retrieves all certificates that are available for cleaning.
|
||||
Retrieves all valid (not expired) certificates within Lemur for the given authority plugin names;
ignored if no authority_plugin_name is provided.

Note that depending on the DB size, retrieving all certificates might be an expensive operation.
|
||||
|
||||
:param source:
|
||||
:return:
|
||||
"""
|
||||
if authority_plugin_name:
|
||||
return (
|
||||
Certificate.query.outerjoin(Authority, Authority.id == Certificate.authority_id).filter(
|
||||
Certificate.not_after > arrow.now().format("YYYY-MM-DD")).filter(
|
||||
Authority.plugin_name.in_(authority_plugin_name)).all()
|
||||
)
|
||||
else:
|
||||
return (
|
||||
Certificate.query.filter(Certificate.not_after > arrow.now().format("YYYY-MM-DD")).all()
|
||||
)
|
||||
|
||||
|
||||
def get_all_pending_cleaning_expired(source):
|
||||
"""
|
||||
Retrieves all certificates that are available for cleaning. These are certificates which are expired and are not
|
||||
attached to any endpoints.
|
||||
|
||||
:param source: the source to search for certificates
|
||||
:return: list of pending certificates
|
||||
"""
|
||||
return (
|
||||
Certificate.query.filter(Certificate.sources.any(id=source.id))
|
||||
.filter(not_(Certificate.endpoints.any()))
|
||||
@ -117,6 +140,58 @@ def get_all_pending_cleaning(source):
|
||||
)
|
||||
|
||||
|
||||
def get_all_certs_attached_to_endpoint_without_autorotate():
|
||||
"""
|
||||
Retrieves all certificates that are attached to an endpoint, but that do not have autorotate enabled.
|
||||
|
||||
:return: list of certificates attached to an endpoint without autorotate
|
||||
"""
|
||||
return (
|
||||
Certificate.query.filter(Certificate.endpoints.any())
|
||||
.filter(Certificate.rotation == False)
|
||||
.filter(Certificate.not_after >= arrow.now())
|
||||
.filter(not_(Certificate.replaced.any()))
|
||||
.all() # noqa
|
||||
)
|
||||
|
||||
|
||||
def get_all_pending_cleaning_expiring_in_days(source, days_to_expire):
|
||||
"""
|
||||
Retrieves all certificates that are available for cleaning, not attached to endpoint,
|
||||
and within X days from expiration.
|
||||
|
||||
:param days_to_expire: defines how many days till the certificate is expired
|
||||
:param source: the source to search for certificates
|
||||
:return: list of pending certificates
|
||||
"""
|
||||
expiration_window = arrow.now().shift(days=+days_to_expire).format("YYYY-MM-DD")
|
||||
return (
|
||||
Certificate.query.filter(Certificate.sources.any(id=source.id))
|
||||
.filter(not_(Certificate.endpoints.any()))
|
||||
.filter(Certificate.not_after < expiration_window)
|
||||
.all()
|
||||
)
|
||||
|
||||
|
||||
def get_all_pending_cleaning_issued_since_days(source, days_since_issuance):
|
||||
"""
|
||||
Retrieves all certificates that are available for cleaning: not attached to endpoint, and X days since issuance.
|
||||
|
||||
:param days_since_issuance: defines how many days since the certificate is issued
|
||||
:param source: the source to search for certificates
|
||||
:return: list of pending certificates
|
||||
"""
|
||||
not_in_use_window = (
|
||||
arrow.now().shift(days=-days_since_issuance).format("YYYY-MM-DD")
|
||||
)
|
||||
return (
|
||||
Certificate.query.filter(Certificate.sources.any(id=source.id))
|
||||
.filter(not_(Certificate.endpoints.any()))
|
||||
.filter(Certificate.date_created > not_in_use_window)
|
||||
.all()
|
||||
)
|
||||
|
||||
|
||||
def get_all_pending_reissue():
|
||||
"""
|
||||
Retrieves all certificates that need to be rotated.
|
||||
@ -331,9 +406,11 @@ def render(args):
|
||||
|
||||
show_expired = args.pop("showExpired")
|
||||
if show_expired != 1:
|
||||
one_month_old = arrow.now()\
|
||||
.shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1))\
|
||||
one_month_old = (
|
||||
arrow.now()
|
||||
.shift(months=current_app.config.get("HIDE_EXPIRED_CERTS_AFTER_MONTHS", -1))
|
||||
.format("YYYY-MM-DD")
|
||||
)
|
||||
query = query.filter(Certificate.not_after > one_month_old)
|
||||
|
||||
time_range = args.pop("time_range")
|
||||
@ -379,8 +456,8 @@ def render(args):
|
||||
elif "cn" in terms:
|
||||
query = query.filter(
|
||||
or_(
|
||||
Certificate.cn.ilike(term),
|
||||
Certificate.domains.any(Domain.name.ilike(term)),
|
||||
func.lower(Certificate.cn).like(term.lower()),
|
||||
Certificate.id.in_(like_domain_query(term)),
|
||||
)
|
||||
)
|
||||
elif "id" in terms:
|
||||
@ -388,9 +465,9 @@ def render(args):
|
||||
elif "name" in terms:
|
||||
query = query.filter(
|
||||
or_(
|
||||
Certificate.name.ilike(term),
|
||||
Certificate.domains.any(Domain.name.ilike(term)),
|
||||
Certificate.cn.ilike(term),
|
||||
func.lower(Certificate.name).like(term.lower()),
|
||||
Certificate.id.in_(like_domain_query(term)),
|
||||
func.lower(Certificate.cn).like(term.lower()),
|
||||
)
|
||||
)
|
||||
elif "fixedName" in terms:
|
||||
@ -435,6 +512,14 @@ def render(args):
|
||||
return result
|
||||
|
||||
|
||||
def like_domain_query(term):
|
||||
domain_query = database.session_query(Domain.id)
|
||||
domain_query = domain_query.filter(func.lower(Domain.name).like(term.lower()))
|
||||
assoc_query = database.session_query(certificate_associations.c.certificate_id)
|
||||
assoc_query = assoc_query.filter(certificate_associations.c.domain_id.in_(domain_query))
|
||||
return assoc_query
|
||||
|
||||
|
||||
def query_name(certificate_name, args):
|
||||
"""
|
||||
Helper function that queries for a certificate by name
|
||||
|
@@ -8,6 +8,7 @@
import requests
import subprocess
from flask import current_app
from lemur.extensions import sentry
from requests.exceptions import ConnectionError, InvalidSchema
from cryptography import x509
from cryptography.hazmat.backends import default_backend
@@ -152,10 +153,19 @@ def verify(cert_path, issuer_chain_path):

    # OCSP is our main source of truth, in a lot of cases CRLs
    # have been deprecated and are no longer updated
    verify_result = ocsp_verify(cert, cert_path, issuer_chain_path)
    verify_result = None
    try:
        verify_result = ocsp_verify(cert, cert_path, issuer_chain_path)
    except Exception as e:
        sentry.captureException()
        current_app.logger.exception(e)

    if verify_result is None:
        verify_result = crl_verify(cert, cert_path)
        try:
            verify_result = crl_verify(cert, cert_path)
        except Exception as e:
            sentry.captureException()
            current_app.logger.exception(e)

    if verify_result is None:
        current_app.logger.debug("Failed to verify {}".format(cert.serial_number))
@ -10,27 +10,27 @@ command: celery -A lemur.common.celery worker --loglevel=info -l DEBUG -B
|
||||
import copy
|
||||
import sys
|
||||
import time
|
||||
from datetime import datetime, timezone, timedelta
|
||||
|
||||
from celery import Celery
|
||||
from celery.app.task import Context
|
||||
from celery.exceptions import SoftTimeLimitExceeded
|
||||
from celery.signals import task_failure, task_received, task_revoked, task_success
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from flask import current_app
|
||||
|
||||
from lemur.authorities.service import get as get_authority
|
||||
from lemur.certificates import cli as cli_certificate
|
||||
from lemur.common.redis import RedisHandler
|
||||
from lemur.destinations import service as destinations_service
|
||||
from lemur.dns_providers import cli as cli_dns_providers
|
||||
from lemur.endpoints import cli as cli_endpoints
|
||||
from lemur.extensions import metrics, sentry
|
||||
from lemur.factory import create_app
|
||||
from lemur.notifications import cli as cli_notification
|
||||
from lemur.notifications.messaging import send_pending_failure_notification
|
||||
from lemur.pending_certificates import service as pending_certificate_service
|
||||
from lemur.plugins.base import plugins
|
||||
from lemur.sources.cli import clean, sync, validate_sources
|
||||
from lemur.sources.service import add_aws_destination_to_sources
|
||||
from lemur.certificates import cli as cli_certificate
|
||||
from lemur.dns_providers import cli as cli_dns_providers
|
||||
from lemur.notifications import cli as cli_notification
|
||||
from lemur.endpoints import cli as cli_endpoints
|
||||
|
||||
|
||||
if current_app:
|
||||
flask_app = current_app
|
||||
@ -67,7 +67,7 @@ def is_task_active(fun, task_id, args):
|
||||
from celery.task.control import inspect
|
||||
|
||||
if not args:
|
||||
args = '()' # empty args
|
||||
args = "()" # empty args
|
||||
|
||||
i = inspect()
|
||||
active_tasks = i.active()
|
||||
@ -80,6 +80,37 @@ def is_task_active(fun, task_id, args):
|
||||
return False
|
||||
|
||||
|
||||
def get_celery_request_tags(**kwargs):
|
||||
request = kwargs.get("request")
|
||||
sender_hostname = "unknown"
|
||||
sender = kwargs.get("sender")
|
||||
if sender:
|
||||
try:
|
||||
sender_hostname = sender.hostname
|
||||
except AttributeError:
|
||||
sender_hostname = vars(sender.request).get("origin", "unknown")
|
||||
if request and not isinstance(
|
||||
request, Context
|
||||
): # unlike others, task_revoked sends a Context for `request`
|
||||
task_name = request.name
|
||||
task_id = request.id
|
||||
receiver_hostname = request.hostname
|
||||
else:
|
||||
task_name = sender.name
|
||||
task_id = sender.request.id
|
||||
receiver_hostname = sender.request.hostname
|
||||
|
||||
tags = {
|
||||
"task_name": task_name,
|
||||
"task_id": task_id,
|
||||
"sender_hostname": sender_hostname,
|
||||
"receiver_hostname": receiver_hostname,
|
||||
}
|
||||
if kwargs.get("exception"):
|
||||
tags["error"] = repr(kwargs["exception"])
|
||||
return tags
|
||||
|
||||
|
||||
@celery.task()
|
||||
def report_celery_last_success_metrics():
|
||||
"""
|
||||
@ -89,7 +120,6 @@ def report_celery_last_success_metrics():
|
||||
report_celery_last_success_metrics should be run periodically to emit metrics on when a task was last successful.
|
||||
Admins can then alert when tasks are not ran when intended. Admins should also alert when no metrics are emitted
|
||||
from this function.
|
||||
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
@ -108,15 +138,91 @@ def report_celery_last_success_metrics():
|
||||
return
|
||||
|
||||
current_time = int(time.time())
|
||||
schedule = current_app.config.get('CELERYBEAT_SCHEDULE')
|
||||
schedule = current_app.config.get("CELERYBEAT_SCHEDULE")
|
||||
for _, t in schedule.items():
|
||||
task = t.get("task")
|
||||
last_success = int(red.get(f"{task}.last_success") or 0)
|
||||
metrics.send(f"{task}.time_since_last_success", 'gauge', current_time - last_success)
|
||||
metrics.send(
|
||||
f"{task}.time_since_last_success", "gauge", current_time - last_success
|
||||
)
|
||||
red.set(
|
||||
f"{function}.last_success", int(time.time())
|
||||
) # Alert if this metric is not seen
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
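report_celery_last_success_metrics assumes the standard Celery beat schedule structure, where each entry carries a "task" key whose value matches the module-qualified task name; the tasks in this module write the corresponding <task>.last_success keys to Redis. A minimal illustrative schedule entry (the task path is taken from this module, the timing is a placeholder):

    from celery.schedules import crontab

    CELERYBEAT_SCHEDULE = {
        "fetch_all_pending_acme_certs": {
            "task": "lemur.common.celery.fetch_all_pending_acme_certs",
            "schedule": crontab(minute="*"),
        },
    }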
|
||||
|
||||
|
||||
@task_received.connect
|
||||
def report_number_pending_tasks(**kwargs):
|
||||
"""
|
||||
Report the number of pending tasks to our metrics broker every time a task is published. This metric can be used
|
||||
for autoscaling workers.
|
||||
https://docs.celeryproject.org/en/latest/userguide/signals.html#task-received
|
||||
"""
|
||||
with flask_app.app_context():
|
||||
metrics.send(
|
||||
"celery.new_pending_task",
|
||||
"TIMER",
|
||||
1,
|
||||
metric_tags=get_celery_request_tags(**kwargs),
|
||||
)
|
||||
|
||||
|
||||
@task_success.connect
|
||||
def report_successful_task(**kwargs):
|
||||
"""
|
||||
Report a generic success metric as tasks to our metrics broker every time a task finished correctly.
|
||||
This metric can be used for autoscaling workers.
|
||||
https://docs.celeryproject.org/en/latest/userguide/signals.html#task-success
|
||||
"""
|
||||
with flask_app.app_context():
|
||||
tags = get_celery_request_tags(**kwargs)
|
||||
red.set(f"{tags['task_name']}.last_success", int(time.time()))
|
||||
metrics.send("celery.successful_task", "TIMER", 1, metric_tags=tags)
|
||||
|
||||
|
||||
@task_failure.connect
|
||||
def report_failed_task(**kwargs):
|
||||
"""
|
||||
Report a generic failure metric as tasks to our metrics broker every time a task fails.
|
||||
This metric can be used for alerting.
|
||||
https://docs.celeryproject.org/en/latest/userguide/signals.html#task-failure
|
||||
"""
|
||||
with flask_app.app_context():
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"Message": "Celery Task Failure",
|
||||
}
|
||||
|
||||
# Add traceback if exception info is in the kwargs
|
||||
einfo = kwargs.get("einfo")
|
||||
if einfo:
|
||||
log_data["traceback"] = einfo.traceback
|
||||
|
||||
error_tags = get_celery_request_tags(**kwargs)
|
||||
|
||||
log_data.update(error_tags)
|
||||
current_app.logger.error(log_data)
|
||||
metrics.send("celery.failed_task", "TIMER", 1, metric_tags=error_tags)
|
||||
|
||||
|
||||
@task_revoked.connect
|
||||
def report_revoked_task(**kwargs):
|
||||
"""
|
||||
Report a generic failure metric as tasks to our metrics broker every time a task is revoked.
|
||||
This metric can be used for alerting.
|
||||
https://docs.celeryproject.org/en/latest/userguide/signals.html#task-revoked
|
||||
"""
|
||||
with flask_app.app_context():
|
||||
log_data = {
|
||||
"function": f"{__name__}.{sys._getframe().f_code.co_name}",
|
||||
"Message": "Celery Task Revoked",
|
||||
}
|
||||
|
||||
error_tags = get_celery_request_tags(**kwargs)
|
||||
|
||||
log_data.update(error_tags)
|
||||
current_app.logger.error(log_data)
|
||||
metrics.send("celery.revoked_task", "TIMER", 1, metric_tags=error_tags)
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=600)
|
||||
@ -217,15 +323,15 @@ def fetch_acme_cert(id):
|
||||
log_data["failed"] = failed
|
||||
log_data["wrong_issuer"] = wrong_issuer
|
||||
current_app.logger.debug(log_data)
|
||||
metrics.send(f"{function}.resolved", 'gauge', new)
|
||||
metrics.send(f"{function}.failed", 'gauge', failed)
|
||||
metrics.send(f"{function}.wrong_issuer", 'gauge', wrong_issuer)
|
||||
metrics.send(f"{function}.resolved", "gauge", new)
|
||||
metrics.send(f"{function}.failed", "gauge", failed)
|
||||
metrics.send(f"{function}.wrong_issuer", "gauge", wrong_issuer)
|
||||
print(
|
||||
"[+] Certificates: New: {new} Failed: {failed} Not using ACME: {wrong_issuer}".format(
|
||||
new=new, failed=failed, wrong_issuer=wrong_issuer
|
||||
)
|
||||
)
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task()
|
||||
@ -262,8 +368,8 @@ def fetch_all_pending_acme_certs():
|
||||
current_app.logger.debug(log_data)
|
||||
fetch_acme_cert.delay(cert.id)
|
||||
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task()
|
||||
@ -296,8 +402,8 @@ def remove_old_acme_certs():
|
||||
current_app.logger.debug(log_data)
|
||||
pending_certificate_service.delete(cert)
|
||||
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task()
|
||||
@ -328,11 +434,11 @@ def clean_all_sources():
|
||||
current_app.logger.debug(log_data)
|
||||
clean_source.delay(source.label)
|
||||
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=600)
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def clean_source(source):
|
||||
"""
|
||||
This celery task will clean the specified source. This is a destructive operation that will delete unused
|
||||
@ -366,6 +472,7 @@ def clean_source(source):
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task()
|
||||
@ -395,8 +502,8 @@ def sync_all_sources():
|
||||
current_app.logger.debug(log_data)
|
||||
sync_source.delay(source.label)
|
||||
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=7200)
|
||||
@ -428,19 +535,23 @@ def sync_source(source):
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
sync([source])
|
||||
metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source})
|
||||
metrics.send(
|
||||
f"{function}.success", "counter", 1, metric_tags={"source": source}
|
||||
)
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Error syncing source: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
sentry.captureException()
|
||||
metrics.send("sync_source_timeout", "counter", 1, metric_tags={"source": source})
|
||||
metrics.send(
|
||||
"sync_source_timeout", "counter", 1, metric_tags={"source": source}
|
||||
)
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
log_data["message"] = "Done syncing source"
|
||||
current_app.logger.debug(log_data)
|
||||
metrics.send(f"{function}.success", 'counter', 1, metric_tags={"source": source})
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", "counter", 1, metric_tags={"source": source})
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task()
|
||||
@ -477,8 +588,8 @@ def sync_source_destination():
|
||||
|
||||
log_data["message"] = "completed Syncing AWS destinations and sources"
|
||||
current_app.logger.debug(log_data)
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
@ -515,12 +626,13 @@ def certificate_reissue():
|
||||
|
||||
log_data["message"] = "reissuance completed"
|
||||
current_app.logger.debug(log_data)
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def certificate_rotate():
|
||||
def certificate_rotate(**kwargs):
|
||||
|
||||
"""
|
||||
This celery task rotates certificates which are reissued but having endpoints attached to the replaced cert
|
||||
:return:
|
||||
@ -530,11 +642,11 @@ def certificate_rotate():
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
region = kwargs.get("region")
|
||||
log_data = {
|
||||
"function": function,
|
||||
"message": "rotating certificates",
|
||||
"task_id": task_id,
|
||||
|
||||
}
|
||||
|
||||
if task_id and is_task_active(function, task_id, None):
|
||||
@ -544,7 +656,11 @@ def certificate_rotate():
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_certificate.rotate(None, None, None, None, True)
|
||||
if region:
|
||||
log_data["region"] = region
|
||||
cli_certificate.rotate_region(None, None, None, None, True, region)
|
||||
else:
|
||||
cli_certificate.rotate(None, None, None, None, True)
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Certificate rotate: Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
@ -554,8 +670,8 @@ def certificate_rotate():
|
||||
|
||||
log_data["message"] = "rotation completed"
|
||||
current_app.logger.debug(log_data)
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
@ -590,8 +706,8 @@ def endpoints_expire():
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=600)
|
||||
@ -626,8 +742,8 @@ def get_all_zones():
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
@ -662,8 +778,8 @@ def check_revoked():
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
@ -690,7 +806,9 @@ def notify_expirations():
|
||||
|
||||
current_app.logger.debug(log_data)
|
||||
try:
|
||||
cli_notification.expirations(current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", []))
|
||||
cli_notification.expirations(
|
||||
current_app.config.get("EXCLUDE_CN_FROM_NOTIFICATION", [])
|
||||
)
|
||||
except SoftTimeLimitExceeded:
|
||||
log_data["message"] = "Notify expiring Time limit exceeded."
|
||||
current_app.logger.error(log_data)
|
||||
@ -698,5 +816,29 @@ def notify_expirations():
|
||||
metrics.send("celery.timeout", "counter", 1, metric_tags={"function": function})
|
||||
return
|
||||
|
||||
red.set(f'{function}.last_success', int(time.time()))
|
||||
metrics.send(f"{function}.success", 'counter', 1)
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
||||
|
||||
@celery.task(soft_time_limit=3600)
|
||||
def enable_autorotate_for_certs_attached_to_endpoint():
|
||||
"""
|
||||
This celery task automatically enables autorotation for unexpired certificates that are
|
||||
attached to an endpoint but do not have autorotate enabled.
|
||||
:return:
|
||||
"""
|
||||
function = f"{__name__}.{sys._getframe().f_code.co_name}"
|
||||
task_id = None
|
||||
if celery.current_task:
|
||||
task_id = celery.current_task.request.id
|
||||
|
||||
log_data = {
|
||||
"function": function,
|
||||
"task_id": task_id,
|
||||
"message": "Enabling autorotate to eligible certificates",
|
||||
}
|
||||
current_app.logger.debug(log_data)
|
||||
|
||||
cli_certificate.automatically_enable_autorotate()
|
||||
metrics.send(f"{function}.success", "counter", 1)
|
||||
return log_data
|
||||
|
@@ -2,6 +2,7 @@ import re
import unicodedata

from cryptography import x509
from cryptography.hazmat.primitives.serialization import Encoding
from flask import current_app

from lemur.common.utils import is_selfsigned
@@ -71,12 +72,20 @@ def common_name(cert):
    :return: Common name or None
    """
    try:
        return cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)[
            0
        ].value.strip()
        subject_oid = cert.subject.get_attributes_for_oid(x509.OID_COMMON_NAME)
        if len(subject_oid) > 0:
            return subject_oid[0].value.strip()
        return None
    except Exception as e:
        sentry.captureException()
        current_app.logger.error("Unable to get common name! {0}".format(e))
        current_app.logger.error(
            {
                "message": "Unable to get common name",
                "error": e,
                "public_key": cert.public_bytes(Encoding.PEM).decode("utf-8")
            },
            exc_info=True
        )


def organization(cert):
@@ -99,8 +99,12 @@ def csr(data):
        raise ValidationError("CSR presented is not valid.")

    # Validate common name and SubjectAltNames
    for name in request.subject.get_attributes_for_oid(NameOID.COMMON_NAME):
        common_name(name.value)
    try:
        for name in request.subject.get_attributes_for_oid(NameOID.COMMON_NAME):
            common_name(name.value)
    except ValueError as err:
        current_app.logger.info("Error parsing Subject from CSR: %s", err)
        raise ValidationError("Invalid Subject value in supplied CSR")

    try:
        alt_names = request.extensions.get_extension_for_class(
@@ -148,6 +152,18 @@ def dates(data):
                data["authority"].authority_certificate.not_after
            )
        )
    # Allow no more than PUBLIC_CA_MAX_VALIDITY_DAYS (Default: 397) days of validity
    # for certs issued by public CA
    # The list of public issuers can be managed through a config named PUBLIC_CA
    public_CA = current_app.config.get("PUBLIC_CA_AUTHORITY_NAMES", [])
    if data["authority"].name.lower() in [ca.lower() for ca in public_CA]:
        max_validity_days = current_app.config.get("PUBLIC_CA_MAX_VALIDITY_DAYS", 397)
        if (
            (data.get("validity_end").date() - data.get("validity_start").date()).days
            > max_validity_days
        ):
            raise ValidationError("Certificate cannot be valid for more than " +
                                  str(max_validity_days) + " days")

    return data
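The validity cap above is driven purely by configuration. An illustrative entry for the Lemur configuration file (the key names come from the diff; the authority names are placeholders):

    PUBLIC_CA_AUTHORITY_NAMES = ["LetsEncrypt", "DigiCertPublicCA"]
    PUBLIC_CA_MAX_VALIDITY_DAYS = 397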
@@ -31,11 +31,11 @@ class DNSResolveError(DNSError):

def is_valid_domain(domain):
    """Checks if a domain is syntactically valid and returns a bool"""
    if len(domain) > 253:
        return False
    if domain[-1] == ".":
        domain = domain[:-1]
    fqdn_re = re.compile("(?=^.{1,254}$)(^(?:(?!\d+\.|-)[a-zA-Z0-9_\-]{1,63}(?<!-)\.?)+(?:[a-zA-Z]{2,})$)", re.IGNORECASE)
    if len(domain) > 253:
        return False
    fqdn_re = re.compile("(?=^.{1,63}$)(^(?:[a-z0-9_](?:-*[a-z0-9_])+)$|^[a-z0-9]$)", re.IGNORECASE)
    return all(fqdn_re.match(d) for d in domain.split("."))
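The new validator applies the 63-character regex to each dot-separated label rather than to the whole name. A few illustrative calls (not part of the change):

    is_valid_domain("example.com")                   # True
    is_valid_domain("_acme-challenge.example.com")   # True, underscores and hyphens allowed
    is_valid_domain("bad..example.com")              # False, empty label
    is_valid_domain(("a" * 64) + ".example.com")     # False, label longer than 63 characters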
lemur/migrations/versions/8323a5ea723a_.py (new file, 50 lines)
@@ -0,0 +1,50 @@
"""Add lowercase index for certificate name and cn and also for domain name
|
||||
|
||||
Revision ID: 8323a5ea723a
|
||||
Revises: b33c838cb669
|
||||
Create Date: 2020-01-10 10:51:44.776052
|
||||
|
||||
"""
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '8323a5ea723a'
|
||||
down_revision = 'b33c838cb669'
|
||||
|
||||
from alembic import op
|
||||
from sqlalchemy import text
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.create_index(
|
||||
"ix_certificates_cn_lower",
|
||||
"certificates",
|
||||
[text("lower(cn)")],
|
||||
unique=False,
|
||||
postgresql_ops={"lower(cn)": "gin_trgm_ops"},
|
||||
postgresql_using="gin",
|
||||
)
|
||||
op.create_index(
|
||||
"ix_certificates_name_lower",
|
||||
"certificates",
|
||||
[text("lower(name)")],
|
||||
unique=False,
|
||||
postgresql_ops={"lower(name)": "gin_trgm_ops"},
|
||||
postgresql_using="gin",
|
||||
)
|
||||
op.create_index(
|
||||
"ix_domains_name_lower",
|
||||
"domains",
|
||||
[text("lower(name)")],
|
||||
unique=False,
|
||||
postgresql_ops={"lower(name)": "gin_trgm_ops"},
|
||||
postgresql_using="gin",
|
||||
)
|
||||
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_index("ix_certificates_cn_lower", table_name="certificates")
|
||||
op.drop_index("ix_certificates_name_lower", table_name="certificates")
|
||||
op.drop_index("ix_domains_name_lower", table_name="domains")
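These GIN trigram indexes appear intended to back the new case-insensitive filters (func.lower(...).like(...)) added to the certificate search in this change. Each op.create_index call in upgrade() corresponds to DDL roughly like the following (illustrative; requires the pg_trgm extension to be installed):

    CREATE INDEX ix_certificates_cn_lower ON certificates USING gin (lower(cn) gin_trgm_ops);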
|
@@ -45,6 +45,6 @@ def upgrade():


def downgrade():
    op.drop_index("ix_domains_name", table_name="domains")
    op.drop_index("ix_domains_name_gin", table_name="domains")
    op.drop_index("ix_certificates_name", table_name="certificates")
    op.drop_index("ix_certificates_cn", table_name="certificates")
@@ -54,18 +54,30 @@ class AcmeHandler(object):
            current_app.logger.error(f"Unable to fetch DNS Providers: {e}")
            self.all_dns_providers = []

    def find_dns_challenge(self, host, authorizations):
    def get_dns_challenges(self, host, authorizations):
        """Get dns challenges for provided domain"""

        domain_to_validate, is_wildcard = self.strip_wildcard(host)
        dns_challenges = []
        for authz in authorizations:
            if not authz.body.identifier.value.lower() == host.lower():
            if not authz.body.identifier.value.lower() == domain_to_validate.lower():
                continue
            if is_wildcard and not authz.body.wildcard:
                continue
            if not is_wildcard and authz.body.wildcard:
                continue
            for combo in authz.body.challenges:
                if isinstance(combo.chall, challenges.DNS01):
                    dns_challenges.append(combo)

        return dns_challenges

    def maybe_remove_wildcard(self, host):
        return host.replace("*.", "")
    def strip_wildcard(self, host):
        """Removes the leading *. and returns Host and whether it was removed or not (True/False)"""
        prefix = "*."
        if host.startswith(prefix):
            return host[len(prefix):], True
        return host, False
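strip_wildcard differs from the old maybe_remove_wildcard in that it also reports whether a wildcard prefix was present, which get_dns_challenges uses to match wildcard authorizations correctly. A quick sketch of the return values (illustrative):

    handler.strip_wildcard("*.example.com")    # ("example.com", True)
    handler.strip_wildcard("www.example.com")  # ("www.example.com", False)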
def maybe_add_extension(self, host, dns_provider_options):
|
||||
if dns_provider_options and dns_provider_options.get(
|
||||
@ -86,9 +98,8 @@ class AcmeHandler(object):
|
||||
current_app.logger.debug("Starting DNS challenge for {0}".format(host))
|
||||
|
||||
change_ids = []
|
||||
|
||||
host_to_validate = self.maybe_remove_wildcard(host)
|
||||
dns_challenges = self.find_dns_challenge(host_to_validate, order.authorizations)
|
||||
dns_challenges = self.get_dns_challenges(host, order.authorizations)
|
||||
host_to_validate, _ = self.strip_wildcard(host)
|
||||
host_to_validate = self.maybe_add_extension(
|
||||
host_to_validate, dns_provider_options
|
||||
)
|
||||
@ -172,7 +183,7 @@ class AcmeHandler(object):
|
||||
|
||||
except (AcmeError, TimeoutError):
|
||||
sentry.captureException(extra={"order_url": str(order.uri)})
|
||||
metrics.send("request_certificate_error", "counter", 1)
|
||||
metrics.send("request_certificate_error", "counter", 1, metric_tags={"uri": order.uri})
|
||||
current_app.logger.error(
|
||||
f"Unable to resolve Acme order: {order.uri}", exc_info=True
|
||||
)
|
||||
@ -183,15 +194,26 @@ class AcmeHandler(object):
|
||||
else:
|
||||
raise
|
||||
|
||||
metrics.send("request_certificate_success", "counter", 1, metric_tags={"uri": order.uri})
|
||||
current_app.logger.info(
|
||||
f"Successfully resolved Acme order: {order.uri}", exc_info=True
|
||||
)
|
||||
|
||||
pem_certificate = OpenSSL.crypto.dump_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM,
|
||||
OpenSSL.crypto.load_certificate(
|
||||
OpenSSL.crypto.FILETYPE_PEM, orderr.fullchain_pem
|
||||
),
|
||||
).decode()
|
||||
pem_certificate_chain = orderr.fullchain_pem[
|
||||
len(pem_certificate) : # noqa
|
||||
].lstrip()
|
||||
|
||||
if current_app.config.get("IDENTRUST_CROSS_SIGNED_LE_ICA", False) \
|
||||
and datetime.datetime.now() < datetime.datetime.strptime(
|
||||
current_app.config.get("IDENTRUST_CROSS_SIGNED_LE_ICA_EXPIRATION_DATE", "17/03/21"), '%d/%m/%y'):
|
||||
pem_certificate_chain = current_app.config.get("IDENTRUST_CROSS_SIGNED_LE_ICA")
|
||||
else:
|
||||
pem_certificate_chain = orderr.fullchain_pem[
|
||||
len(pem_certificate) : # noqa
|
||||
].lstrip()
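The new branch substitutes a configured IdenTrust cross-signed intermediate for the chain returned with the ACME order, but only until the configured cut-off date (parsed with '%d/%m/%y'). Illustrative configuration, with the PEM body elided (the key names come from the diff):

    IDENTRUST_CROSS_SIGNED_LE_ICA_EXPIRATION_DATE = "17/03/21"
    IDENTRUST_CROSS_SIGNED_LE_ICA = """-----BEGIN CERTIFICATE-----
    ...
    -----END CERTIFICATE-----"""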
|
||||
|
||||
current_app.logger.debug(
|
||||
"{0} {1}".format(type(pem_certificate), type(pem_certificate_chain))
|
||||
@ -320,7 +342,7 @@ class AcmeHandler(object):
|
||||
)
|
||||
dns_provider_options = json.loads(dns_provider.credentials)
|
||||
account_number = dns_provider_options.get("account_id")
|
||||
host_to_validate = self.maybe_remove_wildcard(authz_record.host)
|
||||
host_to_validate, _ = self.strip_wildcard(authz_record.host)
|
||||
host_to_validate = self.maybe_add_extension(
|
||||
host_to_validate, dns_provider_options
|
||||
)
|
||||
@ -352,7 +374,7 @@ class AcmeHandler(object):
|
||||
dns_provider_options = json.loads(dns_provider.credentials)
|
||||
account_number = dns_provider_options.get("account_id")
|
||||
dns_challenges = authz_record.dns_challenge
|
||||
host_to_validate = self.maybe_remove_wildcard(authz_record.host)
|
||||
host_to_validate, _ = self.strip_wildcard(authz_record.host)
|
||||
host_to_validate = self.maybe_add_extension(
|
||||
host_to_validate, dns_provider_options
|
||||
)
|
||||
|
@ -1,11 +1,10 @@
|
||||
import time
|
||||
import requests
|
||||
import json
|
||||
import sys
|
||||
import time
|
||||
|
||||
import lemur.common.utils as utils
|
||||
import lemur.dns_providers.util as dnsutil
|
||||
|
||||
import requests
|
||||
from flask import current_app
|
||||
from lemur.extensions import metrics, sentry
|
||||
|
||||
@ -17,7 +16,9 @@ REQUIRED_VARIABLES = [
|
||||
|
||||
|
||||
class Zone:
|
||||
""" This class implements a PowerDNS zone in JSON. """
|
||||
"""
|
||||
This class implements a PowerDNS zone in JSON.
|
||||
"""
|
||||
|
||||
def __init__(self, _data):
|
||||
self._data = _data
|
||||
@ -39,7 +40,9 @@ class Zone:
|
||||
|
||||
|
||||
class Record:
|
||||
""" This class implements a PowerDNS record. """
|
||||
"""
|
||||
This class implements a PowerDNS record.
|
||||
"""
|
||||
|
||||
def __init__(self, _data):
|
||||
self._data = _data
|
||||
@ -49,20 +52,30 @@ class Record:
|
||||
return self._data["name"]
|
||||
|
||||
@property
|
||||
def disabled(self):
|
||||
return self._data["disabled"]
|
||||
def type(self):
|
||||
return self._data["type"]
|
||||
|
||||
@property
|
||||
def ttl(self):
|
||||
return self._data["ttl"]
|
||||
|
||||
@property
|
||||
def content(self):
|
||||
return self._data["content"]
|
||||
|
||||
@property
|
||||
def ttl(self):
|
||||
return self._data["ttl"]
|
||||
def disabled(self):
|
||||
return self._data["disabled"]
|
||||
|
||||
|
||||
def get_zones(account_number):
|
||||
"""Retrieve authoritative zones from the PowerDNS API and return a list"""
|
||||
"""
|
||||
Retrieve authoritative zones from the PowerDNS API and return a list of zones
|
||||
|
||||
:param account_number:
|
||||
:raise: Exception
|
||||
:return: list of Zone Objects
|
||||
"""
|
||||
_check_conf()
|
||||
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
|
||||
path = f"/api/v1/servers/{server_id}/zones"
|
||||
@ -90,44 +103,41 @@ def get_zones(account_number):
|
||||
|
||||
|
||||
def create_txt_record(domain, token, account_number):
|
||||
""" Create a TXT record for the given domain and token and return a change_id tuple """
|
||||
"""
|
||||
Create a TXT record for the given domain and token and return a change_id tuple
|
||||
|
||||
:param domain: FQDN
|
||||
:param token: challenge value
|
||||
:param account_number:
|
||||
:return: tuple of domain/token
|
||||
"""
|
||||
_check_conf()
|
||||
zone_name = _get_zone_name(domain, account_number)
|
||||
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
|
||||
zone_id = zone_name + "."
|
||||
domain_id = domain + "."
|
||||
path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
|
||||
payload = {
|
||||
"rrsets": [
|
||||
{
|
||||
"name": domain_id,
|
||||
"type": "TXT",
|
||||
"ttl": 300,
|
||||
"changetype": "REPLACE",
|
||||
"records": [
|
||||
{
|
||||
"content": f"\"{token}\"",
|
||||
"disabled": False
|
||||
}
|
||||
],
|
||||
"comments": []
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
function = sys._getframe().f_code.co_name
|
||||
log_data = {
|
||||
"function": function,
|
||||
"fqdn": domain,
|
||||
"token": token,
|
||||
}
|
||||
|
||||
# Create new record
|
||||
domain_id = domain + "."
|
||||
records = [Record({'name': domain_id, 'content': f"\"{token}\"", 'disabled': False})]
|
||||
|
||||
# Get current records
|
||||
cur_records = _get_txt_records(domain)
|
||||
for record in cur_records:
|
||||
if record.content != token:
|
||||
records.append(record)
|
||||
|
||||
try:
|
||||
_patch(path, payload)
|
||||
log_data["message"] = "TXT record successfully created"
|
||||
_patch_txt_records(domain, account_number, records)
|
||||
log_data["message"] = "TXT record(s) successfully created"
|
||||
current_app.logger.debug(log_data)
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
log_data["Exception"] = e
|
||||
log_data["message"] = "Unable to create TXT record"
|
||||
log_data["message"] = "Unable to create TXT record(s)"
|
||||
current_app.logger.debug(log_data)
|
||||
|
||||
change_id = (domain, token)
|
||||
@ -136,8 +146,11 @@ def create_txt_record(domain, token, account_number):
|
||||
|
||||
def wait_for_dns_change(change_id, account_number=None):
|
||||
"""
|
||||
Checks the authoritative DNS Server to see if changes have propagated to DNS
|
||||
Retries and waits until successful.
|
||||
Checks the authoritative DNS Server to see if changes have propagated.
|
||||
|
||||
:param change_id: tuple of domain/token
|
||||
:param account_number:
|
||||
:return:
|
||||
"""
|
||||
_check_conf()
|
||||
domain, token = change_id
|
||||
@ -171,53 +184,115 @@ def wait_for_dns_change(change_id, account_number=None):
|
||||
|
||||
|
||||
def delete_txt_record(change_id, account_number, domain, token):
|
||||
""" Delete the TXT record for the given domain and token """
|
||||
"""
|
||||
Delete the TXT record for the given domain and token
|
||||
|
||||
:param change_id: tuple of domain/token
|
||||
:param account_number:
|
||||
:param domain: FQDN
|
||||
:param token: challenge to delete
|
||||
:return:
|
||||
"""
|
||||
_check_conf()
|
||||
zone_name = _get_zone_name(domain, account_number)
|
||||
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
|
||||
zone_id = zone_name + "."
|
||||
domain_id = domain + "."
|
||||
path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
|
||||
payload = {
|
||||
"rrsets": [
|
||||
{
|
||||
"name": domain_id,
|
||||
"type": "TXT",
|
||||
"ttl": 300,
|
||||
"changetype": "DELETE",
|
||||
"records": [
|
||||
{
|
||||
"content": f"\"{token}\"",
|
||||
"disabled": False
|
||||
}
|
||||
],
|
||||
"comments": []
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
function = sys._getframe().f_code.co_name
|
||||
log_data = {
|
||||
"function": function,
|
||||
"fqdn": domain,
|
||||
"token": token
|
||||
"token": token,
|
||||
}
|
||||
try:
|
||||
_patch(path, payload)
|
||||
log_data["message"] = "TXT record successfully deleted"
|
||||
current_app.logger.debug(log_data)
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
log_data["Exception"] = e
|
||||
log_data["message"] = "Unable to delete TXT record"
|
||||
|
||||
"""
|
||||
Get existing TXT records matching the domain from DNS
|
||||
The token to be deleted should already exist
|
||||
There may be other records with different tokens as well
|
||||
"""
|
||||
cur_records = _get_txt_records(domain)
|
||||
found = False
|
||||
new_records = []
|
||||
for record in cur_records:
|
||||
if record.content == f"\"{token}\"":
|
||||
found = True
|
||||
else:
|
||||
new_records.append(record)
|
||||
|
||||
# Since the matching token is not in DNS, there is nothing to delete
|
||||
if not found:
|
||||
log_data["message"] = "Unable to delete TXT record: Token not found in existing TXT records"
|
||||
current_app.logger.debug(log_data)
|
||||
return
|
||||
|
||||
# The record to delete has been found AND there are other tokens set on the same domain
|
||||
# Since we only want to delete one token value from the RRSet, we need to use the Patch command to
|
||||
# overwrite the current RRSet with the existing records.
|
||||
elif new_records:
|
||||
try:
|
||||
_patch_txt_records(domain, account_number, new_records)
|
||||
log_data["message"] = "TXT record successfully deleted"
|
||||
current_app.logger.debug(log_data)
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
log_data["Exception"] = e
|
||||
log_data["message"] = "Unable to delete TXT record: patching exception"
|
||||
current_app.logger.debug(log_data)
|
||||
|
||||
# The record to delete has been found AND there are no other token values set on the same domain
|
||||
# Use the Delete command to delete the whole RRSet.
|
||||
else:
|
||||
zone_name = _get_zone_name(domain, account_number)
|
||||
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
|
||||
zone_id = zone_name + "."
|
||||
domain_id = domain + "."
|
||||
path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
|
||||
payload = {
|
||||
"rrsets": [
|
||||
{
|
||||
"name": domain_id,
|
||||
"type": "TXT",
|
||||
"ttl": 300,
|
||||
"changetype": "DELETE",
|
||||
"records": [
|
||||
{
|
||||
"content": f"\"{token}\"",
|
||||
"disabled": False
|
||||
}
|
||||
],
|
||||
"comments": []
|
||||
}
|
||||
]
|
||||
}
|
||||
function = sys._getframe().f_code.co_name
|
||||
log_data = {
|
||||
"function": function,
|
||||
"fqdn": domain,
|
||||
"token": token
|
||||
}
|
||||
try:
|
||||
_patch(path, payload)
|
||||
log_data["message"] = "TXT record successfully deleted"
|
||||
current_app.logger.debug(log_data)
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
log_data["Exception"] = e
|
||||
log_data["message"] = "Unable to delete TXT record"
|
||||
current_app.logger.debug(log_data)
|
||||
|
||||
|
||||
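The three-way branch above covers every deletion case: if the challenge token is not present in DNS there is nothing to do, if other tokens share the RRSet the set is rewritten with changetype REPLACE so only the remaining records survive, and if the token was the last record the whole RRSet is dropped with changetype DELETE. A condensed, illustrative sketch of that flow, reusing the helpers defined in this module (logging and exception handling omitted, so it is not a drop-in replacement):

def _delete_token_sketch(domain, token, account_number):
    # Fetch whatever TXT records currently exist for the challenge name.
    cur_records = _get_txt_records(domain)
    remaining = [r for r in cur_records if r.content != f"\"{token}\""]

    if len(remaining) == len(cur_records):
        # Token not found in DNS: nothing to delete.
        return

    if remaining:
        # Other tokens share the RRSet: overwrite it (changetype REPLACE)
        # with only the records that should be kept.
        _patch_txt_records(domain, account_number, remaining)
    else:
        # The token was the only record: delete the whole RRSet.
        server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
        zone_id = _get_zone_name(domain, account_number) + "."
        path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
        payload = {
            "rrsets": [{
                "name": domain + ".",
                "type": "TXT",
                "ttl": 300,
                "changetype": "DELETE",
                "records": [{"content": f"\"{token}\"", "disabled": False}],
                "comments": [],
            }]
        }
        _patch(path, payload)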
def _check_conf():
|
||||
"""
|
||||
Verifies required configuration variables are set
|
||||
|
||||
:return:
|
||||
"""
|
||||
utils.validate_conf(current_app, REQUIRED_VARIABLES)
|
||||
|
||||
|
||||
def _generate_header():
|
||||
"""Generate a PowerDNS API header and return it as a dictionary"""
|
||||
"""
|
||||
Generate a PowerDNS API header and return it as a dictionary
|
||||
|
||||
:return: Dict of header parameters
|
||||
"""
|
||||
api_key_name = current_app.config.get("ACME_POWERDNS_APIKEYNAME")
|
||||
api_key = current_app.config.get("ACME_POWERDNS_APIKEY")
|
||||
headers = {api_key_name: api_key}
|
||||
@ -225,7 +300,13 @@ def _generate_header():
|
||||
|
||||
|
||||
def _get_zone_name(domain, account_number):
|
||||
"""Get most specific matching zone for the given domain and return as a String"""
|
||||
"""
|
||||
Get most specific matching zone for the given domain and return as a String
|
||||
|
||||
:param domain: FQDN
|
||||
:param account_number:
|
||||
:return: FQDN of domain
|
||||
"""
|
||||
zones = get_zones(account_number)
|
||||
zone_name = ""
|
||||
for z in zones:
|
||||
@ -243,25 +324,113 @@ def _get_zone_name(domain, account_number):
|
||||
return zone_name
|
||||
|
||||
|
||||
def _get_txt_records(domain):
|
||||
"""
|
||||
Retrieve TXT records for a given domain and return list of Record Objects
|
||||
|
||||
:param domain: FQDN
|
||||
:return: list of Record objects
|
||||
"""
|
||||
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
|
||||
|
||||
path = f"/api/v1/servers/{server_id}/search-data?q={domain}&max=100&object_type=record"
|
||||
function = sys._getframe().f_code.co_name
|
||||
log_data = {
|
||||
"function": function
|
||||
}
|
||||
try:
|
||||
records = _get(path)
|
||||
log_data["message"] = "Retrieved TXT Records Successfully"
|
||||
current_app.logger.debug(log_data)
|
||||
|
||||
except Exception as e:
|
||||
sentry.captureException()
|
||||
log_data["Exception"] = e
|
||||
log_data["message"] = "Failed to Retrieve TXT Records"
|
||||
current_app.logger.debug(log_data)
|
||||
return []
|
||||
|
||||
txt_records = []
|
||||
for record in records:
|
||||
cur_record = Record(record)
|
||||
txt_records.append(cur_record)
|
||||
return txt_records
|
||||
|
||||
|
||||
def _get(path, params=None):
|
||||
""" Execute a GET request on the given URL (base_uri + path) and return response as JSON object """
|
||||
"""
|
||||
Execute a GET request on the given URL (base_uri + path) and return response as JSON object
|
||||
|
||||
:param path: Relative URL path
|
||||
:param params: additional parameters
|
||||
:return: json response
|
||||
"""
|
||||
base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN")
|
||||
verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True)
|
||||
resp = requests.get(
|
||||
f"{base_uri}{path}",
|
||||
headers=_generate_header(),
|
||||
params=params,
|
||||
verify=True,
|
||||
verify=verify_value
|
||||
)
|
||||
resp.raise_for_status()
|
||||
return resp.json()
|
||||
|
||||
|
||||
def _patch_txt_records(domain, account_number, records):
|
||||
"""
|
||||
Send Patch request to PowerDNS Server
|
||||
|
||||
:param domain: FQDN
|
||||
:param account_number:
|
||||
:param records: List of Record objects
|
||||
:return:
|
||||
"""
|
||||
domain_id = domain + "."
|
||||
|
||||
# Create records
|
||||
txt_records = []
|
||||
for record in records:
|
||||
txt_records.append(
|
||||
{'content': record.content, 'disabled': record.disabled}
|
||||
)
|
||||
|
||||
# Create RRSet
|
||||
payload = {
|
||||
"rrsets": [
|
||||
{
|
||||
"name": domain_id,
|
||||
"type": "TXT",
|
||||
"ttl": 300,
|
||||
"changetype": "REPLACE",
|
||||
"records": txt_records,
|
||||
"comments": []
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
# Create Txt Records
|
||||
server_id = current_app.config.get("ACME_POWERDNS_SERVERID", "localhost")
|
||||
zone_name = _get_zone_name(domain, account_number)
|
||||
zone_id = zone_name + "."
|
||||
path = f"/api/v1/servers/{server_id}/zones/{zone_id}"
|
||||
_patch(path, payload)
|
||||
|
||||
|
||||
def _patch(path, payload):
|
||||
""" Execute a Patch request on the given URL (base_uri + path) with given payload """
|
||||
"""
|
||||
Execute a Patch request on the given URL (base_uri + path) with given payload
|
||||
|
||||
:param path:
|
||||
:param payload:
|
||||
:return:
|
||||
"""
|
||||
base_uri = current_app.config.get("ACME_POWERDNS_DOMAIN")
|
||||
verify_value = current_app.config.get("ACME_POWERDNS_VERIFY", True)
|
||||
resp = requests.patch(
|
||||
f"{base_uri}{path}",
|
||||
data=json.dumps(payload),
|
||||
headers=_generate_header()
|
||||
headers=_generate_header(),
|
||||
verify=verify_value
|
||||
)
|
||||
resp.raise_for_status()
|
||||
|
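Both request helpers now read an optional ACME_POWERDNS_VERIFY setting instead of hard-coding verify=True. Since requests accepts either a boolean or a path to a CA bundle for verify=, a deployment fronted by a private CA could, hypothetically, point the option at its bundle; the values below are examples, not part of this change:

# Hypothetical lemur.conf.py entries showing the two forms the option can take:
ACME_POWERDNS_VERIFY = False                        # disable TLS verification (not recommended)
ACME_POWERDNS_VERIFY = "/etc/ssl/powerdns-ca.pem"   # or verify against a private CA bundle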
@ -35,9 +35,10 @@ def get_zones(client=None):
|
||||
zones = []
|
||||
for page in paginator.paginate():
|
||||
for zone in page["HostedZones"]:
|
||||
zones.append(
|
||||
zone["Name"][:-1]
|
||||
) # We need [:-1] to strip out the trailing dot.
|
||||
if not zone["Config"]["PrivateZone"]:
|
||||
zones.append(
|
||||
zone["Name"][:-1]
|
||||
) # We need [:-1] to strip out the trailing dot.
|
||||
return zones
|
||||
|
||||
|
||||
|
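The change above makes get_zones skip private hosted zones, since a public CA can never resolve challenge records placed in them. For illustration only, the same filter expressed against a plain boto3 Route 53 client (the plugin itself obtains its client through Lemur's own account handling rather than like this):

import boto3

def list_public_zone_names():
    client = boto3.client("route53")
    paginator = client.get_paginator("list_hosted_zones")
    zones = []
    for page in paginator.paginate():
        for zone in page["HostedZones"]:
            # Private zones are unreachable by the ACME CA, so leave them out.
            if not zone["Config"]["PrivateZone"]:
                zones.append(zone["Name"][:-1])  # strip the trailing dot
    return zones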
@ -1,11 +1,9 @@
|
||||
import unittest
|
||||
from unittest.mock import patch, Mock
|
||||
|
||||
from cryptography.x509 import DNSName
|
||||
from requests.models import Response
|
||||
|
||||
from mock import MagicMock, Mock, patch
|
||||
|
||||
from lemur.plugins.lemur_acme import plugin, ultradns
|
||||
from lemur.plugins.lemur_acme import plugin
|
||||
from mock import MagicMock
|
||||
|
||||
|
||||
class TestAcme(unittest.TestCase):
|
||||
@ -23,11 +21,12 @@ class TestAcme(unittest.TestCase):
|
||||
}
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.plugin.len", return_value=1)
|
||||
def test_find_dns_challenge(self, mock_len):
|
||||
def test_get_dns_challenges(self, mock_len):
|
||||
assert mock_len
|
||||
|
||||
from acme import challenges
|
||||
|
||||
host = "example.com"
|
||||
c = challenges.DNS01()
|
||||
|
||||
mock_authz = Mock()
|
||||
@ -35,9 +34,18 @@ class TestAcme(unittest.TestCase):
|
||||
mock_entry = Mock()
|
||||
mock_entry.chall = c
|
||||
mock_authz.body.resolved_combinations.append(mock_entry)
|
||||
result = yield self.acme.find_dns_challenge(mock_authz)
|
||||
result = yield self.acme.get_dns_challenges(host, mock_authz)
|
||||
self.assertEqual(result, mock_entry)
|
||||
|
||||
def test_strip_wildcard(self):
|
||||
expected = ("example.com", False)
|
||||
result = self.acme.strip_wildcard("example.com")
|
||||
self.assertEqual(expected, result)
|
||||
|
||||
expected = ("example.com", True)
|
||||
result = self.acme.strip_wildcard("*.example.com")
|
||||
self.assertEqual(expected, result)
|
||||
|
||||
def test_authz_record(self):
|
||||
a = plugin.AuthorizationRecord("host", "authz", "challenge", "id")
|
||||
self.assertEqual(type(a), plugin.AuthorizationRecord)
|
||||
@ -45,9 +53,9 @@ class TestAcme(unittest.TestCase):
|
||||
@patch("acme.client.Client")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.len", return_value=1)
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_dns_challenges")
|
||||
def test_start_dns_challenge(
|
||||
self, mock_find_dns_challenge, mock_len, mock_app, mock_acme
|
||||
self, mock_get_dns_challenges, mock_len, mock_app, mock_acme
|
||||
):
|
||||
assert mock_len
|
||||
mock_order = Mock()
|
||||
@ -65,7 +73,7 @@ class TestAcme(unittest.TestCase):
|
||||
mock_dns_provider.create_txt_record = Mock(return_value=1)
|
||||
|
||||
values = [mock_entry]
|
||||
iterable = mock_find_dns_challenge.return_value
|
||||
iterable = mock_get_dns_challenges.return_value
|
||||
iterator = iter(values)
|
||||
iterable.__iter__.return_value = iterator
|
||||
result = self.acme.start_dns_challenge(
|
||||
@ -78,7 +86,7 @@ class TestAcme(unittest.TestCase):
|
||||
@patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change")
|
||||
@patch("time.sleep")
|
||||
def test_complete_dns_challenge_success(
|
||||
self, mock_sleep, mock_wait_for_dns_change, mock_current_app, mock_acme
|
||||
self, mock_sleep, mock_wait_for_dns_change, mock_current_app, mock_acme
|
||||
):
|
||||
mock_dns_provider = Mock()
|
||||
mock_dns_provider.wait_for_dns_change = Mock(return_value=True)
|
||||
@ -102,7 +110,7 @@ class TestAcme(unittest.TestCase):
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
@patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change")
|
||||
def test_complete_dns_challenge_fail(
|
||||
self, mock_wait_for_dns_change, mock_current_app, mock_acme
|
||||
self, mock_wait_for_dns_change, mock_current_app, mock_acme
|
||||
):
|
||||
mock_dns_provider = Mock()
|
||||
mock_dns_provider.wait_for_dns_change = Mock(return_value=True)
|
||||
@ -127,15 +135,15 @@ class TestAcme(unittest.TestCase):
|
||||
@patch("acme.client.Client")
|
||||
@patch("OpenSSL.crypto", return_value="mock_cert")
|
||||
@patch("josepy.util.ComparableX509")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_dns_challenges")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
def test_request_certificate(
|
||||
self,
|
||||
mock_current_app,
|
||||
mock_find_dns_challenge,
|
||||
mock_jose,
|
||||
mock_crypto,
|
||||
mock_acme,
|
||||
self,
|
||||
mock_current_app,
|
||||
mock_get_dns_challenges,
|
||||
mock_jose,
|
||||
mock_crypto,
|
||||
mock_acme,
|
||||
):
|
||||
mock_cert_response = Mock()
|
||||
mock_cert_response.body = "123"
|
||||
@ -148,6 +156,7 @@ class TestAcme(unittest.TestCase):
|
||||
mock_acme.fetch_chain = Mock(return_value="mock_chain")
|
||||
mock_crypto.dump_certificate = Mock(return_value=b"chain")
|
||||
mock_order = Mock()
|
||||
mock_current_app.config = {}
|
||||
self.acme.request_certificate(mock_acme, [], mock_order)
|
||||
|
||||
def test_setup_acme_client_fail(self):
|
||||
@ -172,7 +181,7 @@ class TestAcme(unittest.TestCase):
|
||||
assert result_client
|
||||
assert result_registration
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
@patch('lemur.plugins.lemur_acme.plugin.current_app')
|
||||
def test_get_domains_single(self, mock_current_app):
|
||||
options = {"common_name": "test.netflix.net"}
|
||||
result = self.acme.get_domains(options)
|
||||
@ -256,11 +265,11 @@ class TestAcme(unittest.TestCase):
|
||||
@patch("lemur.plugins.lemur_acme.cloudflare.current_app")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
|
||||
def test_get_dns_provider(
|
||||
self,
|
||||
mock_dns_provider_service,
|
||||
mock_current_app_cloudflare,
|
||||
mock_current_app_dyn,
|
||||
mock_current_app,
|
||||
self,
|
||||
mock_dns_provider_service,
|
||||
mock_current_app_cloudflare,
|
||||
mock_current_app_dyn,
|
||||
mock_current_app,
|
||||
):
|
||||
provider = plugin.ACMEIssuerPlugin()
|
||||
route53 = provider.get_dns_provider("route53")
|
||||
@ -278,14 +287,14 @@ class TestAcme(unittest.TestCase):
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate")
|
||||
def test_get_ordered_certificate(
|
||||
self,
|
||||
mock_request_certificate,
|
||||
mock_finalize_authorizations,
|
||||
mock_get_authorizations,
|
||||
mock_dns_provider_service,
|
||||
mock_authorization_service,
|
||||
mock_current_app,
|
||||
mock_acme,
|
||||
self,
|
||||
mock_request_certificate,
|
||||
mock_finalize_authorizations,
|
||||
mock_get_authorizations,
|
||||
mock_dns_provider_service,
|
||||
mock_authorization_service,
|
||||
mock_current_app,
|
||||
mock_acme,
|
||||
):
|
||||
mock_client = Mock()
|
||||
mock_acme.return_value = (mock_client, "")
|
||||
@ -309,14 +318,14 @@ class TestAcme(unittest.TestCase):
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate")
|
||||
def test_get_ordered_certificates(
|
||||
self,
|
||||
mock_request_certificate,
|
||||
mock_finalize_authorizations,
|
||||
mock_get_authorizations,
|
||||
mock_dns_provider_service,
|
||||
mock_authorization_service,
|
||||
mock_current_app,
|
||||
mock_acme,
|
||||
self,
|
||||
mock_request_certificate,
|
||||
mock_finalize_authorizations,
|
||||
mock_get_authorizations,
|
||||
mock_dns_provider_service,
|
||||
mock_authorization_service,
|
||||
mock_current_app,
|
||||
mock_acme,
|
||||
):
|
||||
mock_client = Mock()
|
||||
mock_acme.return_value = (mock_client, "")
|
||||
@ -349,14 +358,14 @@ class TestAcme(unittest.TestCase):
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.authorization_service")
|
||||
def test_create_certificate(
|
||||
self,
|
||||
mock_authorization_service,
|
||||
mock_request_certificate,
|
||||
mock_finalize_authorizations,
|
||||
mock_get_authorizations,
|
||||
mock_current_app,
|
||||
mock_dns_provider_service,
|
||||
mock_acme,
|
||||
self,
|
||||
mock_authorization_service,
|
||||
mock_request_certificate,
|
||||
mock_finalize_authorizations,
|
||||
mock_get_authorizations,
|
||||
mock_current_app,
|
||||
mock_dns_provider_service,
|
||||
mock_acme,
|
||||
):
|
||||
provider = plugin.ACMEIssuerPlugin()
|
||||
mock_authority = Mock()
|
||||
@ -378,121 +387,3 @@ class TestAcme(unittest.TestCase):
|
||||
mock_request_certificate.return_value = ("pem_certificate", "chain")
|
||||
result = provider.create_certificate(csr, issuer_options)
|
||||
assert result
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.requests")
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
|
||||
def test_ultradns_get_token(self, mock_current_app, mock_requests):
|
||||
# ret_val = json.dumps({"access_token": "access"})
|
||||
the_response = Response()
|
||||
the_response._content = b'{"access_token": "access"}'
|
||||
mock_requests.post = Mock(return_value=the_response)
|
||||
mock_current_app.config.get = Mock(return_value="Test")
|
||||
result = ultradns.get_ultradns_token()
|
||||
self.assertTrue(len(result) > 0)
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
|
||||
def test_ultradns_create_txt_record(self, mock_current_app):
|
||||
domain = "_acme_challenge.test.example.com"
|
||||
zone = "test.example.com"
|
||||
token = "ABCDEFGHIJ"
|
||||
account_number = "1234567890"
|
||||
change_id = (domain, token)
|
||||
ultradns.get_zone_name = Mock(return_value=zone)
|
||||
mock_current_app.logger.debug = Mock()
|
||||
ultradns._post = Mock()
|
||||
log_data = {
|
||||
"function": "create_txt_record",
|
||||
"fqdn": domain,
|
||||
"token": token,
|
||||
"message": "TXT record created"
|
||||
}
|
||||
result = ultradns.create_txt_record(domain, token, account_number)
|
||||
mock_current_app.logger.debug.assert_called_with(log_data)
|
||||
self.assertEqual(result, change_id)
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
|
||||
@patch("lemur.extensions.metrics")
|
||||
def test_ultradns_delete_txt_record(self, mock_metrics, mock_current_app):
|
||||
domain = "_acme_challenge.test.example.com"
|
||||
zone = "test.example.com"
|
||||
token = "ABCDEFGHIJ"
|
||||
account_number = "1234567890"
|
||||
change_id = (domain, token)
|
||||
mock_current_app.logger.debug = Mock()
|
||||
ultradns.get_zone_name = Mock(return_value=zone)
|
||||
ultradns._post = Mock()
|
||||
ultradns._get = Mock()
|
||||
ultradns._get.return_value = {'zoneName': 'test.example.com.com',
|
||||
'rrSets': [{'ownerName': '_acme-challenge.test.example.com.',
|
||||
'rrtype': 'TXT (16)', 'ttl': 5, 'rdata': ['ABCDEFGHIJ']}],
|
||||
'queryInfo': {'sort': 'OWNER', 'reverse': False, 'limit': 100},
|
||||
'resultInfo': {'totalCount': 1, 'offset': 0, 'returnedCount': 1}}
|
||||
ultradns._delete = Mock()
|
||||
mock_metrics.send = Mock()
|
||||
ultradns.delete_txt_record(change_id, account_number, domain, token)
|
||||
mock_current_app.logger.debug.assert_not_called()
|
||||
mock_metrics.send.assert_not_called()
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
|
||||
@patch("lemur.extensions.metrics")
|
||||
def test_ultradns_wait_for_dns_change(self, mock_metrics, mock_current_app):
|
||||
ultradns._has_dns_propagated = Mock(return_value=True)
|
||||
nameserver = "1.1.1.1"
|
||||
ultradns.get_authoritative_nameserver = Mock(return_value=nameserver)
|
||||
mock_metrics.send = Mock()
|
||||
domain = "_acme-challenge.test.example.com"
|
||||
token = "ABCDEFGHIJ"
|
||||
change_id = (domain, token)
|
||||
mock_current_app.logger.debug = Mock()
|
||||
ultradns.wait_for_dns_change(change_id)
|
||||
# mock_metrics.send.assert_not_called()
|
||||
log_data = {
|
||||
"function": "wait_for_dns_change",
|
||||
"fqdn": domain,
|
||||
"status": True,
|
||||
"message": "Record status on Public DNS"
|
||||
}
|
||||
mock_current_app.logger.debug.assert_called_with(log_data)
|
||||
|
||||
def test_ultradns_get_zone_name(self):
|
||||
zones = ['example.com', 'test.example.com']
|
||||
zone = "test.example.com"
|
||||
domain = "_acme-challenge.test.example.com"
|
||||
account_number = "1234567890"
|
||||
ultradns.get_zones = Mock(return_value=zones)
|
||||
result = ultradns.get_zone_name(domain, account_number)
|
||||
self.assertEqual(result, zone)
|
||||
|
||||
def test_ultradns_get_zones(self):
|
||||
account_number = "1234567890"
|
||||
path = "a/b/c"
|
||||
zones = ['example.com', 'test.example.com']
|
||||
paginate_response = [{
|
||||
'properties': {
|
||||
'name': 'example.com.', 'accountName': 'example', 'type': 'PRIMARY',
|
||||
'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9,
|
||||
'lastModifiedDateTime': '2017-06-14T06:45Z'},
|
||||
'registrarInfo': {
|
||||
'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.',
|
||||
'example.ultradns.biz.', 'example.ultradns.org.']}},
|
||||
'inherit': 'ALL'}, {
|
||||
'properties': {
|
||||
'name': 'test.example.com.', 'accountName': 'example', 'type': 'PRIMARY',
|
||||
'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9,
|
||||
'lastModifiedDateTime': '2017-06-14T06:45Z'},
|
||||
'registrarInfo': {
|
||||
'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.',
|
||||
'example.ultradns.biz.', 'example.ultradns.org.']}},
|
||||
'inherit': 'ALL'}, {
|
||||
'properties': {
|
||||
'name': 'example2.com.', 'accountName': 'example', 'type': 'SECONDARY',
|
||||
'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9,
|
||||
'lastModifiedDateTime': '2017-06-14T06:45Z'},
|
||||
'registrarInfo': {
|
||||
'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.',
|
||||
'example.ultradns.biz.', 'example.ultradns.org.']}},
|
||||
'inherit': 'ALL'}]
|
||||
ultradns._paginate = Mock(path, "zones")
|
||||
ultradns._paginate.side_effect = [[paginate_response]]
|
||||
result = ultradns.get_zones(account_number)
|
||||
self.assertEqual(result, zones)
|
||||
|
@ -1,5 +1,5 @@
|
||||
import unittest
|
||||
from mock import Mock, patch
|
||||
from unittest.mock import patch, Mock
|
||||
from lemur.plugins.lemur_acme import plugin, powerdns
|
||||
|
||||
|
||||
@ -48,13 +48,14 @@ class TestPowerdns(unittest.TestCase):
|
||||
self.assertEqual(result, zone)
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.powerdns.current_app")
|
||||
def test_create_txt_record(self, mock_current_app):
|
||||
def test_create_txt_record_write_only(self, mock_current_app):
|
||||
domain = "_acme_challenge.test.example.com"
|
||||
zone = "test.example.com"
|
||||
token = "ABCDEFGHIJ"
|
||||
account_number = "1234567890"
|
||||
change_id = (domain, token)
|
||||
powerdns._check_conf = Mock()
|
||||
powerdns._get_txt_records = Mock(return_value=[])
|
||||
powerdns._get_zone_name = Mock(return_value=zone)
|
||||
mock_current_app.logger.debug = Mock()
|
||||
mock_current_app.config.get = Mock(return_value="localhost")
|
||||
@ -63,24 +64,74 @@ class TestPowerdns(unittest.TestCase):
|
||||
"function": "create_txt_record",
|
||||
"fqdn": domain,
|
||||
"token": token,
|
||||
"message": "TXT record successfully created"
|
||||
"message": "TXT record(s) successfully created"
|
||||
}
|
||||
result = powerdns.create_txt_record(domain, token, account_number)
|
||||
mock_current_app.logger.debug.assert_called_with(log_data)
|
||||
self.assertEqual(result, change_id)
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.powerdns.current_app")
|
||||
def test_create_txt_record_append(self, mock_current_app):
|
||||
domain = "_acme_challenge.test.example.com"
|
||||
zone = "test.example.com"
|
||||
token = "ABCDEFGHIJ"
|
||||
account_number = "1234567890"
|
||||
change_id = (domain, token)
|
||||
powerdns._check_conf = Mock()
|
||||
cur_token = "123456"
|
||||
cur_records = [powerdns.Record({'name': domain, 'content': f"\"{cur_token}\"", 'disabled': False})]
|
||||
powerdns._get_txt_records = Mock(return_value=cur_records)
|
||||
powerdns._get_zone_name = Mock(return_value=zone)
|
||||
mock_current_app.logger.debug = Mock()
|
||||
mock_current_app.config.get = Mock(return_value="localhost")
|
||||
powerdns._patch = Mock()
|
||||
log_data = {
|
||||
"function": "create_txt_record",
|
||||
"fqdn": domain,
|
||||
"token": token,
|
||||
"message": "TXT record(s) successfully created"
|
||||
}
|
||||
expected_path = "/api/v1/servers/localhost/zones/test.example.com."
|
||||
expected_payload = {
|
||||
"rrsets": [
|
||||
{
|
||||
"name": domain + ".",
|
||||
"type": "TXT",
|
||||
"ttl": 300,
|
||||
"changetype": "REPLACE",
|
||||
"records": [
|
||||
{
|
||||
"content": f"\"{token}\"",
|
||||
"disabled": False
|
||||
},
|
||||
{
|
||||
"content": f"\"{cur_token}\"",
|
||||
"disabled": False
|
||||
}
|
||||
],
|
||||
"comments": []
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
result = powerdns.create_txt_record(domain, token, account_number)
|
||||
mock_current_app.logger.debug.assert_called_with(log_data)
|
||||
powerdns._patch.assert_called_with(expected_path, expected_payload)
|
||||
self.assertEqual(result, change_id)
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.powerdns.dnsutil")
|
||||
@patch("lemur.plugins.lemur_acme.powerdns.current_app")
|
||||
@patch("lemur.extensions.metrics")
|
||||
@patch("time.sleep")
|
||||
def test_wait_for_dns_change(self, mock_sleep, mock_metrics, mock_current_app, mock_dnsutil):
|
||||
domain = "_acme-challenge.test.example.com"
|
||||
token = "ABCDEFG"
|
||||
token1 = "ABCDEFG"
|
||||
token2 = "HIJKLMN"
|
||||
zone_name = "test.example.com"
|
||||
nameserver = "1.1.1.1"
|
||||
change_id = (domain, token)
|
||||
change_id = (domain, token1)
|
||||
powerdns._check_conf = Mock()
|
||||
mock_records = (token,)
|
||||
mock_records = (token2, token1)
|
||||
mock_current_app.config.get = Mock(return_value=1)
|
||||
powerdns._get_zone_name = Mock(return_value=zone_name)
|
||||
mock_dnsutil.get_authoritative_nameserver = Mock(return_value=nameserver)
|
||||
@ -114,7 +165,7 @@ class TestPowerdns(unittest.TestCase):
|
||||
"function": "delete_txt_record",
|
||||
"fqdn": domain,
|
||||
"token": token,
|
||||
"message": "TXT record successfully deleted"
|
||||
"message": "Unable to delete TXT record: Token not found in existing TXT records"
|
||||
}
|
||||
powerdns.delete_txt_record(change_id, account_number, domain, token)
|
||||
mock_current_app.logger.debug.assert_called_with(log_data)
|
||||
|
138
lemur/plugins/lemur_acme/tests/test_ultradns.py
Normal file
@ -0,0 +1,138 @@
|
||||
import unittest
|
||||
from unittest.mock import patch, Mock
|
||||
|
||||
from lemur.plugins.lemur_acme import plugin, ultradns
|
||||
from requests.models import Response
|
||||
|
||||
|
||||
class TestUltradns(unittest.TestCase):
|
||||
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
|
||||
def setUp(self, mock_dns_provider_service):
|
||||
self.ACMEIssuerPlugin = plugin.ACMEIssuerPlugin()
|
||||
self.acme = plugin.AcmeHandler()
|
||||
mock_dns_provider = Mock()
|
||||
mock_dns_provider.name = "cloudflare"
|
||||
mock_dns_provider.credentials = "{}"
|
||||
mock_dns_provider.provider_type = "cloudflare"
|
||||
self.acme.dns_providers_for_domain = {
|
||||
"www.test.com": [mock_dns_provider],
|
||||
"test.fakedomain.net": [mock_dns_provider],
|
||||
}
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.requests")
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
|
||||
def test_ultradns_get_token(self, mock_current_app, mock_requests):
|
||||
# ret_val = json.dumps({"access_token": "access"})
|
||||
the_response = Response()
|
||||
the_response._content = b'{"access_token": "access"}'
|
||||
mock_requests.post = Mock(return_value=the_response)
|
||||
mock_current_app.config.get = Mock(return_value="Test")
|
||||
result = ultradns.get_ultradns_token()
|
||||
self.assertTrue(len(result) > 0)
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
|
||||
def test_ultradns_create_txt_record(self, mock_current_app):
|
||||
domain = "_acme_challenge.test.example.com"
|
||||
zone = "test.example.com"
|
||||
token = "ABCDEFGHIJ"
|
||||
account_number = "1234567890"
|
||||
change_id = (domain, token)
|
||||
ultradns.get_zone_name = Mock(return_value=zone)
|
||||
mock_current_app.logger.debug = Mock()
|
||||
ultradns._post = Mock()
|
||||
log_data = {
|
||||
"function": "create_txt_record",
|
||||
"fqdn": domain,
|
||||
"token": token,
|
||||
"message": "TXT record created"
|
||||
}
|
||||
result = ultradns.create_txt_record(domain, token, account_number)
|
||||
mock_current_app.logger.debug.assert_called_with(log_data)
|
||||
self.assertEqual(result, change_id)
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
|
||||
@patch("lemur.extensions.metrics")
|
||||
def test_ultradns_delete_txt_record(self, mock_metrics, mock_current_app):
|
||||
domain = "_acme_challenge.test.example.com"
|
||||
zone = "test.example.com"
|
||||
token = "ABCDEFGHIJ"
|
||||
account_number = "1234567890"
|
||||
change_id = (domain, token)
|
||||
mock_current_app.logger.debug = Mock()
|
||||
ultradns.get_zone_name = Mock(return_value=zone)
|
||||
ultradns._post = Mock()
|
||||
ultradns._get = Mock()
|
||||
ultradns._get.return_value = {'zoneName': 'test.example.com.com',
|
||||
'rrSets': [{'ownerName': '_acme-challenge.test.example.com.',
|
||||
'rrtype': 'TXT (16)', 'ttl': 5, 'rdata': ['ABCDEFGHIJ']}],
|
||||
'queryInfo': {'sort': 'OWNER', 'reverse': False, 'limit': 100},
|
||||
'resultInfo': {'totalCount': 1, 'offset': 0, 'returnedCount': 1}}
|
||||
ultradns._delete = Mock()
|
||||
mock_metrics.send = Mock()
|
||||
ultradns.delete_txt_record(change_id, account_number, domain, token)
|
||||
mock_current_app.logger.debug.assert_not_called()
|
||||
mock_metrics.send.assert_not_called()
|
||||
|
||||
@patch("lemur.plugins.lemur_acme.ultradns.current_app")
|
||||
@patch("lemur.extensions.metrics")
|
||||
def test_ultradns_wait_for_dns_change(self, mock_metrics, mock_current_app):
|
||||
ultradns._has_dns_propagated = Mock(return_value=True)
|
||||
nameserver = "1.1.1.1"
|
||||
ultradns.get_authoritative_nameserver = Mock(return_value=nameserver)
|
||||
mock_metrics.send = Mock()
|
||||
domain = "_acme-challenge.test.example.com"
|
||||
token = "ABCDEFGHIJ"
|
||||
change_id = (domain, token)
|
||||
mock_current_app.logger.debug = Mock()
|
||||
ultradns.wait_for_dns_change(change_id)
|
||||
# mock_metrics.send.assert_not_called()
|
||||
log_data = {
|
||||
"function": "wait_for_dns_change",
|
||||
"fqdn": domain,
|
||||
"status": True,
|
||||
"message": "Record status on Public DNS"
|
||||
}
|
||||
mock_current_app.logger.debug.assert_called_with(log_data)
|
||||
|
||||
def test_ultradns_get_zone_name(self):
|
||||
zones = ['example.com', 'test.example.com']
|
||||
zone = "test.example.com"
|
||||
domain = "_acme-challenge.test.example.com"
|
||||
account_number = "1234567890"
|
||||
ultradns.get_zones = Mock(return_value=zones)
|
||||
result = ultradns.get_zone_name(domain, account_number)
|
||||
self.assertEqual(result, zone)
|
||||
|
||||
def test_ultradns_get_zones(self):
|
||||
account_number = "1234567890"
|
||||
path = "a/b/c"
|
||||
zones = ['example.com', 'test.example.com']
|
||||
paginate_response = [{
|
||||
'properties': {
|
||||
'name': 'example.com.', 'accountName': 'example', 'type': 'PRIMARY',
|
||||
'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9,
|
||||
'lastModifiedDateTime': '2017-06-14T06:45Z'},
|
||||
'registrarInfo': {
|
||||
'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.',
|
||||
'example.ultradns.biz.', 'example.ultradns.org.']}},
|
||||
'inherit': 'ALL'}, {
|
||||
'properties': {
|
||||
'name': 'test.example.com.', 'accountName': 'example', 'type': 'PRIMARY',
|
||||
'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9,
|
||||
'lastModifiedDateTime': '2017-06-14T06:45Z'},
|
||||
'registrarInfo': {
|
||||
'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.',
|
||||
'example.ultradns.biz.', 'example.ultradns.org.']}},
|
||||
'inherit': 'ALL'}, {
|
||||
'properties': {
|
||||
'name': 'example2.com.', 'accountName': 'example', 'type': 'SECONDARY',
|
||||
'dnssecStatus': 'UNSIGNED', 'status': 'ACTIVE', 'resourceRecordCount': 9,
|
||||
'lastModifiedDateTime': '2017-06-14T06:45Z'},
|
||||
'registrarInfo': {
|
||||
'nameServers': {'missing': ['example.ultradns.com.', 'example.ultradns.net.',
|
||||
'example.ultradns.biz.', 'example.ultradns.org.']}},
|
||||
'inherit': 'ALL'}]
|
||||
ultradns._paginate = Mock(path, "zones")
|
||||
ultradns._paginate.side_effect = [[paginate_response]]
|
||||
result = ultradns.get_zones(account_number)
|
||||
self.assertEqual(result, zones)
|
@ -24,6 +24,12 @@ def retry_throttled(exception):
|
||||
if exception.response["Error"]["Code"] == "NoSuchEntity":
|
||||
return False
|
||||
|
||||
# No need to retry deletion requests if there is a DeleteConflict error.
|
||||
# This error indicates that the certificate is still attached to an entity
|
||||
# and cannot be deleted.
|
||||
if exception.response["Error"]["Code"] == "DeleteConflict":
|
||||
return False
|
||||
|
||||
metrics.send("iam_retry", "counter", 1, metric_tags={"exception": str(exception)})
|
||||
return True
|
||||
|
||||
|
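With the extra check above, a DeleteConflict error (the certificate is still attached to a load balancer or distribution) fails immediately instead of burning retries that can never succeed. The predicate is meant to be used with the retrying library roughly as sketched below; the wait and stop values are illustrative, not taken from this module:

from retrying import retry

@retry(retry_on_exception=retry_throttled, wait_fixed=1000, stop_max_attempt_number=5)
def delete_server_certificate_sketch(client, name):
    # Throttling errors are retried; NoSuchEntity and DeleteConflict are not,
    # because retrying them cannot change the outcome.
    client.delete_server_certificate(ServerCertificateName=name)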
@ -216,22 +216,24 @@ class AWSSourcePlugin(SourcePlugin):
|
||||
|
||||
for region in regions:
|
||||
elbs = elb.get_all_elbs(account_number=account_number, region=region)
|
||||
current_app.logger.info(
|
||||
"Describing classic load balancers in {0}-{1}".format(
|
||||
account_number, region
|
||||
)
|
||||
)
|
||||
current_app.logger.info({
|
||||
"message": "Describing classic load balancers",
|
||||
"account_number": account_number,
|
||||
"region": region,
|
||||
"number_of_load_balancers": len(elbs)
|
||||
})
|
||||
|
||||
for e in elbs:
|
||||
endpoints.extend(get_elb_endpoints(account_number, region, e))
|
||||
|
||||
# fetch advanced ELBs
|
||||
elbs_v2 = elb.get_all_elbs_v2(account_number=account_number, region=region)
|
||||
current_app.logger.info(
|
||||
"Describing advanced load balancers in {0}-{1}".format(
|
||||
account_number, region
|
||||
)
|
||||
)
|
||||
current_app.logger.info({
|
||||
"message": "Describing advanced load balancers",
|
||||
"account_number": account_number,
|
||||
"region": region,
|
||||
"number_of_load_balancers": len(elbs_v2)
|
||||
})
|
||||
|
||||
for e in elbs_v2:
|
||||
endpoints.extend(get_elb_endpoints_v2(account_number, region, e))
|
||||
@ -325,14 +327,17 @@ class AWSDestinationPlugin(DestinationPlugin):
|
||||
]
|
||||
|
||||
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
|
||||
iam.upload_cert(
|
||||
name,
|
||||
body,
|
||||
private_key,
|
||||
self.get_option("path", options),
|
||||
cert_chain=cert_chain,
|
||||
account_number=self.get_option("accountNumber", options),
|
||||
)
|
||||
try:
|
||||
iam.upload_cert(
|
||||
name,
|
||||
body,
|
||||
private_key,
|
||||
self.get_option("path", options),
|
||||
cert_chain=cert_chain,
|
||||
account_number=self.get_option("accountNumber", options),
|
||||
)
|
||||
except ClientError:
|
||||
sentry.captureException()
|
||||
|
||||
def deploy(self, elb_name, account, region, certificate):
|
||||
pass
|
||||
|
@ -24,7 +24,12 @@ from lemur.certificates.service import create_csr
|
||||
def build_certificate_authority(options):
|
||||
options["certificate_authority"] = True
|
||||
csr, private_key = create_csr(**options)
|
||||
cert_pem, chain_cert_pem = issue_certificate(csr, options, private_key)
|
||||
|
||||
if options.get("parent"):
|
||||
# Intermediate Cert Issuance
|
||||
cert_pem, chain_cert_pem = issue_certificate(csr, options, None)
|
||||
else:
|
||||
cert_pem, chain_cert_pem = issue_certificate(csr, options, private_key)
|
||||
|
||||
return cert_pem, private_key, chain_cert_pem
|
||||
|
||||
|
@ -25,6 +25,31 @@ def test_build_certificate_authority():
|
||||
assert chain_cert_pem == ""
|
||||
|
||||
|
||||
def test_build_intermediate_certificate_authority(authority):
|
||||
from lemur.plugins.lemur_cryptography.plugin import build_certificate_authority
|
||||
|
||||
options = {
|
||||
"key_type": "RSA2048",
|
||||
"country": "US",
|
||||
"state": "CA",
|
||||
"location": "Example place",
|
||||
"organization": "Example, Inc.",
|
||||
"organizational_unit": "Example Unit",
|
||||
"common_name": "Example INTERMEDIATE",
|
||||
"validity_start": arrow.get("2016-12-01").datetime,
|
||||
"validity_end": arrow.get("2016-12-02").datetime,
|
||||
"first_serial": 1,
|
||||
"serial_number": 1,
|
||||
"owner": "owner@example.com",
|
||||
"parent": authority
|
||||
}
|
||||
cert_pem, private_key_pem, chain_cert_pem = build_certificate_authority(options)
|
||||
|
||||
assert cert_pem
|
||||
assert private_key_pem
|
||||
assert chain_cert_pem == authority.authority_certificate.body
|
||||
|
||||
|
||||
def test_issue_certificate(authority):
|
||||
from lemur.tests.vectors import CSR_STR
|
||||
from lemur.plugins.lemur_cryptography.plugin import issue_certificate
|
||||
|
@ -14,21 +14,17 @@
|
||||
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
|
||||
"""
|
||||
import json
|
||||
|
||||
import arrow
|
||||
import requests
|
||||
|
||||
import pem
|
||||
from retrying import retry
|
||||
|
||||
from flask import current_app
|
||||
|
||||
import requests
|
||||
from cryptography import x509
|
||||
|
||||
from lemur.extensions import metrics
|
||||
from flask import current_app
|
||||
from lemur.common.utils import validate_conf
|
||||
from lemur.plugins.bases import IssuerPlugin, SourcePlugin
|
||||
|
||||
from lemur.extensions import metrics
|
||||
from lemur.plugins import lemur_digicert as digicert
|
||||
from lemur.plugins.bases import IssuerPlugin, SourcePlugin
|
||||
from retrying import retry
|
||||
|
||||
|
||||
def log_status_code(r, *args, **kwargs):
|
||||
@ -64,24 +60,35 @@ def signature_hash(signing_algorithm):
|
||||
raise Exception("Unsupported signing algorithm.")
|
||||
|
||||
|
||||
def determine_validity_years(end_date):
|
||||
"""Given an end date determine how many years into the future that date is.
|
||||
def determine_validity_years(years):
|
||||
"""
|
||||
Considering maximum allowed certificate validity period of 397 days, this method should not return
|
||||
more than 1 year of validity. Thus changing it to always return 1.
|
||||
Lemur will change this method in future to handle validity in months (determine_validity_months)
|
||||
instead of years. This will allow flexibility to handle short-lived certificates.
|
||||
|
||||
:param years:
|
||||
:return: 1
|
||||
"""
|
||||
return 1
|
||||
|
||||
|
||||
def determine_end_date(end_date):
|
||||
"""
|
||||
Determine appropriate end date
|
||||
|
||||
:param end_date:
|
||||
:return: str validity in years
|
||||
:return: validity_end
|
||||
"""
|
||||
now = arrow.utcnow()
|
||||
default_days = current_app.config.get("DIGICERT_DEFAULT_VALIDITY_DAYS", 397)
|
||||
max_validity_end = arrow.utcnow().shift(days=current_app.config.get("DIGICERT_MAX_VALIDITY_DAYS", default_days))
|
||||
|
||||
if end_date < now.shift(years=+1):
|
||||
return 1
|
||||
elif end_date < now.shift(years=+2):
|
||||
return 2
|
||||
elif end_date < now.shift(years=+3):
|
||||
return 3
|
||||
if not end_date:
|
||||
end_date = arrow.utcnow().shift(days=default_days)
|
||||
|
||||
raise Exception(
|
||||
"DigiCert issued certificates cannot exceed three" " years in validity"
|
||||
)
|
||||
if end_date > max_validity_end:
|
||||
end_date = max_validity_end
|
||||
return end_date
|
||||
|
||||
|
||||
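In effect, the new determine_end_date clamps every requested end date to at most DIGICERT_MAX_VALIDITY_DAYS from now (which itself defaults to DIGICERT_DEFAULT_VALIDITY_DAYS, i.e. 397 days), and falls back to the default window when no end date was supplied. A simplified worked example of the clamping, with the dates and the single 397-day limit assumed rather than read from config:

import arrow

def _cap_end_date_sketch(requested_end, today, max_days=397):
    # Simplified: the real function distinguishes default and maximum validity.
    max_validity_end = today.shift(days=max_days)
    if not requested_end:
        requested_end = max_validity_end
    return min(requested_end, max_validity_end)

today = arrow.get(2016, 11, 3)
assert _cap_end_date_sketch(None, today) == arrow.get(2017, 12, 5)                   # 397 days out
assert _cap_end_date_sketch(arrow.get(2020, 5, 7), today) == arrow.get(2017, 12, 5)  # clamped
assert _cap_end_date_sketch(arrow.get(2017, 5, 7), today) == arrow.get(2017, 5, 7)   # within the cap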
def get_additional_names(options):
|
||||
@ -107,12 +114,6 @@ def map_fields(options, csr):
|
||||
:param csr:
|
||||
:return: dict or valid DigiCert options
|
||||
"""
|
||||
if not options.get("validity_years"):
|
||||
if not options.get("validity_end"):
|
||||
options["validity_years"] = current_app.config.get(
|
||||
"DIGICERT_DEFAULT_VALIDITY", 1
|
||||
)
|
||||
|
||||
data = dict(
|
||||
certificate={
|
||||
"common_name": options["common_name"],
|
||||
@ -125,9 +126,11 @@ def map_fields(options, csr):
|
||||
data["certificate"]["dns_names"] = get_additional_names(options)
|
||||
|
||||
if options.get("validity_years"):
|
||||
data["validity_years"] = options["validity_years"]
|
||||
data["validity_years"] = determine_validity_years(options.get("validity_years"))
|
||||
elif options.get("validity_end"):
|
||||
data["custom_expiration_date"] = determine_end_date(options.get("validity_end")).format("YYYY-MM-DD")
|
||||
else:
|
||||
data["custom_expiration_date"] = options["validity_end"].format("YYYY-MM-DD")
|
||||
data["validity_years"] = determine_validity_years(0)
|
||||
|
||||
if current_app.config.get("DIGICERT_PRIVATE", False):
|
||||
if "product" in data:
|
||||
@ -144,18 +147,15 @@ def map_cis_fields(options, csr):
|
||||
|
||||
:param options:
|
||||
:param csr:
|
||||
:return:
|
||||
:return: data
|
||||
"""
|
||||
if not options.get("validity_years"):
|
||||
if not options.get("validity_end"):
|
||||
options["validity_end"] = arrow.utcnow().shift(
|
||||
years=current_app.config.get("DIGICERT_DEFAULT_VALIDITY", 1)
|
||||
)
|
||||
options["validity_years"] = determine_validity_years(options["validity_end"])
|
||||
|
||||
if options.get("validity_years"):
|
||||
validity_end = determine_end_date(arrow.utcnow().shift(years=options["validity_years"]))
|
||||
elif options.get("validity_end"):
|
||||
validity_end = determine_end_date(options.get("validity_end"))
|
||||
else:
|
||||
options["validity_end"] = arrow.utcnow().shift(
|
||||
years=options["validity_years"]
|
||||
)
|
||||
validity_end = determine_end_date(False)
|
||||
|
||||
data = {
|
||||
"profile_name": current_app.config.get("DIGICERT_CIS_PROFILE_NAMES", {}).get(options['authority'].name),
|
||||
@ -164,7 +164,7 @@ def map_cis_fields(options, csr):
|
||||
"csr": csr,
|
||||
"signature_hash": signature_hash(options.get("signing_algorithm")),
|
||||
"validity": {
|
||||
"valid_to": options["validity_end"].format("YYYY-MM-DDTHH:MM") + "Z"
|
||||
"valid_to": validity_end.format("YYYY-MM-DDTHH:MM") + "Z"
|
||||
},
|
||||
"organization": {
|
||||
"name": options["organization"],
|
||||
@ -173,7 +173,8 @@ def map_cis_fields(options, csr):
|
||||
}
|
||||
# possibility to default to a SIGNING_ALGORITHM for a given profile
|
||||
if current_app.config.get("DIGICERT_CIS_SIGNING_ALGORITHMS", {}).get(options['authority'].name):
|
||||
data["signature_hash"] = current_app.config.get("DIGICERT_CIS_SIGNING_ALGORITHMS", {}).get(options['authority'].name)
|
||||
data["signature_hash"] = current_app.config.get("DIGICERT_CIS_SIGNING_ALGORITHMS", {}).get(
|
||||
options['authority'].name)
|
||||
|
||||
return data
|
||||
|
||||
|
@ -1,117 +1,122 @@
|
||||
import pytest
|
||||
import arrow
|
||||
import json
|
||||
from unittest.mock import patch
|
||||
from unittest.mock import patch, Mock
|
||||
|
||||
import arrow
|
||||
import pytest
|
||||
from cryptography import x509
|
||||
from freezegun import freeze_time
|
||||
|
||||
from lemur.plugins.lemur_digicert import plugin
|
||||
from lemur.tests.vectors import CSR_STR
|
||||
|
||||
from cryptography import x509
|
||||
|
||||
|
||||
def test_map_fields_with_validity_end_and_start(app):
|
||||
from lemur.plugins.lemur_digicert.plugin import map_fields
|
||||
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
|
||||
options = {
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"validity_end": arrow.get(2017, 5, 7),
|
||||
"validity_start": arrow.get(2016, 10, 30),
|
||||
}
|
||||
|
||||
data = map_fields(options, CSR_STR)
|
||||
|
||||
assert data == {
|
||||
"certificate": {
|
||||
"csr": CSR_STR,
|
||||
"common_name": "example.com",
|
||||
"dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
},
|
||||
"organization": {"id": 111111},
|
||||
"custom_expiration_date": arrow.get(2017, 5, 7).format("YYYY-MM-DD"),
|
||||
def config_mock(*args):
|
||||
values = {
|
||||
"DIGICERT_ORG_ID": 111111,
|
||||
"DIGICERT_PRIVATE": False,
|
||||
"DIGICERT_DEFAULT_SIGNING_ALGORITHM": "sha256",
|
||||
"DIGICERT_CIS_PROFILE_NAMES": {"digicert": 'digicert'},
|
||||
"DIGICERT_CIS_SIGNING_ALGORITHMS": {"digicert": 'digicert'},
|
||||
}
|
||||
return values[args[0]]
|
||||
|
||||
|
||||
def test_map_fields_with_validity_years(app):
|
||||
from lemur.plugins.lemur_digicert.plugin import map_fields
|
||||
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
|
||||
options = {
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"validity_years": 2,
|
||||
"validity_end": arrow.get(2017, 10, 30),
|
||||
}
|
||||
|
||||
data = map_fields(options, CSR_STR)
|
||||
|
||||
assert data == {
|
||||
"certificate": {
|
||||
"csr": CSR_STR,
|
||||
"common_name": "example.com",
|
||||
"dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
},
|
||||
"organization": {"id": 111111},
|
||||
"validity_years": 2,
|
||||
}
|
||||
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
|
||||
def test_determine_validity_years(mock_current_app):
|
||||
assert plugin.determine_validity_years(1) == 1
|
||||
assert plugin.determine_validity_years(0) == 1
|
||||
assert plugin.determine_validity_years(3) == 1
|
||||
|
||||
|
||||
def test_map_cis_fields(app, authority):
|
||||
from lemur.plugins.lemur_digicert.plugin import map_cis_fields
|
||||
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
|
||||
options = {
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"organization": "Example, Inc.",
|
||||
"organizational_unit": "Example Org",
|
||||
"validity_end": arrow.get(2017, 5, 7),
|
||||
"validity_start": arrow.get(2016, 10, 30),
|
||||
"authority": authority,
|
||||
}
|
||||
|
||||
data = map_cis_fields(options, CSR_STR)
|
||||
|
||||
assert data == {
|
||||
"common_name": "example.com",
|
||||
"csr": CSR_STR,
|
||||
"additional_dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
"organization": {"name": "Example, Inc.", "units": ["Example Org"]},
|
||||
"validity": {
|
||||
"valid_to": arrow.get(2017, 5, 7).format("YYYY-MM-DDTHH:MM") + "Z"
|
||||
},
|
||||
"profile_name": None,
|
||||
}
|
||||
|
||||
options = {
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"organization": "Example, Inc.",
|
||||
"organizational_unit": "Example Org",
|
||||
"validity_years": 2,
|
||||
"authority": authority,
|
||||
}
|
||||
|
||||
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
|
||||
def test_determine_end_date(mock_current_app):
|
||||
mock_current_app.config.get = Mock(return_value=397) # 397 days validity
|
||||
with freeze_time(time_to_freeze=arrow.get(2016, 11, 3).datetime):
|
||||
data = map_cis_fields(options, CSR_STR)
|
||||
assert arrow.get(2017, 12, 5) == plugin.determine_end_date(0) # 397 days from (2016, 11, 3)
|
||||
assert arrow.get(2017, 12, 5) == plugin.determine_end_date(arrow.get(2017, 12, 5))
|
||||
assert arrow.get(2017, 12, 5) == plugin.determine_end_date(arrow.get(2020, 5, 7))
|
||||
|
||||
assert data == {
|
||||
|
||||
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
|
||||
def test_map_fields_with_validity_years(mock_current_app):
|
||||
mock_current_app.config.get = Mock(side_effect=config_mock)
|
||||
|
||||
with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash:
|
||||
mock_signature_hash.return_value = "sha256"
|
||||
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
options = {
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"validity_years": 1
|
||||
}
|
||||
expected = {
|
||||
"certificate": {
|
||||
"csr": CSR_STR,
|
||||
"common_name": "example.com",
|
||||
"dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
},
|
||||
"organization": {"id": 111111},
|
||||
"validity_years": 1,
|
||||
}
|
||||
assert expected == plugin.map_fields(options, CSR_STR)
|
||||
|
||||
|
||||
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
|
||||
def test_map_fields_with_validity_end_and_start(mock_current_app):
|
||||
mock_current_app.config.get = Mock(side_effect=config_mock)
|
||||
plugin.determine_end_date = Mock(return_value=arrow.get(2017, 5, 7))
|
||||
|
||||
with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash:
|
||||
mock_signature_hash.return_value = "sha256"
|
||||
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
options = {
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"validity_end": arrow.get(2017, 5, 7),
|
||||
"validity_start": arrow.get(2016, 10, 30),
|
||||
}
|
||||
|
||||
expected = {
|
||||
"certificate": {
|
||||
"csr": CSR_STR,
|
||||
"common_name": "example.com",
|
||||
"dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
},
|
||||
"organization": {"id": 111111},
|
||||
"custom_expiration_date": arrow.get(2017, 5, 7).format("YYYY-MM-DD"),
|
||||
}
|
||||
|
||||
assert expected == plugin.map_fields(options, CSR_STR)
|
||||
|
||||
|
||||
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
|
||||
def test_map_cis_fields_with_validity_years(mock_current_app, authority):
|
||||
mock_current_app.config.get = Mock(side_effect=config_mock)
|
||||
plugin.determine_end_date = Mock(return_value=arrow.get(2018, 11, 3))
|
||||
|
||||
with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash:
|
||||
mock_signature_hash.return_value = "sha256"
|
||||
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
options = {
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"organization": "Example, Inc.",
|
||||
"organizational_unit": "Example Org",
|
||||
"validity_years": 2,
|
||||
"authority": authority,
|
||||
}
|
||||
|
||||
expected = {
|
||||
"common_name": "example.com",
|
||||
"csr": CSR_STR,
|
||||
"additional_dns_names": names,
|
||||
@ -123,21 +128,59 @@ def test_map_cis_fields(app, authority):
|
||||
"profile_name": None,
|
||||
}
|
||||
|
||||
assert expected == plugin.map_cis_fields(options, CSR_STR)
|
||||
|
||||
def test_signature_hash(app):
|
||||
from lemur.plugins.lemur_digicert.plugin import signature_hash
|
||||
|
||||
assert signature_hash(None) == "sha256"
|
||||
assert signature_hash("sha256WithRSA") == "sha256"
|
||||
assert signature_hash("sha384WithRSA") == "sha384"
|
||||
assert signature_hash("sha512WithRSA") == "sha512"
|
||||
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
|
||||
def test_map_cis_fields_with_validity_end_and_start(mock_current_app, app, authority):
|
||||
mock_current_app.config.get = Mock(side_effect=config_mock)
|
||||
plugin.determine_end_date = Mock(return_value=arrow.get(2017, 5, 7))
|
||||
|
||||
with patch('lemur.plugins.lemur_digicert.plugin.signature_hash') as mock_signature_hash:
|
||||
mock_signature_hash.return_value = "sha256"
|
||||
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
options = {
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"organization": "Example, Inc.",
|
||||
"organizational_unit": "Example Org",
|
||||
"validity_end": arrow.get(2017, 5, 7),
|
||||
"validity_start": arrow.get(2016, 10, 30),
|
||||
"authority": authority
|
||||
}
|
||||
|
||||
expected = {
|
||||
"common_name": "example.com",
|
||||
"csr": CSR_STR,
|
||||
"additional_dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
"organization": {"name": "Example, Inc.", "units": ["Example Org"]},
|
||||
"validity": {
|
||||
"valid_to": arrow.get(2017, 5, 7).format("YYYY-MM-DDTHH:MM") + "Z"
|
||||
},
|
||||
"profile_name": None,
|
||||
}
|
||||
|
||||
assert expected == plugin.map_cis_fields(options, CSR_STR)
|
||||
|
||||
|
||||
@patch("lemur.plugins.lemur_digicert.plugin.current_app")
|
||||
def test_signature_hash(mock_current_app, app):
|
||||
mock_current_app.config.get = Mock(side_effect=config_mock)
|
||||
assert plugin.signature_hash(None) == "sha256"
|
||||
assert plugin.signature_hash("sha256WithRSA") == "sha256"
|
||||
assert plugin.signature_hash("sha384WithRSA") == "sha384"
|
||||
assert plugin.signature_hash("sha512WithRSA") == "sha512"
|
||||
|
||||
with pytest.raises(Exception):
|
||||
signature_hash("sdfdsf")
|
||||
plugin.signature_hash("sdfdsf")
|
||||
|
||||
|
||||
def test_issuer_plugin_create_certificate(
|
||||
certificate_="""\
|
||||
certificate_="""\
|
||||
-----BEGIN CERTIFICATE-----
|
||||
abc
|
||||
-----END CERTIFICATE-----
|
||||
|
@ -75,7 +75,8 @@
|
||||
</tr>
|
||||
<tr>
|
||||
<td style="font-family:Roboto-Regular,Helvetica,Arial,sans-serif;font-size:13px;color:#202020;line-height:1.5">
|
||||
<br>This is a Lemur certificate expiration notice. Please verify that the following certificates are no longer used.
|
||||
<br>This is a Lemur certificate expiration notice. Please verify that the following certificates are no longer used,
|
||||
and disable notifications via the Notify toggle in Lemur, if applicable.
|
||||
<table border="0" cellspacing="0" cellpadding="0"
|
||||
style="margin-top:48px;margin-bottom:48px">
|
||||
<tbody>
|
||||
|
@ -14,7 +14,7 @@ import re
|
||||
import hvac
|
||||
from flask import current_app
|
||||
|
from lemur.common.defaults import common_name
from lemur.common.defaults import common_name, country, state, location, organizational_unit, organization
from lemur.common.utils import parse_certificate
from lemur.plugins.bases import DestinationPlugin
from lemur.plugins.bases import SourcePlugin
@ -58,7 +58,7 @@ class VaultSourcePlugin(SourcePlugin):
"helpMessage": "Authentication method to use",
},
{
"name": "tokenFile/VaultRole",
"name": "tokenFileOrVaultRole",
"type": "str",
"required": True,
"validation": "^([a-zA-Z0-9/._-]+/?)+$",
@ -94,7 +94,7 @@ class VaultSourcePlugin(SourcePlugin):
body = ""
url = self.get_option("vaultUrl", options)
auth_method = self.get_option("authenticationMethod", options)
auth_key = self.get_option("tokenFile/vaultRole", options)
auth_key = self.get_option("tokenFileOrVaultRole", options)
mount = self.get_option("vaultMount", options)
path = self.get_option("vaultPath", options)
obj_name = self.get_option("objectName", options)
@ -185,7 +185,7 @@ class VaultDestinationPlugin(DestinationPlugin):
"helpMessage": "Authentication method to use",
},
{
"name": "tokenFile/VaultRole",
"name": "tokenFileOrVaultRole",
"type": "str",
"required": True,
"validation": "^([a-zA-Z0-9/._-]+/?)+$",
@ -202,15 +202,15 @@ class VaultDestinationPlugin(DestinationPlugin):
"name": "vaultPath",
"type": "str",
"required": True,
"validation": "^([a-zA-Z0-9._-]+/?)+$",
"helpMessage": "Must be a valid Vault secrets path",
"validation": "^(([a-zA-Z0-9._-]+|{(CN|OU|O|L|S|C)})+/?)+$",
"helpMessage": "Must be a valid Vault secrets path. Support vars: {CN|OU|O|L|S|C}",
},
{
"name": "objectName",
"type": "str",
"required": False,
"validation": "[0-9a-zA-Z.:_-]+",
"helpMessage": "Name to bundle certs under, if blank use cn",
"validation": "^([0-9a-zA-Z.:_-]+|{(CN|OU|O|L|S|C)})+$",
"helpMessage": "Name to bundle certs under, if blank use {CN}. Support vars: {CN|OU|O|L|S|C}",
},
{
"name": "bundleChain",
@ -241,11 +241,12 @@ class VaultDestinationPlugin(DestinationPlugin):
:param cert_chain:
:return:
"""
cname = common_name(parse_certificate(body))
cert = parse_certificate(body)
cname = common_name(cert)

url = self.get_option("vaultUrl", options)
auth_method = self.get_option("authenticationMethod", options)
auth_key = self.get_option("tokenFile/vaultRole", options)
auth_key = self.get_option("tokenFileOrVaultRole", options)
mount = self.get_option("vaultMount", options)
path = self.get_option("vaultPath", options)
bundle = self.get_option("bundleChain", options)
@ -285,10 +286,27 @@ class VaultDestinationPlugin(DestinationPlugin):

client.secrets.kv.default_kv_version = api_version

if obj_name:
path = "{0}/{1}".format(path, obj_name)
else:
path = "{0}/{1}".format(path, cname)
t_path = path.format(
CN=cname,
OU=organizational_unit(cert),
O=organization(cert), # noqa: E741
L=location(cert),
S=state(cert),
C=country(cert)
)
if not obj_name:
obj_name = '{CN}'

f_obj_name = obj_name.format(
CN=cname,
OU=organizational_unit(cert),
O=organization(cert), # noqa: E741
L=location(cert),
S=state(cert),
C=country(cert)
)

path = "{0}/{1}".format(t_path, f_obj_name)

secret = get_secret(client, mount, path)
secret["data"][cname] = {}

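The vaultPath and objectName options added above are treated as str.format() templates: the {CN}, {OU}, {O}, {L}, {S} and {C} placeholders are filled from fields of the parsed certificate before the secret path is built. A minimal sketch of that substitution, using hypothetical option values and certificate fields rather than anything taken from this change:

    # Sketch only: the option values and certificate fields below are hypothetical.
    fields = {"CN": "www.example.com", "OU": "Ops", "O": "ExampleOrg",
              "L": "Earth", "S": "CA", "C": "US"}

    configured_path = "secrets/certs/{O}/{CN}"  # hypothetical vaultPath option
    object_name = ""                            # blank objectName falls back to {CN}

    t_path = configured_path.format(**fields)
    f_obj_name = (object_name or "{CN}").format(**fields)
    print("{0}/{1}".format(t_path, f_obj_name))
    # -> secrets/certs/ExampleOrg/www.example.com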
@ -54,6 +54,24 @@ def validate_sources(source_strings):
return sources


def execute_clean(plugin, certificate, source):
try:
plugin.clean(certificate, source.options)
certificate.sources.remove(source)

# If we want to remove the source from the certificate, we also need to clear any equivalent destinations to
# prevent Lemur from re-uploading the certificate.
for destination in certificate.destinations:
if destination.label == source.label:
certificate.destinations.remove(destination)

certificate_service.database.update(certificate)
return SUCCESS_METRIC_STATUS
except Exception as e:
current_app.logger.exception(e)
sentry.captureException()


@manager.option(
"-s",
"--sources",
@ -132,11 +150,9 @@ def clean(source_strings, commit):
s = plugins.get(source.plugin_name)

if not hasattr(s, "clean"):
print(
"Cannot clean source: {0}, source plugin does not implement 'clean()'".format(
source.label
)
)
info_text = f"Cannot clean source: {source.label}, source plugin does not implement 'clean()'"
current_app.logger.warning(info_text)
print(info_text)
continue

start_time = time.time()
@ -144,35 +160,147 @@ def clean(source_strings, commit):
print("[+] Staring to clean source: {label}!\n".format(label=source.label))

cleaned = 0
for certificate in certificate_service.get_all_pending_cleaning(source):
certificates = certificate_service.get_all_pending_cleaning_expired(source)
for certificate in certificates:
status = FAILURE_METRIC_STATUS
if commit:
try:
s.clean(certificate, source.options)
certificate.sources.remove(source)
certificate_service.database.update(certificate)
status = SUCCESS_METRIC_STATUS
except Exception as e:
current_app.logger.exception(e)
sentry.captureException()
status = execute_clean(s, certificate, source)

metrics.send(
"clean",
"certificate_clean",
"counter",
1,
metric_tags={"source": source.label, "status": status},
metric_tags={"status": status, "source": source.label, "certificate": certificate.name},
)

current_app.logger.warning(
"Removed {0} from source {1} during cleaning".format(
certificate.name, source.label
)
)

current_app.logger.warning(f"Removed {certificate.name} from source {source.label} during cleaning")
cleaned += 1

print(
"[+] Finished cleaning source: {label}. Removed {cleaned} certificates from source. Run Time: {time}\n".format(
label=source.label, time=(time.time() - start_time), cleaned=cleaned
info_text = f"[+] Finished cleaning source: {source.label}. " \
f"Removed {cleaned} certificates from source. " \
f"Run Time: {(time.time() - start_time)}\n"
print(info_text)
current_app.logger.warning(info_text)


@manager.option(
"-s",
"--sources",
dest="source_strings",
action="append",
help="Sources to operate on.",
)
@manager.option(
"-d",
"--days",
dest="days_to_expire",
type=int,
action="store",
required=True,
help="The expiry range within days.",
)
@manager.option(
"-c",
"--commit",
dest="commit",
action="store_true",
default=False,
help="Persist changes.",
)
def clean_unused_and_expiring_within_days(source_strings, days_to_expire, commit):
sources = validate_sources(source_strings)
for source in sources:
s = plugins.get(source.plugin_name)

if not hasattr(s, "clean"):
info_text = f"Cannot clean source: {source.label}, source plugin does not implement 'clean()'"
current_app.logger.warning(info_text)
print(info_text)
continue

start_time = time.time()

print("[+] Staring to clean source: {label}!\n".format(label=source.label))

cleaned = 0
certificates = certificate_service.get_all_pending_cleaning_expiring_in_days(source, days_to_expire)
for certificate in certificates:
status = FAILURE_METRIC_STATUS
if commit:
status = execute_clean(s, certificate, source)

metrics.send(
"certificate_clean",
"counter",
1,
metric_tags={"status": status, "source": source.label, "certificate": certificate.name},
)
current_app.logger.warning(f"Removed {certificate.name} from source {source.label} during cleaning")
cleaned += 1

info_text = f"[+] Finished cleaning source: {source.label}. " \
f"Removed {cleaned} certificates from source. " \
f"Run Time: {(time.time() - start_time)}\n"
print(info_text)
current_app.logger.warning(info_text)


@manager.option(
"-s",
"--sources",
dest="source_strings",
action="append",
help="Sources to operate on.",
)
@manager.option(
"-d",
"--days",
dest="days_since_issuance",
type=int,
action="store",
required=True,
help="Days since issuance.",
)
@manager.option(
"-c",
"--commit",
dest="commit",
action="store_true",
default=False,
help="Persist changes.",
)
def clean_unused_and_issued_since_days(source_strings, days_since_issuance, commit):
sources = validate_sources(source_strings)
for source in sources:
s = plugins.get(source.plugin_name)

if not hasattr(s, "clean"):
info_text = f"Cannot clean source: {source.label}, source plugin does not implement 'clean()'"
current_app.logger.warning(info_text)
print(info_text)
continue

start_time = time.time()

print("[+] Staring to clean source: {label}!\n".format(label=source.label))

cleaned = 0
certificates = certificate_service.get_all_pending_cleaning_issued_since_days(source, days_since_issuance)
for certificate in certificates:
status = FAILURE_METRIC_STATUS
if commit:
status = execute_clean(s, certificate, source)

metrics.send(
"certificate_clean",
"counter",
1,
metric_tags={"status": status, "source": source.label, "certificate": certificate.name},
)
current_app.logger.warning(f"Removed {certificate.name} from source {source.label} during cleaning")
cleaned += 1

info_text = f"[+] Finished cleaning source: {source.label}. " \
f"Removed {cleaned} certificates from source. " \
f"Run Time: {(time.time() - start_time)}\n"
print(info_text)
current_app.logger.warning(info_text)

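Each of the functions above becomes a flask_script sub-command whose flags mirror the @manager.option declarations. Assuming the manager is registered under a "sources" prefix (an assumption; the registration itself is not part of this change), invocations would look roughly like:

    lemur sources clean -s my-source -c
    lemur sources clean_unused_and_expiring_within_days -s my-source -d 30 -c
    lemur sources clean_unused_and_issued_since_days -s my-source -d 90 -c

Without -c/--commit the commands run as a dry run: certificates are iterated and metrics emitted, but execute_clean() is never called and nothing is persisted.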
@ -123,15 +123,19 @@ def sync_endpoints(source):
"acct": s.get_option("accountNumber", source.options)})

if not endpoint["certificate"]:
current_app.logger.error(
"Certificate Not Found. Name: {0} Endpoint: {1}".format(
certificate_name, endpoint["name"]
)
)
current_app.logger.error({
"message": "Certificate Not Found",
"certificate_name": certificate_name,
"endpoint_name": endpoint["name"],
"dns_name": endpoint.get("dnsname"),
"account": s.get_option("accountNumber", source.options),
})

metrics.send("endpoint.certificate.not.found",
"counter", 1,
metric_tags={"cert": certificate_name, "endpoint": endpoint["name"],
"acct": s.get_option("accountNumber", source.options)})
"acct": s.get_option("accountNumber", source.options),
"dnsname": endpoint.get("dnsname")})
continue

policy = endpoint.pop("policy")
@ -193,6 +197,11 @@ def sync_certificates(source, user):
s = plugins.get(source.plugin_name)
certificates = s.get_certificates(source.options)

# emitting the count of certificates on the source
metrics.send("sync_certificates_count",
"gauge", len(certificates),
metric_tags={"source": source.label})

for certificate in certificates:
exists, updated_by_hash = find_cert(certificate)

@ -212,12 +212,18 @@ angular.module('lemur')
})

.controller('CertificateCloneController', function ($scope, $uibModalInstance, CertificateApi, CertificateService, DestinationService, AuthorityService, AuthorityApi, PluginService, MomentService, WizardHandler, LemurRestangular, NotificationService, toaster, editId) {
$scope.certificate = LemurRestangular.restangularizeElement(null, {}, 'certificates');
CertificateApi.get(editId).then(function (certificate) {
$scope.certificate = certificate;
// prepare the certificate for cloning
$scope.certificate.name = ''; // we should prefer the generated name
$scope.certificate.csr = null; // should not clone CSR in case other settings are changed in clone
$scope.certificate.validityStart = null;
$scope.certificate.validityEnd = null;
$scope.certificate.keyType = 'RSA2048'; // default algo to show during clone
$scope.certificate.description = 'Cloning from cert ID ' + editId;
$scope.certificate.replacedBy = []; // should not clone 'replaced by' info
$scope.certificate.removeReplaces(); // should not clone 'replacement cert' info
CertificateService.getDefaults($scope.certificate);
});

@ -140,7 +140,6 @@
<select ng-model="certificate.validityYears" class="form-control">
<option value="">-</option>
<option value="1">1 year</option>
<option value="2">2 years</option>
</select>
</div>
<span style="padding-top: 15px" class="text-center col-sm-1">

@ -9,7 +9,8 @@ from cryptography import x509
from cryptography.hazmat.backends import default_backend
from marshmallow import ValidationError
from freezegun import freeze_time
from mock import patch
# from mock import patch
from unittest.mock import patch

from lemur.certificates.service import create_csr
from lemur.certificates.views import * # noqa
@ -906,12 +907,12 @@ def test_certificate_get_body(client):
assert response_body["serial"] == "211983098819107449768450703123665283596"
assert response_body["serialHex"] == "9F7A75B39DAE4C3F9524C68B06DA6A0C"
assert response_body["distinguishedName"] == (
"CN=LemurTrust Unittests Class 1 CA 2018,"
"O=LemurTrust Enterprises Ltd,"
"OU=Unittesting Operations Center,"
"C=EE,"
"L=Earth,"
"ST=N/A,"
"L=Earth"
"C=EE,"
"OU=Unittesting Operations Center,"
"O=LemurTrust Enterprises Ltd,"
"CN=LemurTrust Unittests Class 1 CA 2018"
)


@ -4,9 +4,20 @@ from lemur.dns_providers import util as dnsutil

class TestDNSProvider(unittest.TestCase):
def test_is_valid_domain(self):
self.assertTrue(dnsutil.is_valid_domain("example.com"))
self.assertTrue(dnsutil.is_valid_domain("foo.bar.org"))
self.assertTrue(dnsutil.is_valid_domain("_acme-chall.example.com"))
self.assertFalse(dnsutil.is_valid_domain("e/xample.com"))
self.assertFalse(dnsutil.is_valid_domain("exam\ple.com"))
self.assertFalse(dnsutil.is_valid_domain("*.example.com"))
self.assertTrue(dnsutil.is_valid_domain('example.com'))
self.assertTrue(dnsutil.is_valid_domain('foo.bar.org'))
self.assertTrue(dnsutil.is_valid_domain('exam--ple.io'))
self.assertTrue(dnsutil.is_valid_domain('a.example.com'))
self.assertTrue(dnsutil.is_valid_domain('example.io'))
self.assertTrue(dnsutil.is_valid_domain('example-of-under-63-character-domain-label-length-limit-1234567.com'))
self.assertFalse(dnsutil.is_valid_domain('example-of-over-63-character-domain-label-length-limit-123456789.com'))
self.assertTrue(dnsutil.is_valid_domain('_acme-chall.example.com'))
self.assertFalse(dnsutil.is_valid_domain('e/xample.com'))
self.assertFalse(dnsutil.is_valid_domain('exam\ple.com'))
self.assertFalse(dnsutil.is_valid_domain('<example.com'))
self.assertFalse(dnsutil.is_valid_domain('*.example.com'))
self.assertFalse(dnsutil.is_valid_domain('-example.io'))
self.assertFalse(dnsutil.is_valid_domain('example-.io'))
self.assertFalse(dnsutil.is_valid_domain('example..io'))
self.assertFalse(dnsutil.is_valid_domain('exa mple.io'))
self.assertFalse(dnsutil.is_valid_domain('-'))

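The expanded cases above pin down what dnsutil.is_valid_domain is expected to accept: dot-separated labels of 1-63 characters made of letters, digits and hyphens, no leading or trailing hyphen, at least two labels, a leading underscore allowed for ACME challenge records, and no wildcards or other special characters. A rough sketch of a validator that satisfies exactly these cases (an illustration only; the real dnsutil implementation may differ):

    import re

    # Sketch: one label pattern applied per dot-separated label (hypothetical helper).
    _LABEL = re.compile(r"^_?(?!-)[A-Za-z0-9-]{1,63}(?<!-)$")

    def is_valid_domain(domain):
        labels = domain.split(".")
        if len(labels) < 2:
            return False
        return all(_LABEL.match(label) for label in labels)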