Merge branch 'master' of github.com:Netflix/lemur into add-remove-certs-for-notification
commit dac6838c3b
@@ -47,4 +47,7 @@ after_success:

 notifications:
   email:
-    lemur@netflix.com
+    recipients:
+      - lemur@netflix.com
+    on_success: never
+    on_failure: always
@@ -1,12 +1,15 @@
 import time
 import json
+import arrow

 from flask_script import Manager
 from flask import current_app

 from lemur.extensions import sentry
 from lemur.constants import SUCCESS_METRIC_STATUS
+from lemur.plugins import plugins
 from lemur.plugins.lemur_acme.plugin import AcmeHandler
+from lemur.plugins.lemur_aws import s3

 manager = Manager(
     usage="Handles all ACME related tasks"
@@ -84,3 +87,105 @@ def dnstest(domain, token):

     status = SUCCESS_METRIC_STATUS
     print("[+] Done with ACME Tests.")
+
+
+@manager.option(
+    "-t",
+    "--token",
+    dest="token",
+    default="date: " + arrow.utcnow().format("YYYY-MM-DDTHH-mm-ss"),
+    required=False,
+    help="Value of the Token",
+)
+@manager.option(
+    "-n",
+    "--token_name",
+    dest="token_name",
+    default="Token-" + arrow.utcnow().format("YYYY-MM-DDTHH-mm-ss"),
+    required=False,
+    help="path",
+)
+@manager.option(
+    "-p",
+    "--prefix",
+    dest="prefix",
+    default="test/",
+    required=False,
+    help="S3 bucket prefix",
+)
+@manager.option(
+    "-a",
+    "--account_number",
+    dest="account_number",
+    required=True,
+    help="AWS Account",
+)
+@manager.option(
+    "-b",
+    "--bucket_name",
+    dest="bucket_name",
+    required=True,
+    help="Bucket Name",
+)
+def upload_acme_token_s3(token, token_name, prefix, account_number, bucket_name):
+    """
+    Tests upload_acme_token against S3: uploads the token, fetches it back to verify
+    the contents, and then deletes it. Intended for testing purposes only.
+    :param token:
+    :param token_name:
+    :param prefix:
+    :param account_number:
+    :param bucket_name:
+    :return:
+    """
+    additional_options = [
+        {
+            "name": "bucket",
+            "value": bucket_name,
+            "type": "str",
+            "required": True,
+            "validation": r"[0-9a-z.-]{3,63}",
+            "helpMessage": "Must be a valid S3 bucket name!",
+        },
+        {
+            "name": "accountNumber",
+            "type": "str",
+            "value": account_number,
+            "required": True,
+            "validation": r"[0-9]{12}",
+            "helpMessage": "A valid AWS account number with permission to access S3",
+        },
+        {
+            "name": "region",
+            "type": "str",
+            "default": "us-east-1",
+            "required": False,
+            "helpMessage": "Region bucket exists",
+            "available": ["us-east-1", "us-west-2", "eu-west-1"],
+        },
+        {
+            "name": "encrypt",
+            "type": "bool",
+            "value": False,
+            "required": False,
+            "helpMessage": "Enable server side encryption",
+            "default": True,
+        },
+        {
+            "name": "prefix",
+            "type": "str",
+            "value": prefix,
+            "required": False,
+            "helpMessage": "Must be a valid S3 object prefix!",
+        },
+    ]
+
+    p = plugins.get("aws-s3")
+    p.upload_acme_token(token_name, token, additional_options)
+
+    if not prefix.endswith("/"):
+        prefix += "/"
+
+    token_res = s3.get(bucket_name, prefix + token_name, account_number=account_number)
+    assert token_res == token
+    s3.delete(bucket_name, prefix + token_name, account_number=account_number)
@@ -210,7 +210,8 @@ class LdapPrincipal:
            self.ldap_groups = []
            for group in lgroups:
                (dn, values) = group
-                self.ldap_groups.append(values["cn"][0].decode("ascii"))
+                if type(values) == dict:
+                    self.ldap_groups.append(values["cn"][0].decode("ascii"))
        else:
            lgroups = self.ldap_client.search_s(
                self.ldap_base_dn, ldap.SCOPE_SUBTREE, ldap_filter, self.ldap_attrs
@@ -101,7 +101,8 @@ def login_required(f):
            return dict(message="Token is invalid"), 403

        try:
-            payload = jwt.decode(token, current_app.config["LEMUR_TOKEN_SECRET"])
+            header_data = fetch_token_header(token)
+            payload = jwt.decode(token, current_app.config["LEMUR_TOKEN_SECRET"], algorithms=[header_data["alg"]])
        except jwt.DecodeError:
            return dict(message="Token is invalid"), 403
        except jwt.ExpiredSignatureError:
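The change above reads the JOSE header first and then passes an explicit algorithm list to `jwt.decode`, which newer PyJWT releases require. A minimal PyJWT-only sketch of the same idea, using PyJWT's own header helper rather than Lemur's `fetch_token_header`:

    import jwt

    def decode_with_pinned_alg(token, secret):
        # Read the (unverified) header to learn which algorithm the token claims,
        # then tell jwt.decode to accept exactly that algorithm and nothing else.
        header = jwt.get_unverified_header(token)   # e.g. {"alg": "HS256", "typ": "JWT"}
        return jwt.decode(token, secret, algorithms=[header["alg"]])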
@@ -18,7 +18,7 @@ from sqlalchemy import (
     func,
     ForeignKey,
     DateTime,
-    PassiveDefault,
+    DefaultClause,
     Boolean,
 )
 from sqlalchemy.dialects.postgresql import JSON
@@ -39,7 +39,7 @@ class Authority(db.Model):
     plugin_name = Column(String(64))
     description = Column(Text)
     options = Column(JSON)
-    date_created = Column(DateTime, PassiveDefault(func.now()), nullable=False)
+    date_created = Column(DateTime, DefaultClause(func.now()), nullable=False)
     roles = relationship(
         "Role",
         secondary=roles_authorities,
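`PassiveDefault` has long been a deprecated alias in SQLAlchemy; `DefaultClause` is the direct replacement and emits the same server-side `DEFAULT` in the DDL. A small self-contained sketch of the pattern (an illustrative model, not Lemur's):

    from sqlalchemy import Column, DateTime, DefaultClause, Integer, func
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Example(Base):
        __tablename__ = "example"
        id = Column(Integer, primary_key=True)
        # the database fills this column in at INSERT time (DEFAULT now())
        date_created = Column(DateTime, DefaultClause(func.now()), nullable=False)
        # equivalent spelling: Column(DateTime, server_default=func.now(), nullable=False)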
@@ -16,7 +16,7 @@ from sqlalchemy import (
     Integer,
     ForeignKey,
     String,
-    PassiveDefault,
+    DefaultClause,
     func,
     Column,
     Text,
@@ -138,7 +138,7 @@ class Certificate(db.Model):
     not_after = Column(ArrowType)
     not_after_ix = Index("ix_certificates_not_after", not_after.desc())

-    date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False)
+    date_created = Column(ArrowType, DefaultClause(func.now()), nullable=False)

     signing_algorithm = Column(String(128))
     status = Column(String(128))
@@ -184,7 +184,6 @@ class Certificate(db.Model):
         "PendingCertificate",
         secondary=pending_cert_replacement_associations,
         backref="pending_replace",
-        viewonly=True,
     )

     logs = relationship("Log", backref="certificate")
@@ -12,6 +12,7 @@ from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import hashes, serialization
 from flask import current_app
 from sqlalchemy import func, or_, not_, cast, Integer
+from sqlalchemy.sql.expression import false, true

 from lemur import database
 from lemur.authorities.models import Authority
@@ -150,7 +151,7 @@ def get_all_certs_attached_to_endpoint_without_autorotate():
     """
     return (
         Certificate.query.filter(Certificate.endpoints.any())
-        .filter(Certificate.rotation == False)
+        .filter(Certificate.rotation == false())
         .filter(Certificate.not_after >= arrow.now())
         .filter(not_(Certificate.replaced.any()))
         .all()  # noqa
@@ -205,9 +206,9 @@ def get_all_pending_reissue():
     :return:
     """
     return (
-        Certificate.query.filter(Certificate.rotation == True)
+        Certificate.query.filter(Certificate.rotation == true())
         .filter(not_(Certificate.replaced.any()))
-        .filter(Certificate.in_rotation_window == True)
+        .filter(Certificate.in_rotation_window == true())
         .all()
     )  # noqa
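Comparing a Boolean column against the Python literals `True`/`False` produces valid SQL but trips flake8's E712 (hence the old `# noqa` markers); `sqlalchemy.sql.expression.true()`/`false()` render the same SQL boolean without the suppression. A short sketch, assuming a `session` and a model with a Boolean `rotation` column as in the service code above:

    from sqlalchemy.sql.expression import false, true

    rotating = session.query(Certificate).filter(Certificate.rotation == true()).all()
    parked = session.query(Certificate).filter(Certificate.rotation == false()).all()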
@@ -525,7 +526,7 @@ def render(args):
        )

    if current_app.config.get("ALLOW_CERT_DELETION", False):
-        query = query.filter(Certificate.deleted == False)  # noqa
+        query = query.filter(Certificate.deleted == false())

    result = database.sort_and_page(query, Certificate, args)
    return result
@@ -82,4 +82,4 @@ def get_key_type_from_csr(data):
            raise Exception("Unsupported key type")

    except NotImplemented:
-        raise NotImplemented()
+        raise NotImplementedError
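`NotImplemented` is the sentinel value that binary dunder methods return, not an exception class, so `raise NotImplemented()` only produces a `TypeError`; `NotImplementedError` is the exception intended for unsupported cases. A tiny illustration with a hypothetical helper:

    def parse_key_type(algorithm_name):
        if algorithm_name not in ("rsa", "ecdsa"):
            raise NotImplementedError(f"unsupported key type: {algorithm_name}")
        return algorithm_name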
@@ -10,7 +10,7 @@

 """
 import os
-import imp
+import importlib
 import errno
 import pkg_resources
 import socket
@@ -73,8 +73,9 @@ def from_file(file_path, silent=False):
     :param file_path:
     :param silent:
     """
-    d = imp.new_module("config")
-    d.__file__ = file_path
+    module_spec = importlib.util.spec_from_file_location("config", file_path)
+    d = importlib.util.module_from_spec(module_spec)

     try:
         with open(file_path) as config_file:
             exec(  # nosec: config file safe
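`imp` has been deprecated since Python 3.4 and is removed in 3.12; `importlib.util` covers the same ground. The hunk above keeps Lemur's `exec`-based loading of the config file, but the usual standalone pattern looks roughly like this sketch (not the Lemur helper itself):

    import importlib.util

    def load_config_module(file_path):
        spec = importlib.util.spec_from_file_location("config", file_path)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)   # runs the file and populates module.__dict__
        return module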
@@ -7,7 +7,7 @@

 .. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
 """
-from sqlalchemy import Column, Integer, ForeignKey, PassiveDefault, func, Enum
+from sqlalchemy import Column, Integer, ForeignKey, DefaultClause, func, Enum

 from sqlalchemy_utils.types.arrow import ArrowType

@@ -29,5 +29,5 @@ class Log(db.Model):
         ),
         nullable=False,
     )
-    logged_at = Column(ArrowType(), PassiveDefault(func.now()), nullable=False)
+    logged_at = Column(ArrowType(), DefaultClause(func.now()), nullable=False)
     user_id = Column(Integer, ForeignKey("users.id"), nullable=False)
@@ -16,6 +16,7 @@ from itertools import groupby
 import arrow
 from flask import current_app
 from sqlalchemy import and_
+from sqlalchemy.sql.expression import false, true

 from lemur import database
 from lemur.certificates.models import Certificate
@@ -40,10 +41,10 @@ def get_certificates(exclude=None):
     q = (
         database.db.session.query(Certificate)
         .filter(Certificate.not_after <= max)
-        .filter(Certificate.notify == True)
-        .filter(Certificate.expired == False)
-        .filter(Certificate.revoked == False)
-    )  # noqa
+        .filter(Certificate.notify == true())
+        .filter(Certificate.expired == false())
+        .filter(Certificate.revoked == false())
+    )

     exclude_conditions = []
     if exclude:
@@ -138,11 +139,11 @@ def send_expiration_notifications(exclude):
     # security team gets all
     security_email = current_app.config.get("LEMUR_SECURITY_TEAM_EMAIL")

-    security_data = []
     for owner, notification_group in get_eligible_certificates(exclude=exclude).items():

         for notification_label, certificates in notification_group.items():
             notification_data = []
+            security_data = []

             notification = certificates[0][0]
@@ -43,7 +43,7 @@ def create_default_expiration_notifications(name, recipients, intervals=None):
             "name": "recipients",
             "type": "str",
             "required": True,
-            "validation": "^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$",
+            "validation": r"^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$",
             "helpMessage": "Comma delimited list of email addresses",
             "value": ",".join(recipients),
         },
@@ -63,7 +63,7 @@ def create_default_expiration_notifications(name, recipients, intervals=None):
             "name": "interval",
             "type": "int",
             "required": True,
-            "validation": "^\d+$",
+            "validation": r"^\d+$",
             "helpMessage": "Number of days to be alert before expiration.",
             "value": i,
         }
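The added `r` prefixes silence flake8's W605 ("invalid escape sequence"): `\d`, `\w`, `\(` and similar are not recognized string escapes, so Python 3.6+ warns about them, while a raw string hands the backslash through to the regex engine untouched. For example:

    import re

    assert re.fullmatch(r"^\d+$", "30")   # raw string: the backslash reaches re intact
    # "^\d+$" without the prefix matches the same today, but triggers W605/DeprecationWarning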
@@ -9,7 +9,7 @@ from sqlalchemy import (
     Integer,
     ForeignKey,
     String,
-    PassiveDefault,
+    DefaultClause,
     func,
     Column,
     Text,
@@ -76,14 +76,14 @@ class PendingCertificate(db.Model):
     chain = Column(Text())
     private_key = Column(Vault, nullable=True)

-    date_created = Column(ArrowType, PassiveDefault(func.now()), nullable=False)
+    date_created = Column(ArrowType, DefaultClause(func.now()), nullable=False)
     dns_provider_id = Column(
         Integer, ForeignKey("dns_providers.id", ondelete="CASCADE")
     )

     status = Column(Text(), nullable=True)
     last_updated = Column(
-        ArrowType, PassiveDefault(func.now()), onupdate=func.now(), nullable=False
+        ArrowType, DefaultClause(func.now()), onupdate=func.now(), nullable=False
     )

     rotation = Column(Boolean, default=False)
@@ -42,7 +42,7 @@ class ExpirationNotificationPlugin(NotificationPlugin):
             "name": "interval",
             "type": "int",
             "required": True,
-            "validation": "^\d+$",
+            "validation": r"^\d+$",
             "helpMessage": "Number of days to be alert before expiration.",
         },
         {
@@ -481,7 +481,7 @@ class ACMEIssuerPlugin(IssuerPlugin):
             "name": "acme_url",
             "type": "str",
             "required": True,
-            "validation": "/^http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+$/",
+            "validation": r"/^http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+$/",
             "helpMessage": "Must be a valid web url starting with http[s]://",
         },
         {
@@ -494,7 +494,7 @@ class ACMEIssuerPlugin(IssuerPlugin):
             "name": "email",
             "type": "str",
             "default": "",
-            "validation": "/^?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)$/",
+            "validation": r"/^?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)$/",
             "helpMessage": "Email to use",
         },
         {
@@ -3,6 +3,7 @@ from unittest.mock import patch, Mock

 import josepy as jose
 from cryptography.x509 import DNSName
+from flask import Flask
 from lemur.plugins.lemur_acme import plugin
 from lemur.common.utils import generate_private_key
 from mock import MagicMock
@@ -22,6 +23,16 @@ class TestAcme(unittest.TestCase):
             "test.fakedomain.net": [mock_dns_provider],
         }

+        # Creates a new Flask application for a test duration. In python 3.8, manual push of application context is
+        # needed to run tests in dev environment without getting error 'Working outside of application context'.
+        _app = Flask('lemur_test_acme')
+        self.ctx = _app.app_context()
+        assert self.ctx
+        self.ctx.push()
+
+    def tearDown(self):
+        self.ctx.pop()
+
     @patch("lemur.plugins.lemur_acme.plugin.len", return_value=1)
     def test_get_dns_challenges(self, mock_len):
         assert mock_len
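The setUp/tearDown pair above gives each test an application context so code that touches `current_app` works outside a request. For a single test the same fix can be written with a `with` block; a sketch with a made-up config key:

    from flask import Flask, current_app

    def test_reads_config():
        app = Flask("lemur_test_acme")
        app.config["SOME_ACME_SETTING"] = "on"   # hypothetical key, for illustration only
        with app.app_context():                  # current_app is only valid inside this block
            assert current_app.config["SOME_ACME_SETTING"] == "on"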
@@ -117,22 +128,24 @@ class TestAcme(unittest.TestCase):
         mock_dns_provider = Mock()
         mock_dns_provider.wait_for_dns_change = Mock(return_value=True)

+        mock_dns_challenge = Mock()
+        response = Mock()
+        response.simple_verify = Mock(return_value=False)
+        mock_dns_challenge.response = Mock(return_value=response)
+
         mock_authz = Mock()
-        mock_authz.dns_challenge.response = Mock()
-        mock_authz.dns_challenge.response.simple_verify = Mock(return_value=False)
-        mock_authz.authz = []
+        mock_authz.dns_challenge = []
+        mock_authz.dns_challenge.append(mock_dns_challenge)
         mock_authz.target_domain = "www.test.com"
         mock_authz_record = Mock()
         mock_authz_record.body.identifier.value = "test"
+        mock_authz.authz = []
         mock_authz.authz.append(mock_authz_record)
         mock_authz.change_id = []
         mock_authz.change_id.append("123")
-        mock_authz.dns_challenge = []
-        dns_challenge = Mock()
-        mock_authz.dns_challenge.append(dns_challenge)
-        self.assertRaises(
-            ValueError, self.acme.complete_dns_challenge(mock_acme, mock_authz)
-        )
+        with self.assertRaises(ValueError):
+            self.acme.complete_dns_challenge(mock_acme, mock_authz)

     @patch("acme.client.Client")
     @patch("OpenSSL.crypto", return_value="mock_cert")
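The rewritten assertion also fixes a subtle test bug: in `self.assertRaises(ValueError, fn(args))` the call `fn(args)` runs before `assertRaises` ever sees it, so the exception escapes the assertion. Either pass the callable uncalled or use the context-manager form, as the hunk above now does:

    import unittest

    class AssertRaisesDemo(unittest.TestCase):
        def test_both_forms(self):
            def boom(msg):
                raise ValueError(msg)

            self.assertRaises(ValueError, boom, "callable plus args, not a call")
            with self.assertRaises(ValueError):
                boom("context-manager form")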
@@ -1,5 +1,7 @@
 import unittest
 from unittest.mock import patch, Mock

+from flask import Flask
 from lemur.plugins.lemur_acme import plugin, powerdns

@@ -17,6 +19,16 @@ class TestPowerdns(unittest.TestCase):
             "test.fakedomain.net": [mock_dns_provider],
         }

+        # Creates a new Flask application for a test duration. In python 3.8, manual push of application context is
+        # needed to run tests in dev environment without getting error 'Working outside of application context'.
+        _app = Flask('lemur_test_acme')
+        self.ctx = _app.app_context()
+        assert self.ctx
+        self.ctx.push()
+
+    def tearDown(self):
+        self.ctx.pop()
+
     @patch("lemur.plugins.lemur_acme.powerdns.current_app")
     def test_get_zones(self, mock_current_app):
         account_number = "1234567890"
@@ -1,6 +1,7 @@
 import unittest
 from unittest.mock import patch, Mock

+from flask import Flask
 from lemur.plugins.lemur_acme import plugin, ultradns
 from requests.models import Response
@@ -19,6 +20,16 @@ class TestUltradns(unittest.TestCase):
             "test.fakedomain.net": [mock_dns_provider],
         }

+        # Creates a new Flask application for a test duration. In python 3.8, manual push of application context is
+        # needed to run tests in dev environment without getting error 'Working outside of application context'.
+        _app = Flask('lemur_test_acme')
+        self.ctx = _app.app_context()
+        assert self.ctx
+        self.ctx.push()
+
+    def tearDown(self):
+        self.ctx.pop()
+
     @patch("lemur.plugins.lemur_acme.ultradns.requests")
     @patch("lemur.plugins.lemur_acme.ultradns.current_app")
     def test_ultradns_get_token(self, mock_current_app, mock_requests):
@@ -33,6 +33,7 @@
 .. moduleauthor:: Harm Weites <harm@weites.com>
 """

+import sys
 from acme.errors import ClientError
 from flask import current_app

@@ -408,6 +409,47 @@ class S3DestinationPlugin(ExportDestinationPlugin):
             account_number=self.get_option("accountNumber", options),
         )

+    def upload_acme_token(self, token_path, token, options, **kwargs):
+        """
+        This is called from the acme http challenge
+        :param self:
+        :param token_path:
+        :param token:
+        :param options:
+        :param kwargs:
+        :return:
+        """
+        current_app.logger.debug("S3 destination plugin is started for HTTP-01 challenge")
+
+        function = f"{__name__}.{sys._getframe().f_code.co_name}"
+
+        account_number = self.get_option("accountNumber", options)
+        bucket_name = self.get_option("bucket", options)
+        prefix = self.get_option("prefix", options)
+        region = self.get_option("region", options)
+        filename = token_path.split("/")[-1]
+        if not prefix.endswith("/"):
+            prefix += "/"
+
+        res = s3.put(bucket_name=bucket_name,
+                     region_name=region,
+                     prefix=prefix + filename,
+                     data=token,
+                     encrypt=False,
+                     account_number=account_number)
+        res = "Success" if res else "Failure"
+        log_data = {
+            "function": function,
+            "message": "upload acme token to s3",
+            "result": res,
+            "bucket_name": bucket_name,
+            "filename": filename
+        }
+        current_app.logger.info(log_data)
+        metrics.send(f"{function}", "counter", 1, metric_tags={"result": res,
+                                                               "bucket_name": bucket_name,
+                                                               "filename": filename})
+
+
 class SNSNotificationPlugin(ExpirationNotificationPlugin):
     title = "AWS SNS"
@@ -6,12 +6,15 @@
 :license: Apache, see LICENSE for more details.
 .. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
 """
+from botocore.exceptions import ClientError
 from flask import current_app
+from lemur.extensions import sentry

 from .sts import sts_client


 @sts_client("s3", service_type="resource")
-def put(bucket_name, region, prefix, data, encrypt, **kwargs):
+def put(bucket_name, region_name, prefix, data, encrypt, **kwargs):
     """
     Use STS to write to an S3 bucket
     """
@@ -32,4 +35,41 @@ def put(bucket_name, region, prefix, data, encrypt, **kwargs):
             ServerSideEncryption="AES256",
         )
     else:
-        bucket.put_object(Key=prefix, Body=data, ACL="bucket-owner-full-control")
+        try:
+            bucket.put_object(Key=prefix, Body=data, ACL="bucket-owner-full-control")
+            return True
+        except ClientError:
+            sentry.captureException()
+            return False
+
+
+@sts_client("s3", service_type="client")
+def delete(bucket_name, prefixed_object_name, **kwargs):
+    """
+    Use STS to delete an object
+    """
+    try:
+        response = kwargs["client"].delete_object(Bucket=bucket_name, Key=prefixed_object_name)
+        current_app.logger.debug(f"Delete data from S3."
+                                 f"Bucket: {bucket_name},"
+                                 f"Prefix: {prefixed_object_name},"
+                                 f"Status_code: {response}")
+        return response['ResponseMetadata']['HTTPStatusCode'] < 300
+    except ClientError:
+        sentry.captureException()
+        return False
+
+
+@sts_client("s3", service_type="client")
+def get(bucket_name, prefixed_object_name, **kwargs):
+    """
+    Use STS to get an object
+    """
+    try:
+        response = kwargs["client"].get_object(Bucket=bucket_name, Key=prefixed_object_name)
+        current_app.logger.debug(f"Get data from S3. Bucket: {bucket_name},"
+                                 f"object_name: {prefixed_object_name}")
+        return response['Body'].read().decode("utf-8")
+    except ClientError:
+        sentry.captureException()
+        return None
@@ -1,5 +1,82 @@
+import boto3
+from moto import mock_sts, mock_s3
+
+
 def test_get_certificates(app):
     from lemur.plugins.base import plugins

     p = plugins.get("aws-s3")
     assert p
+
+
+@mock_sts()
+@mock_s3()
+def test_upload_acme_token(app):
+    from lemur.plugins.base import plugins
+    from lemur.plugins.lemur_aws.s3 import get
+
+    bucket = "public-bucket"
+    account = "123456789012"
+    prefix = "some-path/more-path/"
+    token_content = "Challenge"
+    token_name = "TOKEN"
+    token_path = ".well-known/acme-challenge/" + token_name
+
+    additional_options = [
+        {
+            "name": "bucket",
+            "value": bucket,
+            "type": "str",
+            "required": True,
+            "validation": r"[0-9a-z.-]{3,63}",
+            "helpMessage": "Must be a valid S3 bucket name!",
+        },
+        {
+            "name": "accountNumber",
+            "type": "str",
+            "value": account,
+            "required": True,
+            "validation": r"[0-9]{12}",
+            "helpMessage": "A valid AWS account number with permission to access S3",
+        },
+        {
+            "name": "region",
+            "type": "str",
+            "default": "us-east-1",
+            "required": False,
+            "helpMessage": "Region bucket exists",
+            "available": ["us-east-1", "us-west-2", "eu-west-1"],
+        },
+        {
+            "name": "encrypt",
+            "type": "bool",
+            "value": False,
+            "required": False,
+            "helpMessage": "Enable server side encryption",
+            "default": True,
+        },
+        {
+            "name": "prefix",
+            "type": "str",
+            "value": prefix,
+            "required": False,
+            "helpMessage": "Must be a valid S3 object prefix!",
+        },
+    ]
+
+    s3_client = boto3.client('s3')
+    s3_client.create_bucket(Bucket=bucket)
+    p = plugins.get("aws-s3")
+
+    p.upload_acme_token(token_path=token_path,
+                        token_content=token_content,
+                        token=token_content,
+                        options=additional_options)
+
+    response = get(bucket_name=bucket,
+                   prefixed_object_name=prefix + token_name,
+                   encrypt=False,
+                   account_number=account)
+
+    # put data, and get the same data back
+    assert (response == token_content)
@@ -0,0 +1,41 @@
+import boto3
+from moto import mock_sts, mock_s3
+
+
+@mock_sts()
+@mock_s3()
+def test_put_delete_s3_object(app):
+    from lemur.plugins.lemur_aws.s3 import put, delete, get
+
+    bucket = "public-bucket"
+    region = "us-east-1"
+    account = "123456789012"
+    path = "some-path/foo"
+    data = "dummy data"
+
+    s3_client = boto3.client('s3')
+    s3_client.create_bucket(Bucket=bucket)
+
+    put(bucket_name=bucket,
+        region_name=region,
+        prefix=path,
+        data=data,
+        encrypt=False,
+        account_number=account,
+        region=region)
+
+    response = get(bucket_name=bucket, prefixed_object_name=path, account_number=account)
+
+    # put data, and get the same data back
+    assert (response == data)
+
+    response = get(bucket_name="wrong-bucket", prefixed_object_name=path, account_number=account)
+
+    # attempting to get the wrong data
+    assert (response is None)
+
+    delete(bucket_name=bucket, prefixed_object_name=path, account_number=account)
+    response = get(bucket_name=bucket, prefixed_object_name=path, account_number=account)
+
+    # after the delete, the object should be gone
+    assert (response is None)
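Both new tests lean on moto: the `@mock_sts`/`@mock_s3` decorators patch boto3 in-process, so no real AWS credentials or network access are needed and every test starts from an empty fake account. A minimal standalone example of the same mechanism:

    import boto3
    from moto import mock_s3

    @mock_s3()
    def test_s3_roundtrip():
        client = boto3.client("s3", region_name="us-east-1")
        client.create_bucket(Bucket="demo-bucket")
        client.put_object(Bucket="demo-bucket", Key="k", Body=b"v")
        assert client.get_object(Bucket="demo-bucket", Key="k")["Body"].read() == b"v"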
@@ -91,7 +91,7 @@ class EmailNotificationPlugin(ExpirationNotificationPlugin):
             "name": "recipients",
             "type": "str",
             "required": True,
-            "validation": "^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$",
+            "validation": r"^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$",
             "helpMessage": "Comma delimited list of email addresses",
         }
     ]
@@ -47,7 +47,7 @@ class SFTPDestinationPlugin(DestinationPlugin):
             "type": "int",
             "required": True,
             "helpMessage": "The SFTP port, default is 22.",
-            "validation": "^(6553[0-5]|655[0-2][0-9]\d|65[0-4](\d){2}|6[0-4](\d){3}|[1-5](\d){4}|[1-9](\d){0,3})",
+            "validation": r"^(6553[0-5]|655[0-2][0-9]\d|65[0-4](\d){2}|6[0-4](\d){3}|[1-5](\d){4}|[1-9](\d){0,3})",
             "default": "22",
         },
         {
@@ -89,7 +89,7 @@ class SlackNotificationPlugin(ExpirationNotificationPlugin):
             "name": "webhook",
             "type": "str",
             "required": True,
-            "validation": "^https:\/\/hooks\.slack\.com\/services\/.+$",
+            "validation": r"^https:\/\/hooks\.slack\.com\/services\/.+$",
             "helpMessage": "The url Slack told you to use for this integration",
         },
         {
@@ -13,7 +13,7 @@ class TestDNSProvider(unittest.TestCase):
         self.assertFalse(dnsutil.is_valid_domain('example-of-over-63-character-domain-label-length-limit-123456789.com'))
         self.assertTrue(dnsutil.is_valid_domain('_acme-chall.example.com'))
         self.assertFalse(dnsutil.is_valid_domain('e/xample.com'))
-        self.assertFalse(dnsutil.is_valid_domain('exam\ple.com'))
+        self.assertFalse(dnsutil.is_valid_domain('exam\\ple.com'))
         self.assertFalse(dnsutil.is_valid_domain('<example.com'))
         self.assertFalse(dnsutil.is_valid_domain('*.example.com'))
         self.assertFalse(dnsutil.is_valid_domain('-example.io'))
@@ -81,7 +81,7 @@ class Vault(types.TypeDecorator):
     """

     # required by SQLAlchemy. defines the underlying column type
-    impl = types.Binary
+    impl = types.LargeBinary

     def process_bind_param(self, value, dialect):
         """
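`types.Binary` was a deprecated alias for `types.LargeBinary`, and newer SQLAlchemy releases drop it, so the `TypeDecorator` now names the real type. A bare-bones sketch of the pattern, with nothing Lemur-specific in it:

    from sqlalchemy import types

    class BytesWrapper(types.TypeDecorator):
        impl = types.LargeBinary   # underlying column type (BYTEA / BLOB)

        def process_bind_param(self, value, dialect):
            return None if value is None else bytes(value)

        def process_result_value(self, value, dialect):
            return value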
@@ -1,6 +1,6 @@
 # Run `make up-reqs` to update pinned dependencies in requirement text files

-flake8==3.5.0  # flake8 3.6.0 is giving erroneous "W605 invalid escape sequence" errors.
+flake8==3.8.4  # flake8 latest version
 pre-commit
 invoke
 twine
@@ -6,16 +6,16 @@
 #
 appdirs==1.4.3  # via virtualenv
 bleach==3.1.4  # via readme-renderer
-certifi==2020.6.20  # via requests
+certifi==2020.11.8  # via requests
 cffi==1.14.0  # via cryptography
 cfgv==3.1.0  # via pre-commit
 chardet==3.0.4  # via requests
 colorama==0.4.3  # via twine
-cryptography==3.2  # via secretstorage
+cryptography==3.2.1  # via secretstorage
 distlib==0.3.0  # via virtualenv
 docutils==0.16  # via readme-renderer
 filelock==3.0.12  # via virtualenv
-flake8==3.5.0  # via -r requirements-dev.in
+flake8==3.8.4  # via -r requirements-dev.in
 identify==1.4.14  # via pre-commit
 idna==2.9  # via requests
 invoke==1.4.1  # via -r requirements-dev.in
@@ -24,10 +24,10 @@ keyring==21.2.0  # via twine
 mccabe==0.6.1  # via flake8
 nodeenv==1.5.0  # via -r requirements-dev.in, pre-commit
 pkginfo==1.5.0.1  # via twine
-pre-commit==2.7.1  # via -r requirements-dev.in
-pycodestyle==2.3.1  # via flake8
+pre-commit==2.8.2  # via -r requirements-dev.in
+pycodestyle==2.6.0  # via flake8
 pycparser==2.20  # via cffi
-pyflakes==1.6.0  # via flake8
+pyflakes==2.2.0  # via flake8
 pygments==2.6.1  # via readme-renderer
 pyyaml==5.3.1  # via -r requirements-dev.in, pre-commit
 readme-renderer==25.0  # via twine
@@ -17,16 +17,16 @@ bcrypt==3.1.7  # via -r requirements.txt, flask-bcrypt, paramiko
 beautifulsoup4==4.9.1  # via -r requirements.txt, cloudflare
 billiard==3.6.3.0  # via -r requirements.txt, celery
 blinker==1.4  # via -r requirements.txt, flask-mail, flask-principal, raven
-boto3==1.16.5  # via -r requirements.txt
-botocore==1.19.5  # via -r requirements.txt, boto3, s3transfer
+boto3==1.16.14  # via -r requirements.txt
+botocore==1.19.14  # via -r requirements.txt, boto3, s3transfer
 celery[redis]==4.4.2  # via -r requirements.txt
-certifi==2020.6.20  # via -r requirements.txt, requests
+certifi==2020.11.8  # via -r requirements.txt, requests
 certsrv==2.1.1  # via -r requirements.txt
 cffi==1.14.0  # via -r requirements.txt, bcrypt, cryptography, pynacl
 chardet==3.0.4  # via -r requirements.txt, requests
-click==7.1.1  # via -r requirements.txt, flask
+click==7.1.2  # via -r requirements.txt, flask
 cloudflare==2.8.13  # via -r requirements.txt
-cryptography==3.2  # via -r requirements.txt, acme, josepy, paramiko, pyopenssl, requests
+cryptography==3.2.1  # via -r requirements.txt, acme, josepy, paramiko, pyopenssl, requests
 dnspython3==1.15.0  # via -r requirements.txt
 dnspython==1.15.0  # via -r requirements.txt, dnspython3
 docutils==0.15.2  # via sphinx
|
||||||
snowballstemmer==2.0.0 # via sphinx
|
snowballstemmer==2.0.0 # via sphinx
|
||||||
soupsieve==2.0.1 # via -r requirements.txt, beautifulsoup4
|
soupsieve==2.0.1 # via -r requirements.txt, beautifulsoup4
|
||||||
sphinx-rtd-theme==0.5.0 # via -r requirements-docs.in
|
sphinx-rtd-theme==0.5.0 # via -r requirements-docs.in
|
||||||
sphinx==3.2.1 # via -r requirements-docs.in, sphinx-rtd-theme, sphinxcontrib-httpdomain
|
sphinx==3.3.0 # via -r requirements-docs.in, sphinx-rtd-theme, sphinxcontrib-httpdomain
|
||||||
sphinxcontrib-applehelp==1.0.2 # via sphinx
|
sphinxcontrib-applehelp==1.0.2 # via sphinx
|
||||||
sphinxcontrib-devhelp==1.0.2 # via sphinx
|
sphinxcontrib-devhelp==1.0.2 # via sphinx
|
||||||
sphinxcontrib-htmlhelp==1.0.3 # via sphinx
|
sphinxcontrib-htmlhelp==1.0.3 # via sphinx
|
||||||
|
|
|
@@ -10,21 +10,21 @@ aws-sam-translator==1.22.0  # via cfn-lint
 aws-xray-sdk==2.5.0  # via moto
 bandit==1.6.2  # via -r requirements-tests.in
 black==20.8b1  # via -r requirements-tests.in
-boto3==1.16.5  # via aws-sam-translator, moto
+boto3==1.16.14  # via aws-sam-translator, moto
 boto==2.49.0  # via moto
-botocore==1.19.5  # via aws-xray-sdk, boto3, moto, s3transfer
-certifi==2020.6.20  # via requests
+botocore==1.19.14  # via aws-xray-sdk, boto3, moto, s3transfer
+certifi==2020.11.8  # via requests
 cffi==1.14.0  # via cryptography
 cfn-lint==0.29.5  # via moto
 chardet==3.0.4  # via requests
 click==7.1.2  # via black, flask
 coverage==5.3  # via -r requirements-tests.in
-cryptography==3.2  # via moto, python-jose, sshpubkeys
+cryptography==3.2.1  # via moto, python-jose, sshpubkeys
 decorator==4.4.2  # via networkx
 docker==4.2.0  # via moto
 ecdsa==0.14.1  # via moto, python-jose, sshpubkeys
 factory-boy==3.1.0  # via -r requirements-tests.in
-faker==4.14.0  # via -r requirements-tests.in, factory-boy
+faker==4.14.2  # via -r requirements-tests.in, factory-boy
 fakeredis==1.4.4  # via -r requirements-tests.in
 flask==1.1.2  # via pytest-flask
 freezegun==1.0.0  # via -r requirements-tests.in
|
||||||
pyflakes==2.2.0 # via -r requirements-tests.in
|
pyflakes==2.2.0 # via -r requirements-tests.in
|
||||||
pyparsing==2.4.7 # via packaging
|
pyparsing==2.4.7 # via packaging
|
||||||
pyrsistent==0.16.0 # via jsonschema
|
pyrsistent==0.16.0 # via jsonschema
|
||||||
pytest-flask==1.0.0 # via -r requirements-tests.in
|
pytest-flask==1.1.0 # via -r requirements-tests.in
|
||||||
pytest-mock==3.3.1 # via -r requirements-tests.in
|
pytest-mock==3.3.1 # via -r requirements-tests.in
|
||||||
pytest==6.1.1 # via -r requirements-tests.in, pytest-flask, pytest-mock
|
pytest==6.1.2 # via -r requirements-tests.in, pytest-flask, pytest-mock
|
||||||
python-dateutil==2.8.1 # via botocore, faker, freezegun, moto
|
python-dateutil==2.8.1 # via botocore, faker, freezegun, moto
|
||||||
python-jose[cryptography]==3.1.0 # via moto
|
python-jose[cryptography]==3.1.0 # via moto
|
||||||
pytz==2019.3 # via moto
|
pytz==2019.3 # via moto
|
||||||
|
|
|
@@ -15,16 +15,16 @@ bcrypt==3.1.7  # via flask-bcrypt, paramiko
 beautifulsoup4==4.9.1  # via cloudflare
 billiard==3.6.3.0  # via celery
 blinker==1.4  # via flask-mail, flask-principal, raven
-boto3==1.16.5  # via -r requirements.in
-botocore==1.19.5  # via -r requirements.in, boto3, s3transfer
+boto3==1.16.14  # via -r requirements.in
+botocore==1.19.14  # via -r requirements.in, boto3, s3transfer
 celery[redis]==4.4.2  # via -r requirements.in
-certifi==2020.6.20  # via -r requirements.in, requests
+certifi==2020.11.8  # via -r requirements.in, requests
 certsrv==2.1.1  # via -r requirements.in
 cffi==1.14.0  # via bcrypt, cryptography, pynacl
 chardet==3.0.4  # via requests
-click==7.1.1  # via flask
+click==7.1.2  # via flask
 cloudflare==2.8.13  # via -r requirements.in
-cryptography==3.2  # via -r requirements.in, acme, josepy, paramiko, pyopenssl, requests
+cryptography==3.2.1  # via -r requirements.in, acme, josepy, paramiko, pyopenssl, requests
 dnspython3==1.15.0  # via -r requirements.in
 dnspython==1.15.0  # via dnspython3
 dyn==1.8.1  # via -r requirements.in