Merge branch 'master' of github.com:Netflix/lemur into update-travis-sudo

# Conflicts:
#	.travis.yml

commit 45da5847f7
.travis.yml

@@ -1,5 +1,5 @@
 language: python
-dist: trusty
+dist: xenial

 node_js:
   - "6.2.0"

@@ -9,8 +9,8 @@ addons:

 matrix:
   include:
-    - python: "3.5"
-      env: TOXENV=py35
+    - python: "3.7"
+      env: TOXENV=py37

 cache:
   directories:
Makefile

@@ -125,5 +125,9 @@ endif
	@echo "--> Done installing new dependencies"
	@echo ""

+# Execute with make checkout-pr pr=<pr number>
+checkout-pr:
+	git fetch upstream pull/$(pr)/head:pr-$(pr)
+
 .PHONY: develop dev-postgres dev-docs setup-git build clean update-submodules test testloop test-cli test-js test-python lint lint-python lint-js coverage publish release
@@ -13,10 +13,13 @@ services:
       VIRTUAL_ENV: 'true'

   postgres:
-    image: postgres:9.4
+    image: postgres
+    restart: always
     environment:
       POSTGRES_USER: lemur
       POSTGRES_PASSWORD: lemur
+    ports:
+      - "5432:5432"

   redis:
     image: "redis:alpine"
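With Postgres now published on 5432 and the lemur/lemur credentials above, a quick connectivity check from the host could look like this (illustrative only; the database name "lemur" assumes the postgres image default of reusing POSTGRES_USER when POSTGRES_DB is unset):

# Illustrative connectivity check against the compose Postgres service.
import psycopg2

conn = psycopg2.connect(host="localhost", port=5432, user="lemur", password="lemur", dbname="lemur")
with conn.cursor() as cur:
    cur.execute("SELECT version()")
    print(cur.fetchone()[0])
conn.close()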
@@ -112,10 +112,20 @@ class CertificateInputSchema(CertificateCreationSchema):
         if data.get('replacements'):
             data['replaces'] = data['replacements']  # TODO remove when field is deprecated
         if data.get('csr'):
-            dns_names = cert_utils.get_dns_names_from_csr(data['csr'])
-            if not data['extensions']['subAltNames']['names']:
+            csr_sans = cert_utils.get_sans_from_csr(data['csr'])
+            if not data.get('extensions'):
+                data['extensions'] = {
+                    'subAltNames': {
+                        'names': []
+                    }
+                }
+            elif not data['extensions'].get('subAltNames'):
+                data['extensions']['subAltNames'] = {
+                    'names': []
+                }
+            elif not data['extensions']['subAltNames'].get('names'):
                 data['extensions']['subAltNames']['names'] = []
-            data['extensions']['subAltNames']['names'] += dns_names
+            data['extensions']['subAltNames']['names'] += csr_sans
         return missing.convert_validity_years(data)
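The new if/elif chain only guarantees that the nested extensions structure exists before the CSR SANs are appended. Factored out, the same defensive initialization reads like this (an illustrative helper, not code from the PR; the helper name is made up):

# Illustrative helper mirroring the if/elif chain above.
def ensure_sub_alt_names(data):
    if not data.get('extensions'):
        data['extensions'] = {'subAltNames': {'names': []}}
    elif not data['extensions'].get('subAltNames'):
        data['extensions']['subAltNames'] = {'names': []}
    elif not data['extensions']['subAltNames'].get('names'):
        data['extensions']['subAltNames']['names'] = []
    return data['extensions']['subAltNames']['names']

ensure_sub_alt_names(data).extend(csr_sans)  # same effect as the += in the schema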
@@ -255,6 +265,7 @@ class CertificateUploadInputSchema(CertificateCreationSchema):
     private_key = fields.String()
     body = fields.String(required=True)
     chain = fields.String(missing=None, allow_none=True)
+    csr = fields.String(required=False, allow_none=True, validate=validators.csr)

     destinations = fields.Nested(AssociatedDestinationSchema, missing=[], many=True)
     notifications = fields.Nested(AssociatedNotificationSchema, missing=[], many=True)
@@ -14,14 +14,14 @@ from cryptography.hazmat.backends import default_backend
 from marshmallow.exceptions import ValidationError


-def get_dns_names_from_csr(data):
+def get_sans_from_csr(data):
     """
-    Fetches DNSNames from CSR.
-    Potentially extendable to any kind of SubjectAlternativeName
+    Fetches SubjectAlternativeNames from CSR.
+    Works with any kind of SubjectAlternativeName
     :param data: PEM-encoded string with CSR
-    :return:
+    :return: List of LemurAPI-compatible subAltNames
     """
-    dns_names = []
+    sub_alt_names = []
     try:
         request = x509.load_pem_x509_csr(data.encode('utf-8'), default_backend())
     except Exception:

@@ -29,14 +29,12 @@ def get_dns_names_from_csr(data):

     try:
         alt_names = request.extensions.get_extension_for_class(x509.SubjectAlternativeName)
-        for name in alt_names.value.get_values_for_type(x509.DNSName):
-            dns_name = {
-                'nameType': 'DNSName',
-                'value': name
-            }
-            dns_names.append(dns_name)
+        for alt_name in alt_names.value:
+            sub_alt_names.append({
+                'nameType': type(alt_name).__name__,
+                'value': alt_name.value
+            })
     except x509.ExtensionNotFound:
         pass

-    return dns_names
+    return sub_alt_names
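To see the shape of what the renamed helper now returns, here is a standalone sketch that builds a throwaway CSR with the cryptography package and applies the same extraction logic (illustrative only, not Lemur code):

# Build a throwaway CSR with one DNSName SAN and extract it the way get_sans_from_csr() now does.
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
csr = (
    x509.CertificateSigningRequestBuilder()
    .subject_name(x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u'example.net')]))
    .add_extension(x509.SubjectAlternativeName([x509.DNSName(u'www.example.net')]), critical=False)
    .sign(key, hashes.SHA256(), default_backend())
)
alt_names = csr.extensions.get_extension_for_class(x509.SubjectAlternativeName)
print([{'nameType': type(n).__name__, 'value': n.value} for n in alt_names.value])
# [{'nameType': 'DNSName', 'value': 'www.example.net'}]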
@@ -306,6 +306,7 @@ class CertificatesUpload(AuthenticatedResource):
              "body": "-----BEGIN CERTIFICATE-----...",
              "chain": "-----BEGIN CERTIFICATE-----...",
              "privateKey": "-----BEGIN RSA PRIVATE KEY-----..."
+             "csr": "-----BEGIN CERTIFICATE REQUEST-----..."
              "destinations": [],
              "notifications": [],
              "replacements": [],
@@ -20,6 +20,8 @@ from lemur.notifications.messaging import send_pending_failure_notification
 from lemur.pending_certificates import service as pending_certificate_service
 from lemur.plugins.base import plugins
 from lemur.sources.cli import clean, sync, validate_sources
+from lemur.destinations import service as destinations_service
+from lemur.sources.service import add_aws_destination_to_sources

 if current_app:
     flask_app = current_app

@@ -255,3 +257,21 @@ def sync_source(source):
     sync([source])
     log_data["message"] = "Done syncing source"
     current_app.logger.debug(log_data)


+@celery.task()
+def sync_source_destination():
+    """
+    This celery task will sync destination and source, to make sure all new destinations are also present as source.
+    Some destinations do not qualify as sources, and hence should be excluded from being added as sources.
+    We identify qualified destinations based on the sync_as_source attribute of the plugin.
+    The destination sync_as_source_name reveals the name of the suitable source plugin.
+    We rely on account numbers to avoid duplicates.
+    """
+    current_app.logger.debug("Syncing AWS destinations and sources")
+
+    for dst in destinations_service.get_all():
+        if add_aws_destination_to_sources(dst):
+            current_app.logger.debug("Source: %s added", dst.label)
+
+    current_app.logger.debug("Completed Syncing AWS destinations and sources")
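If the new task is meant to run on a schedule, a Celery beat entry along these lines would do it (illustrative; the task path assumes the module is lemur.common.celery, and Lemur may wire its scheduler differently):

# Illustrative Celery beat schedule entry, not part of the PR.
from celery.schedules import crontab

CELERYBEAT_SCHEDULE = {
    'sync-source-destination': {
        'task': 'lemur.common.celery.sync_source_destination',  # module path is an assumption
        'schedule': crontab(minute=0),  # hourly
    },
}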
@@ -85,7 +85,9 @@ def parse_cert_chain(pem_chain):
     :param pem_chain: string
     :return: List of parsed certificates
     """
-    return [parse_certificate(cert) for cert in split_pem(pem_chain) if pem_chain]
+    if pem_chain is None:
+        return []
+    return [parse_certificate(cert) for cert in split_pem(pem_chain) if cert]


 def parse_csr(csr):
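The old comprehension filtered on the whole input (`if pem_chain`) rather than on each split element (`if cert`), so empty fragments produced by split_pem were handed to parse_certificate and a None chain crashed outright. A toy illustration of the difference (simplified stand-ins, not Lemur code):

# Toy splitter standing in for split_pem, just to show why the per-element filter matters.
def toy_split(blob):
    return blob.split('\n\n') if blob else []

chain = "CERT-A\n\nCERT-B\n\n"                # trailing separator leaves an empty piece
pieces = toy_split(chain)                      # ['CERT-A', 'CERT-B', '']
old_behaviour = [p for p in pieces if chain]   # keeps the empty piece
new_behaviour = [p for p in pieces if p]       # drops it
print(old_behaviour, new_behaviour)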
@@ -6,11 +6,13 @@
 .. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
 """
 from sqlalchemy import func
+from flask import current_app

 from lemur import database
 from lemur.models import certificate_destination_associations
 from lemur.destinations.models import Destination
 from lemur.certificates.models import Certificate
+from lemur.sources.service import add_aws_destination_to_sources


 def create(label, plugin_name, options, description=None):

@@ -28,6 +30,12 @@ def create(label, plugin_name, options, description=None):
             del option['value']['plugin_object']

     destination = Destination(label=label, options=options, plugin_name=plugin_name, description=description)
+    current_app.logger.info("Destination: %s created", label)
+
+    # add the destination as a source as well, so new destinations are not missing from sources, as long as it is an AWS destination
+    if add_aws_destination_to_sources(destination):
+        current_app.logger.info("Source: %s created", label)
+
     return database.create(destination)
@@ -49,6 +49,8 @@ from lemur.policies.models import RotationPolicy  # noqa
 from lemur.pending_certificates.models import PendingCertificate  # noqa
 from lemur.dns_providers.models import DnsProvider  # noqa

+from sqlalchemy.sql import text
+
 manager = Manager(create_app)
 manager.add_option('-c', '--config', dest='config_path', required=False)

@@ -142,6 +144,7 @@ SQLALCHEMY_DATABASE_URI = 'postgresql://lemur:lemur@localhost:5432/lemur'

 @MigrateCommand.command
 def create():
+    database.db.engine.execute(text('CREATE EXTENSION IF NOT EXISTS pg_trgm'))
     database.db.create_all()
     stamp(revision='head')
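create() now enables the pg_trgm extension before the tables are built; Postgres needs the extension in place before any trigram indexes or similarity operators can be used. Outside of Flask-Script the same idiom looks like this (illustrative; the URL reuses the example SQLALCHEMY_DATABASE_URI from the hunk header):

# Enabling pg_trgm with plain SQLAlchemy; explicit commit() is the SQLAlchemy 1.4+/2.0 style.
from sqlalchemy import create_engine
from sqlalchemy.sql import text

engine = create_engine('postgresql://lemur:lemur@localhost:5432/lemur')
with engine.connect() as conn:
    conn.execute(text('CREATE EXTENSION IF NOT EXISTS pg_trgm'))
    conn.commit()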
@@ -1,5 +1,7 @@
-from marshmallow import fields, post_load
+from marshmallow import fields, validates_schema, post_load
+from marshmallow.exceptions import ValidationError

+from lemur.common import utils, validators
 from lemur.authorities.schemas import AuthorityNestedOutputSchema
 from lemur.certificates.schemas import CertificateNestedOutputSchema
 from lemur.common.schema import LemurInputSchema, LemurOutputSchema

@@ -98,6 +100,31 @@ class PendingCertificateCancelSchema(LemurInputSchema):
     note = fields.String()


+class PendingCertificateUploadInputSchema(LemurInputSchema):
+    external_id = fields.String(missing=None, allow_none=True)
+    body = fields.String(required=True)
+    chain = fields.String(missing=None, allow_none=True)
+
+    @validates_schema
+    def validate_cert_chain(self, data):
+        cert = None
+        if data.get('body'):
+            try:
+                cert = utils.parse_certificate(data['body'])
+            except ValueError:
+                raise ValidationError("Public certificate presented is not valid.", field_names=['body'])
+
+        if data.get('chain'):
+            try:
+                chain = utils.parse_cert_chain(data['chain'])
+            except ValueError:
+                raise ValidationError("Invalid certificate in certificate chain.", field_names=['chain'])
+
+            # Throws ValidationError
+            validators.verify_cert_chain([cert] + chain)
+
+
 pending_certificate_output_schema = PendingCertificateOutputSchema()
 pending_certificate_edit_input_schema = PendingCertificateEditInputSchema()
 pending_certificate_cancel_schema = PendingCertificateCancelSchema()
+pending_certificate_upload_input_schema = PendingCertificateUploadInputSchema()
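validates_schema is marshmallow's hook for validation that spans more than one field, which is exactly what chain verification needs: the chain only means something together with the body. A minimal standalone sketch of the pattern without the Lemur helpers (marshmallow 2.x style, matching the field_names= keyword used above):

# Minimal cross-field validation with @validates_schema.
from marshmallow import Schema, fields, validates_schema
from marshmallow.exceptions import ValidationError

class UploadSketchSchema(Schema):
    body = fields.String(required=True)
    chain = fields.String(missing=None, allow_none=True)

    @validates_schema
    def check_pair(self, data):
        # reject the combination, not an individual field
        if data.get('chain') and not data.get('body'):
            raise ValidationError("A chain requires a certificate body.", field_names=['chain'])

errors = UploadSketchSchema().validate({'chain': '-----BEGIN CERTIFICATE-----...'})
print(errors)  # reports both the missing body and the chain/body mismatch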
@@ -8,9 +8,11 @@ from sqlalchemy import or_, cast, Integer

 from lemur import database
 from lemur.authorities.models import Authority
+from lemur.authorities import service as authorities_service
 from lemur.certificates import service as certificate_service
 from lemur.certificates.schemas import CertificateUploadInputSchema
-from lemur.common.utils import truthiness
+from lemur.common.utils import truthiness, parse_cert_chain, parse_certificate
+from lemur.common import validators
 from lemur.destinations.models import Destination
 from lemur.domains.models import Domain
 from lemur.notifications.models import Notification

@@ -230,3 +232,40 @@ def render(args):
     # Only show unresolved certificates in the UI
     query = query.filter(PendingCertificate.resolved.is_(False))
     return database.sort_and_page(query, PendingCertificate, args)


+def upload(pending_certificate_id, **kwargs):
+    """
+    Uploads a (signed) pending certificate. The allowed fields are validated by
+    PendingCertificateUploadInputSchema. The certificate is also validated to be
+    signed by the correct authority.
+    """
+    pending_cert = get(pending_certificate_id)
+    partial_cert = kwargs
+    uploaded_chain = partial_cert['chain']
+
+    authority = authorities_service.get(pending_cert.authority.id)
+
+    # Construct the chain for cert validation
+    if uploaded_chain:
+        chain = uploaded_chain + '\n' + authority.authority_certificate.body
+    else:
+        chain = authority.authority_certificate.body
+
+    parsed_chain = parse_cert_chain(chain)
+
+    # Check that the certificate is actually signed by the CA to avoid incorrect cert pasting
+    validators.verify_cert_chain([parse_certificate(partial_cert['body'])] + parsed_chain)
+
+    final_cert = create_certificate(pending_cert, partial_cert, pending_cert.user)
+
+    update(
+        pending_cert.id,
+        resolved=True
+    )
+    pending_cert_final_result = update(
+        pending_cert.id,
+        resolved_cert_id=final_cert.id
+    )
+
+    return pending_cert_final_result
@@ -20,6 +20,7 @@ from lemur.pending_certificates.schemas import (
     pending_certificate_output_schema,
     pending_certificate_edit_input_schema,
     pending_certificate_cancel_schema,
+    pending_certificate_upload_input_schema,
 )

 mod = Blueprint('pending_certificates', __name__)

@@ -419,6 +420,101 @@ class PendingCertificatePrivateKey(AuthenticatedResource):
         return response


+class PendingCertificatesUpload(AuthenticatedResource):
+    """ Defines the 'pending_certificates' upload endpoint """
+
+    def __init__(self):
+        self.reqparse = reqparse.RequestParser()
+        super(PendingCertificatesUpload, self).__init__()
+
+    @validate_schema(pending_certificate_upload_input_schema, pending_certificate_output_schema)
+    def post(self, pending_certificate_id, data=None):
+        """
+        .. http:post:: /pending_certificates/1/upload
+
+           Upload the body for a (signed) pending_certificate
+
+           **Example request**:
+
+           .. sourcecode:: http
+
+              POST /pending_certificates/1/upload HTTP/1.1
+              Host: example.com
+              Accept: application/json, text/javascript
+
+              {
+                 "body": "-----BEGIN CERTIFICATE-----...",
+                 "chain": "-----BEGIN CERTIFICATE-----..."
+              }
+
+           **Example response**:
+
+           .. sourcecode:: http
+
+              HTTP/1.1 200 OK
+              Vary: Accept
+              Content-Type: text/javascript
+
+              {
+                 "status": null,
+                 "cn": "*.test.example.net",
+                 "chain": "",
+                 "authority": {
+                     "active": true,
+                     "owner": "secure@example.com",
+                     "id": 1,
+                     "description": "verisign test authority",
+                     "name": "verisign"
+                 },
+                 "owner": "joe@example.com",
+                 "serial": "82311058732025924142789179368889309156",
+                 "id": 2288,
+                 "issuer": "SymantecCorporation",
+                 "dateCreated": "2016-06-03T06:09:42.133769+00:00",
+                 "notBefore": "2016-06-03T00:00:00+00:00",
+                 "notAfter": "2018-01-12T23:59:59+00:00",
+                 "destinations": [],
+                 "bits": 2048,
+                 "body": "-----BEGIN CERTIFICATE-----...",
+                 "description": null,
+                 "deleted": null,
+                 "notifications": [{
+                     "id": 1
+                 }],
+                 "signingAlgorithm": "sha256",
+                 "user": {
+                     "username": "jane",
+                     "active": true,
+                     "email": "jane@example.com",
+                     "id": 2
+                 },
+                 "active": true,
+                 "domains": [{
+                     "sensitive": false,
+                     "id": 1090,
+                     "name": "*.test.example.net"
+                 }],
+                 "replaces": [],
+                 "rotation": true,
+                 "rotationPolicy": {"name": "default"},
+                 "name": "WILDCARD.test.example.net-SymantecCorporation-20160603-20180112",
+                 "roles": [{
+                     "id": 464,
+                     "description": "This is a google group based role created by Lemur",
+                     "name": "joe@example.com"
+                 }],
+                 "san": null
+              }
+
+           :reqheader Authorization: OAuth token to authenticate
+           :statuscode 403: unauthenticated
+           :statuscode 200: no error
+        """
+        return service.upload(pending_certificate_id, **data)
+
+
 api.add_resource(PendingCertificatesList, '/pending_certificates', endpoint='pending_certificates')
 api.add_resource(PendingCertificates, '/pending_certificates/<int:pending_certificate_id>', endpoint='pending_certificate')
+api.add_resource(PendingCertificatesUpload, '/pending_certificates/<int:pending_certificate_id>/upload', endpoint='pendingCertificateUpload')
 api.add_resource(PendingCertificatePrivateKey, '/pending_certificates/<int:pending_certificate_id>/key', endpoint='privateKeyPendingCertificates')
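Calling the new endpoint from a script could look like this (illustrative only; the host, pending-certificate id, token, and the /api/1 prefix are placeholders or assumptions):

# Illustrative client call for the new upload endpoint.
import requests

resp = requests.post(
    "https://lemur.example.com/api/1/pending_certificates/1/upload",
    json={
        "body": "-----BEGIN CERTIFICATE-----...",
        "chain": "-----BEGIN CERTIFICATE-----...",
    },
    headers={"Authorization": "Bearer <oauth-token>"},
)
resp.raise_for_status()
print(resp.json()["name"])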
@@ -12,6 +12,8 @@ from lemur.plugins.base import Plugin, plugins
 class DestinationPlugin(Plugin):
     type = 'destination'
     requires_key = True
+    sync_as_source = False
+    sync_as_source_name = ''

     def upload(self, name, body, private_key, cert_chain, options, **kwargs):
         raise NotImplementedError
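Any destination plugin can now opt into being mirrored as a source by overriding these two class attributes; the AWS destination plugin further down does exactly that. A hypothetical third-party plugin would look roughly like this (class name, slug, and the import path are illustrative assumptions):

# Hypothetical plugin opting into sync-as-source; the two attributes are the point here.
from lemur.plugins.bases import DestinationPlugin

class ExampleVaultDestinationPlugin(DestinationPlugin):
    title = 'Example Vault'
    slug = 'example-vault-destination'
    description = 'Illustration only'
    version = '0.1'
    sync_as_source = True                          # mirror this destination as a source
    sync_as_source_name = 'example-vault-source'   # slug of the matching source plugin

    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        pass  # push the certificate to the destination system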
@@ -10,13 +10,21 @@ from dyn.tm.session import DynectSession
 from dyn.tm.zones import Node, Zone, get_all_zones
 from flask import current_app

+from lemur.extensions import metrics, sentry
+

 def get_dynect_session():
-    dynect_session = DynectSession(
-        current_app.config.get('ACME_DYN_CUSTOMER_NAME', ''),
-        current_app.config.get('ACME_DYN_USERNAME', ''),
-        current_app.config.get('ACME_DYN_PASSWORD', ''),
-    )
+    try:
+        dynect_session = DynectSession(
+            current_app.config.get('ACME_DYN_CUSTOMER_NAME', ''),
+            current_app.config.get('ACME_DYN_USERNAME', ''),
+            current_app.config.get('ACME_DYN_PASSWORD', ''),
+        )
+    except Exception as e:
+        sentry.captureException()
+        metrics.send('get_dynect_session_fail', 'counter', 1)
+        current_app.logger.debug("Unable to establish connection to Dyn", exc_info=True)
+        raise
     return dynect_session

@@ -30,10 +38,12 @@ def _has_dns_propagated(name, token):
         for txt_record in rdata.strings:
             txt_records.append(txt_record.decode("utf-8"))
     except dns.exception.DNSException:
+        metrics.send('has_dns_propagated_fail', 'counter', 1)
         return False

     for txt_record in txt_records:
         if txt_record == token:
+            metrics.send('has_dns_propagated_success', 'counter', 1)
             return True

     return False

@@ -46,10 +56,12 @@ def wait_for_dns_change(change_id, account_number=None):
         status = _has_dns_propagated(fqdn, token)
         current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status))
         if status:
+            metrics.send('wait_for_dns_change_success', 'counter', 1)
             break
         time.sleep(20)
     if not status:
         # TODO: Delete associated DNS text record here
+        metrics.send('wait_for_dns_change_fail', 'counter', 1)
         raise Exception("Unable to query DNS token for fqdn {}.".format(fqdn))
     return

@@ -67,6 +79,7 @@ def get_zone_name(domain):
         if z.name.count(".") > zone_name.count("."):
             zone_name = z.name
     if not zone_name:
+        metrics.send('dyn_no_zone_name', 'counter', 1)
         raise Exception("No Dyn zone found for domain: {}".format(domain))
     return zone_name

@@ -99,6 +112,8 @@ def create_txt_record(domain, token, account_number):
                 "Record already exists: {}".format(domain, token, e), exc_info=True
             )
         else:
+            metrics.send('create_txt_record_error', 'counter', 1)
+            sentry.captureException()
             raise

     change_id = (fqdn, token)

@@ -122,6 +137,8 @@ def delete_txt_record(change_id, account_number, domain, token):
     try:
         all_txt_records = node.get_all_records_by_type('TXT')
     except DynectGetError:
+        sentry.captureException()
+        metrics.send('delete_txt_record_error', 'counter', 1)
         # No Text Records remain or host is not in the zone anymore because all records have been deleted.
         return
     for txt_record in all_txt_records:

@@ -178,6 +195,7 @@ def get_authoritative_nameserver(domain):

     rcode = response.rcode()
     if rcode != dns.rcode.NOERROR:
+        metrics.send('get_authoritative_nameserver_error', 'counter', 1)
         if rcode == dns.rcode.NXDOMAIN:
             raise Exception('%s does not exist.' % sub)
         else:
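The same capture-exception / send-counter / log / re-raise sequence now appears in several of these functions; if it keeps spreading, it could be folded into a decorator along these lines (purely illustrative, not part of the PR):

# Illustrative decorator centralizing the sentry/metrics/log/re-raise pattern used above.
from functools import wraps

from flask import current_app
from lemur.extensions import metrics, sentry

def instrumented(metric_name):
    def decorator(fn):
        @wraps(fn)
        def wrapper(*args, **kwargs):
            try:
                return fn(*args, **kwargs)
            except Exception:
                sentry.captureException()
                metrics.send(metric_name, 'counter', 1)
                current_app.logger.debug("%s failed", fn.__name__, exc_info=True)
                raise
        return wrapper
    return decorator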
@@ -28,6 +28,7 @@ from lemur.authorizations import service as authorization_service
 from lemur.common.utils import generate_private_key
 from lemur.dns_providers import service as dns_provider_service
 from lemur.exceptions import InvalidAuthority, InvalidConfiguration, UnknownProvider
+from lemur.extensions import metrics, sentry
 from lemur.plugins import lemur_acme as acme
 from lemur.plugins.bases import IssuerPlugin
 from lemur.plugins.lemur_acme import cloudflare, dyn, route53

@@ -47,7 +48,9 @@ class AcmeHandler(object):
         try:
             self.all_dns_providers = dns_provider_service.get_all_dns_providers()
         except Exception as e:
-            current_app.logger.error("Unable to fetch DNS Providers: {}".format(e))
+            metrics.send('AcmeHandler_init_error', 'counter', 1)
+            sentry.captureException()
+            current_app.logger.error(f"Unable to fetch DNS Providers: {e}")
             self.all_dns_providers = []

     def find_dns_challenge(self, authorizations):

@@ -94,6 +97,7 @@ class AcmeHandler(object):
         current_app.logger.debug("Finalizing DNS challenge for {0}".format(authz_record.authz[0].body.identifier.value))
         dns_providers = self.dns_providers_for_domain.get(authz_record.host)
         if not dns_providers:
+            metrics.send('complete_dns_challenge_error_no_dnsproviders', 'counter', 1)
             raise Exception("No DNS providers found for domain: {}".format(authz_record.host))

         for dns_provider in dns_providers:

@@ -102,7 +106,15 @@ class AcmeHandler(object):
             account_number = dns_provider_options.get("account_id")
             dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)
             for change_id in authz_record.change_id:
-                dns_provider_plugin.wait_for_dns_change(change_id, account_number=account_number)
+                try:
+                    dns_provider_plugin.wait_for_dns_change(change_id, account_number=account_number)
+                except Exception:
+                    metrics.send('complete_dns_challenge_error', 'counter', 1)
+                    sentry.captureException()
+                    current_app.logger.debug(
+                        f"Unable to resolve DNS challenge for change_id: {change_id}, account_id: "
+                        f"{account_number}", exc_info=True)
+                    raise

             for dns_challenge in authz_record.dns_challenge:
                 response = dns_challenge.response(acme_client.client.net.key)

@@ -114,6 +126,7 @@ class AcmeHandler(object):
                 )

                 if not verified:
+                    metrics.send('complete_dns_challenge_verification_error', 'counter', 1)
                     raise ValueError("Failed verification")

                 time.sleep(5)

@@ -129,7 +142,9 @@ class AcmeHandler(object):
         try:
             orderr = acme_client.finalize_order(order, deadline)
         except AcmeError:
-            current_app.logger.error("Unable to resolve Acme order: {}".format(order), exc_info=True)
+            sentry.captureException()
+            metrics.send('request_certificate_error', 'counter', 1)
+            current_app.logger.error(f"Unable to resolve Acme order: {order}", exc_info=True)
             raise

         pem_certificate = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM,

@@ -196,6 +211,7 @@ class AcmeHandler(object):

         for domain in order_info.domains:
             if not self.dns_providers_for_domain.get(domain):
+                metrics.send('get_authorizations_no_dns_provider_for_domain', 'counter', 1)
                 raise Exception("No DNS providers found for domain: {}".format(domain))
             for dns_provider in self.dns_providers_for_domain[domain]:
                 dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)

@@ -284,6 +300,8 @@ class AcmeHandler(object):
             except Exception as e:
                 # If this fails, it's most likely because the record doesn't exist (It was already cleaned up)
                 # or we're not authorized to modify it.
+                metrics.send('cleanup_dns_challenges_error', 'counter', 1)
+                sentry.captureException()
                 pass

     def get_dns_provider(self, type):

@@ -378,12 +396,15 @@ class ACMEIssuerPlugin(IssuerPlugin):
         try:
             order = acme_client.new_order(pending_cert.csr)
         except WildcardUnsupportedError:
+            metrics.send('get_ordered_certificate_wildcard_unsupported', 'counter', 1)
             raise Exception("The currently selected ACME CA endpoint does"
                             " not support issuing wildcard certificates.")
         try:
             authorizations = self.acme.get_authorizations(acme_client, order, order_info)
         except ClientError:
-            current_app.logger.error("Unable to resolve pending cert: {}".format(pending_cert.name), exc_info=True)
+            sentry.captureException()
+            metrics.send('get_ordered_certificate_error', 'counter', 1)
+            current_app.logger.error(f"Unable to resolve pending cert: {pending_cert.name}", exc_info=True)
             return False

         authorizations = self.acme.finalize_authorizations(acme_client, authorizations)

@@ -418,6 +439,8 @@ class ACMEIssuerPlugin(IssuerPlugin):
             try:
                 order = acme_client.new_order(pending_cert.csr)
             except WildcardUnsupportedError:
+                sentry.captureException()
+                metrics.send('get_ordered_certificates_wildcard_unsupported_error', 'counter', 1)
                 raise Exception("The currently selected ACME CA endpoint does"
                                 " not support issuing wildcard certificates.")

@@ -430,7 +453,13 @@ class ACMEIssuerPlugin(IssuerPlugin):
                     "order": order,
                 })
             except (ClientError, ValueError, Exception) as e:
-                current_app.logger.error("Unable to resolve pending cert: {}".format(pending_cert), exc_info=True)
+                sentry.captureException()
+                metrics.send('get_ordered_certificates_pending_creation_error', 'counter', 1)
+                current_app.logger.error(f"Unable to resolve pending cert: {pending_cert}", exc_info=True)
+
+                error = e
+                if globals().get("order") and order:
+                    error += f" Order uri: {order.uri}"
                 certs.append({
                     "cert": False,
                     "pending_cert": pending_cert,

@@ -459,14 +488,17 @@ class ACMEIssuerPlugin(IssuerPlugin):
                     "pending_cert": entry["pending_cert"],
                 })
             except (PollError, AcmeError, Exception) as e:
+                sentry.captureException()
+                metrics.send('get_ordered_certificates_resolution_error', 'counter', 1)
                 order_url = order.uri
+                error = f"{e}. Order URI: {order_url}"
                 current_app.logger.error(
-                    "Unable to resolve pending cert: {}. "
-                    "Check out {} for more information.".format(pending_cert, order_url), exc_info=True)
+                    f"Unable to resolve pending cert: {pending_cert}. "
+                    f"Check out {order_url} for more information.", exc_info=True)
                 certs.append({
                     "cert": False,
                     "pending_cert": entry["pending_cert"],
-                    "last_error": e,
+                    "last_error": error,
                 })
                 # Ensure DNS records get deleted
                 self.acme.cleanup_dns_challenges(
@@ -149,47 +149,6 @@ def get_elb_endpoints_v2(account_number, region, elb_dict):
     return endpoints


-class AWSDestinationPlugin(DestinationPlugin):
-    title = 'AWS'
-    slug = 'aws-destination'
-    description = 'Allow the uploading of certificates to AWS IAM'
-    version = aws.VERSION
-
-    author = 'Kevin Glisson'
-    author_url = 'https://github.com/netflix/lemur'
-
-    options = [
-        {
-            'name': 'accountNumber',
-            'type': 'str',
-            'required': True,
-            'validation': '[0-9]{12}',
-            'helpMessage': 'Must be a valid AWS account number!',
-        },
-        {
-            'name': 'path',
-            'type': 'str',
-            'default': '/',
-            'helpMessage': 'Path to upload certificate.'
-        }
-    ]
-
-    # 'elb': {
-    #     'name': {'type': 'name'},
-    #     'region': {'type': 'str'},
-    #     'port': {'type': 'int'}
-    # }
-
-    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
-        iam.upload_cert(name, body, private_key,
-                        self.get_option('path', options),
-                        cert_chain=cert_chain,
-                        account_number=self.get_option('accountNumber', options))
-
-    def deploy(self, elb_name, account, region, certificate):
-        pass
-
-
 class AWSSourcePlugin(SourcePlugin):
     title = 'AWS'
     slug = 'aws-source'

@@ -266,6 +225,43 @@ class AWSSourcePlugin(SourcePlugin):
         iam.delete_cert(certificate.name, account_number=account_number)


+class AWSDestinationPlugin(DestinationPlugin):
+    title = 'AWS'
+    slug = 'aws-destination'
+    description = 'Allow the uploading of certificates to AWS IAM'
+    version = aws.VERSION
+    sync_as_source = True
+    sync_as_source_name = AWSSourcePlugin.slug
+
+    author = 'Kevin Glisson'
+    author_url = 'https://github.com/netflix/lemur'
+
+    options = [
+        {
+            'name': 'accountNumber',
+            'type': 'str',
+            'required': True,
+            'validation': '[0-9]{12}',
+            'helpMessage': 'Must be a valid AWS account number!',
+        },
+        {
+            'name': 'path',
+            'type': 'str',
+            'default': '/',
+            'helpMessage': 'Path to upload certificate.'
+        }
+    ]
+
+    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
+        iam.upload_cert(name, body, private_key,
+                        self.get_option('path', options),
+                        cert_chain=cert_chain,
+                        account_number=self.get_option('accountNumber', options))
+
+    def deploy(self, elb_name, account, region, certificate):
+        pass
+
+
 class S3DestinationPlugin(ExportDestinationPlugin):
     title = 'AWS-S3'
     slug = 'aws-s3'
@@ -1,246 +0,0 @@
-"""
-.. module: lemur.plugins.lemur_java.plugin
-    :platform: Unix
-    :copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
-    :license: Apache, see LICENSE for more details.
-
-.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
-"""
-import subprocess
-
-from flask import current_app
-
-from cryptography.fernet import Fernet
-
-from lemur.utils import mktempfile, mktemppath
-from lemur.plugins.bases import ExportPlugin
-from lemur.plugins import lemur_java as java
-from lemur.common.utils import parse_certificate
-from lemur.common.defaults import common_name
-
-
-def run_process(command):
-    """
-    Runs a given command with pOpen and wraps some
-    error handling around it.
-    :param command:
-    :return:
-    """
-    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    stdout, stderr = p.communicate()
-
-    if p.returncode != 0:
-        current_app.logger.debug(" ".join(command))
-        current_app.logger.error(stderr)
-        current_app.logger.error(stdout)
-        raise Exception(stderr)
-
-
-def split_chain(chain):
-    """
-    Split the chain into individual certificates for import into keystore
-
-    :param chain:
-    :return:
-    """
-    certs = []
-
-    if not chain:
-        return certs
-
-    lines = chain.split('\n')
-
-    cert = []
-    for line in lines:
-        cert.append(line + '\n')
-        if line == '-----END CERTIFICATE-----':
-            certs.append("".join(cert))
-            cert = []
-
-    return certs
-
-
-def create_truststore(cert, chain, jks_tmp, alias, passphrase):
-    assert isinstance(cert, str)
-    assert isinstance(chain, str)
-
-    with mktempfile() as cert_tmp:
-        with open(cert_tmp, 'w') as f:
-            f.write(cert)
-
-        run_process([
-            "keytool",
-            "-importcert",
-            "-file", cert_tmp,
-            "-keystore", jks_tmp,
-            "-alias", "{0}_cert".format(alias),
-            "-storepass", passphrase,
-            "-noprompt"
-        ])
-
-    # Import the entire chain
-    for idx, cert in enumerate(split_chain(chain)):
-        with mktempfile() as c_tmp:
-            with open(c_tmp, 'w') as f:
-                f.write(cert)
-
-            # Import signed cert in to JKS keystore
-            run_process([
-                "keytool",
-                "-importcert",
-                "-file", c_tmp,
-                "-keystore", jks_tmp,
-                "-alias", "{0}_cert_{1}".format(alias, idx),
-                "-storepass", passphrase,
-                "-noprompt"
-            ])
-
-
-def create_keystore(cert, chain, jks_tmp, key, alias, passphrase):
-    assert isinstance(cert, str)
-    assert isinstance(chain, str)
-    assert isinstance(key, str)
-
-    # Create PKCS12 keystore from private key and public certificate
-    with mktempfile() as cert_tmp:
-        with open(cert_tmp, 'w') as f:
-            if chain:
-                f.writelines([key.strip() + "\n", cert.strip() + "\n", chain.strip() + "\n"])
-            else:
-                f.writelines([key.strip() + "\n", cert.strip() + "\n"])
-
-        with mktempfile() as p12_tmp:
-            run_process([
-                "openssl",
-                "pkcs12",
-                "-export",
-                "-nodes",
-                "-name", alias,
-                "-in", cert_tmp,
-                "-out", p12_tmp,
-                "-password", "pass:{}".format(passphrase)
-            ])
-
-            # Convert PKCS12 keystore into a JKS keystore
-            run_process([
-                "keytool",
-                "-importkeystore",
-                "-destkeystore", jks_tmp,
-                "-srckeystore", p12_tmp,
-                "-srcstoretype", "pkcs12",
-                "-deststoretype", "JKS",
-                "-alias", alias,
-                "-srcstorepass", passphrase,
-                "-deststorepass", passphrase
-            ])
-
-
-class JavaTruststoreExportPlugin(ExportPlugin):
-    title = 'Java Truststore (JKS)'
-    slug = 'java-truststore-jks'
-    description = 'Attempts to generate a JKS truststore'
-    requires_key = False
-    version = java.VERSION
-
-    author = 'Kevin Glisson'
-    author_url = 'https://github.com/netflix/lemur'
-
-    options = [
-        {
-            'name': 'alias',
-            'type': 'str',
-            'required': False,
-            'helpMessage': 'Enter the alias you wish to use for the truststore.',
-        },
-        {
-            'name': 'passphrase',
-            'type': 'str',
-            'required': False,
-            'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this. Minimum length is 8.',
-            'validation': ''
-        },
-    ]
-
-    def export(self, body, chain, key, options, **kwargs):
-        """
-        Generates a Java Truststore
-
-        :param key:
-        :param chain:
-        :param body:
-        :param options:
-        :param kwargs:
-        """
-
-        if self.get_option('alias', options):
-            alias = self.get_option('alias', options)
-        else:
-            alias = "blah"
-
-        if self.get_option('passphrase', options):
-            passphrase = self.get_option('passphrase', options)
-        else:
-            passphrase = Fernet.generate_key().decode('utf-8')
-
-        with mktemppath() as jks_tmp:
-            create_truststore(body, chain, jks_tmp, alias, passphrase)
-
-            with open(jks_tmp, 'rb') as f:
-                raw = f.read()
-
-        return "jks", passphrase, raw
-
-
-class JavaKeystoreExportPlugin(ExportPlugin):
-    title = 'Java Keystore (JKS)'
-    slug = 'java-keystore-jks'
-    description = 'Attempts to generate a JKS keystore'
-    version = java.VERSION
-
-    author = 'Kevin Glisson'
-    author_url = 'https://github.com/netflix/lemur'
-
-    options = [
-        {
-            'name': 'passphrase',
-            'type': 'str',
-            'required': False,
-            'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this. Minimum length is 8.',
-            'validation': ''
-        },
-        {
-            'name': 'alias',
-            'type': 'str',
-            'required': False,
-            'helpMessage': 'Enter the alias you wish to use for the keystore.',
-        }
-    ]
-
-    def export(self, body, chain, key, options, **kwargs):
-        """
-        Generates a Java Keystore
-
-        :param key:
-        :param chain:
-        :param body:
-        :param options:
-        :param kwargs:
-        """
-
-        if self.get_option('passphrase', options):
-            passphrase = self.get_option('passphrase', options)
-        else:
-            passphrase = Fernet.generate_key().decode('utf-8')
-
-        if self.get_option('alias', options):
-            alias = self.get_option('alias', options)
-        else:
-            alias = common_name(parse_certificate(body))
-
-        with mktemppath() as jks_tmp:
-            create_keystore(body, chain, jks_tmp, key, alias, passphrase)
-
-            with open(jks_tmp, 'rb') as f:
-                raw = f.read()
-
-        return "jks", passphrase, raw
@@ -1,63 +0,0 @@
-import pytest
-
-from lemur.tests.vectors import INTERNAL_CERTIFICATE_A_STR, INTERNAL_PRIVATE_KEY_A_STR
-
-
-@pytest.mark.skip(reason="no way of currently testing this")
-def test_export_truststore(app):
-    from lemur.plugins.base import plugins
-
-    p = plugins.get('java-truststore-jks')
-    options = [{'name': 'passphrase', 'value': 'test1234'}]
-    actual = p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
-
-    assert actual[0] == 'jks'
-    assert actual[1] == 'test1234'
-    assert isinstance(actual[2], bytes)
-
-
-@pytest.mark.skip(reason="no way of currently testing this")
-def test_export_truststore_default_password(app):
-    from lemur.plugins.base import plugins
-
-    p = plugins.get('java-truststore-jks')
-    options = []
-    actual = p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
-
-    assert actual[0] == 'jks'
-    assert isinstance(actual[1], str)
-    assert isinstance(actual[2], bytes)
-
-
-@pytest.mark.skip(reason="no way of currently testing this")
-def test_export_keystore(app):
-    from lemur.plugins.base import plugins
-
-    p = plugins.get('java-keystore-jks')
-    options = [{'name': 'passphrase', 'value': 'test1234'}]
-
-    with pytest.raises(Exception):
-        p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
-
-    actual = p.export(INTERNAL_CERTIFICATE_A_STR, "", INTERNAL_PRIVATE_KEY_A_STR, options)
-
-    assert actual[0] == 'jks'
-    assert actual[1] == 'test1234'
-    assert isinstance(actual[2], bytes)
-
-
-@pytest.mark.skip(reason="no way of currently testing this")
-def test_export_keystore_default_password(app):
-    from lemur.plugins.base import plugins
-
-    p = plugins.get('java-keystore-jks')
-    options = []
-
-    with pytest.raises(Exception):
-        p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
-
-    actual = p.export(INTERNAL_CERTIFICATE_A_STR, "", INTERNAL_PRIVATE_KEY_A_STR, options)
-
-    assert actual[0] == 'jks'
-    assert isinstance(actual[1], str)
-    assert isinstance(actual[2], bytes)
@ -0,0 +1,140 @@
|
||||||
|
"""
|
||||||
|
.. module: lemur.plugins.lemur_jks.plugin
|
||||||
|
:platform: Unix
|
||||||
|
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
|
||||||
|
:license: Apache, see LICENSE for more details.
|
||||||
|
|
||||||
|
.. moduleauthor:: Marti Raudsepp <marti@juffo.org>
|
||||||
|
"""
|
||||||
|
|
||||||
|
from cryptography.fernet import Fernet
|
||||||
|
from cryptography.hazmat.primitives import serialization
|
||||||
|
from jks import PrivateKeyEntry, KeyStore, TrustedCertEntry
|
||||||
|
|
||||||
|
from lemur.common.defaults import common_name
|
||||||
|
from lemur.common.utils import parse_certificate, parse_cert_chain, parse_private_key
|
||||||
|
from lemur.plugins import lemur_jks as jks
|
||||||
|
from lemur.plugins.bases import ExportPlugin
|
||||||
|
|
||||||
|
|
||||||
|
def cert_chain_as_der(cert, chain):
|
||||||
|
"""Return a certificate and its chain in a list format, as expected by pyjks."""
|
||||||
|
|
||||||
|
certs = [parse_certificate(cert)]
|
||||||
|
certs.extend(parse_cert_chain(chain))
|
||||||
|
# certs (list) – A list of certificates, as byte strings. The first one should be the one belonging to the private
|
||||||
|
# key, the others the chain (in correct order).
|
||||||
|
return [cert.public_bytes(encoding=serialization.Encoding.DER) for cert in certs]
|
||||||
|
|
||||||
|
|
||||||
|
def create_truststore(cert, chain, alias, passphrase):
|
||||||
|
entries = []
|
||||||
|
for idx, cert_bytes in enumerate(cert_chain_as_der(cert, chain)):
|
||||||
|
# The original cert gets name <ALIAS>_cert, first chain element is <ALIAS>_cert_1, etc.
|
||||||
|
cert_alias = alias + '_cert' + ('_{}'.format(idx) if idx else '')
|
||||||
|
entries.append(TrustedCertEntry.new(cert_alias, cert_bytes))
|
||||||
|
|
||||||
|
return KeyStore.new('jks', entries).saves(passphrase)
|
||||||
|
|
||||||
|
|
def create_keystore(cert, chain, key, alias, passphrase):
    certs_bytes = cert_chain_as_der(cert, chain)
    key_bytes = parse_private_key(key).private_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption()
    )
    entry = PrivateKeyEntry.new(alias, certs_bytes, key_bytes)

    return KeyStore.new('jks', [entry]).saves(passphrase)


class JavaTruststoreExportPlugin(ExportPlugin):
    title = 'Java Truststore (JKS)'
    slug = 'java-truststore-jks'
    description = 'Generates a JKS truststore'
    requires_key = False
    version = jks.VERSION

    author = 'Marti Raudsepp'
    author_url = 'https://github.com/intgr'

    options = [
        {
            'name': 'alias',
            'type': 'str',
            'required': False,
            'helpMessage': 'Enter the alias you wish to use for the truststore.',
        },
        {
            'name': 'passphrase',
            'type': 'str',
            'required': False,
            'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.',
            'validation': ''
        },
    ]

    def export(self, body, chain, key, options, **kwargs):
        """
        Generates a Java Truststore
        """

        if self.get_option('alias', options):
            alias = self.get_option('alias', options)
        else:
            alias = common_name(parse_certificate(body))

        if self.get_option('passphrase', options):
            passphrase = self.get_option('passphrase', options)
        else:
            passphrase = Fernet.generate_key().decode('utf-8')

        raw = create_truststore(body, chain, alias, passphrase)

        return 'jks', passphrase, raw


class JavaKeystoreExportPlugin(ExportPlugin):
    title = 'Java Keystore (JKS)'
    slug = 'java-keystore-jks'
    description = 'Generates a JKS keystore'
    version = jks.VERSION

    author = 'Marti Raudsepp'
    author_url = 'https://github.com/intgr'

    options = [
        {
            'name': 'passphrase',
            'type': 'str',
            'required': False,
            'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.',
            'validation': ''
        },
        {
            'name': 'alias',
            'type': 'str',
            'required': False,
            'helpMessage': 'Enter the alias you wish to use for the keystore.',
        }
    ]

    def export(self, body, chain, key, options, **kwargs):
        """
        Generates a Java Keystore
        """

        if self.get_option('passphrase', options):
            passphrase = self.get_option('passphrase', options)
        else:
            passphrase = Fernet.generate_key().decode('utf-8')

        if self.get_option('alias', options):
            alias = self.get_option('alias', options)
        else:
            alias = common_name(parse_certificate(body))

        raw = create_keystore(body, chain, key, alias, passphrase)

        return 'jks', passphrase, raw
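As background for the serialization choices in create_keystore() above, here is a minimal, self-contained sketch (using only the cryptography package; the generated key below is illustrative and not part of Lemur) of producing private-key bytes in the same shape the plugin hands to pyjks: unencrypted PKCS#8, DER-encoded.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

# Illustrative throwaway key; Lemur passes the plugin an existing PEM key instead.
key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())

# Same choices as create_keystore(): DER encoding, PKCS#8 format, no encryption.
key_bytes = key.private_bytes(
    encoding=serialization.Encoding.DER,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption(),
)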
@@ -0,0 +1,96 @@
import pytest
from jks import KeyStore, TrustedCertEntry, PrivateKeyEntry

from lemur.tests.vectors import INTERNAL_CERTIFICATE_A_STR, SAN_CERT_STR, INTERMEDIATE_CERT_STR, ROOTCA_CERT_STR, \
    SAN_CERT_KEY


def test_export_truststore(app):
    from lemur.plugins.base import plugins

    p = plugins.get('java-truststore-jks')
    options = [
        {'name': 'passphrase', 'value': 'hunter2'},
        {'name': 'alias', 'value': 'AzureDiamond'},
    ]
    chain = INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR
    ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options)

    assert ext == 'jks'
    assert password == 'hunter2'
    assert isinstance(raw, bytes)

    ks = KeyStore.loads(raw, 'hunter2')
    assert ks.store_type == 'jks'
    # JKS lower-cases alias strings
    assert ks.entries.keys() == {'azurediamond_cert', 'azurediamond_cert_1', 'azurediamond_cert_2'}
    assert isinstance(ks.entries['azurediamond_cert'], TrustedCertEntry)


def test_export_truststore_defaults(app):
    from lemur.plugins.base import plugins

    p = plugins.get('java-truststore-jks')
    options = []
    ext, password, raw = p.export(INTERNAL_CERTIFICATE_A_STR, '', '', options)

    assert ext == 'jks'
    assert isinstance(password, str)
    assert isinstance(raw, bytes)

    ks = KeyStore.loads(raw, password)
    assert ks.store_type == 'jks'
    # JKS lower-cases alias strings
    assert ks.entries.keys() == {'acommonname_cert'}
    assert isinstance(ks.entries['acommonname_cert'], TrustedCertEntry)


def test_export_keystore(app):
    from lemur.plugins.base import plugins

    p = plugins.get('java-keystore-jks')
    options = [
        {'name': 'passphrase', 'value': 'hunter2'},
        {'name': 'alias', 'value': 'AzureDiamond'},
    ]

    chain = INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR
    with pytest.raises(Exception):
        p.export(INTERNAL_CERTIFICATE_A_STR, chain, '', options)

    ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options)

    assert ext == 'jks'
    assert password == 'hunter2'
    assert isinstance(raw, bytes)

    ks = KeyStore.loads(raw, password)
    assert ks.store_type == 'jks'
    # JKS lower-cases alias strings
    assert ks.entries.keys() == {'azurediamond'}
    entry = ks.entries['azurediamond']
    assert isinstance(entry, PrivateKeyEntry)
    assert len(entry.cert_chain) == 3  # Cert and chain were provided


def test_export_keystore_defaults(app):
    from lemur.plugins.base import plugins

    p = plugins.get('java-keystore-jks')
    options = []

    with pytest.raises(Exception):
        p.export(INTERNAL_CERTIFICATE_A_STR, '', '', options)

    ext, password, raw = p.export(SAN_CERT_STR, '', SAN_CERT_KEY, options)

    assert ext == 'jks'
    assert isinstance(password, str)
    assert isinstance(raw, bytes)

    ks = KeyStore.loads(raw, password)
    assert ks.store_type == 'jks'
    assert ks.entries.keys() == {'san.example.org'}
    entry = ks.entries['san.example.org']
    assert isinstance(entry, PrivateKeyEntry)
    assert len(entry.cert_chain) == 1  # Only cert itself, no chain was provided
@@ -9,6 +9,8 @@
 .. moduleauthor:: Christopher Jolley <chris@alwaysjolley.com>
 """
+import os
+import re
 import hvac
 from flask import current_app

@@ -37,6 +39,17 @@ class VaultDestinationPlugin(DestinationPlugin):
             'validation': '^https?://[a-zA-Z0-9.:-]+$',
             'helpMessage': 'Valid URL to Hashi Vault instance'
         },
+        {
+            'name': 'vaultKvApiVersion',
+            'type': 'select',
+            'value': '2',
+            'available': [
+                '1',
+                '2'
+            ],
+            'required': True,
+            'helpMessage': 'Version of the Vault KV API to use'
+        },
         {
             'name': 'vaultAuthTokenFile',
             'type': 'str',

@@ -76,6 +89,14 @@ class VaultDestinationPlugin(DestinationPlugin):
             ],
             'required': True,
             'helpMessage': 'Bundle the chain into the certificate'
+        },
+        {
+            'name': 'sanFilter',
+            'type': 'str',
+            'value': '.*',
+            'required': False,
+            'validation': '.*',
+            'helpMessage': 'Valid regex filter'
         }
     ]

@@ -98,17 +119,35 @@ class VaultDestinationPlugin(DestinationPlugin):
         path = self.get_option('vaultPath', options)
         bundle = self.get_option('bundleChain', options)
         obj_name = self.get_option('objectName', options)
+        api_version = self.get_option('vaultKvApiVersion', options)
+        san_filter = self.get_option('sanFilter', options)
+
+        san_list = get_san_list(body)
+        if san_filter:
+            for san in san_list:
+                try:
+                    if not re.match(san_filter, san, flags=re.IGNORECASE):
+                        current_app.logger.exception(
+                            "Exception uploading secret to vault: invalid SAN: {}".format(san),
+                            exc_info=True)
+                        os._exit(1)
+                except re.error:
+                    current_app.logger.exception(
+                        "Exception compiling regex filter: invalid filter",
+                        exc_info=True)

         with open(token_file, 'r') as file:
             token = file.readline().rstrip('\n')

         client = hvac.Client(url=url, token=token)
+        client.secrets.kv.default_kv_version = api_version

         if obj_name:
             path = '{0}/{1}'.format(path, obj_name)
         else:
             path = '{0}/{1}'.format(path, cname)

-        secret = get_secret(url, token, mount, path)
+        secret = get_secret(client, mount, path)
         secret['data'][cname] = {}

         if bundle == 'Nginx' and cert_chain:

@@ -119,12 +158,12 @@ class VaultDestinationPlugin(DestinationPlugin):
         else:
             secret['data'][cname]['crt'] = body
             secret['data'][cname]['key'] = private_key
-        san_list = get_san_list(body)
         if isinstance(san_list, list):
             secret['data'][cname]['san'] = san_list
         try:
-            client.secrets.kv.v1.create_or_update_secret(
-                path=path, mount_point=mount, secret=secret['data'])
+            client.secrets.kv.create_or_update_secret(
+                path=path, mount_point=mount, secret=secret['data']
+            )
         except ConnectionError as err:
             current_app.logger.exception(
                 "Exception uploading secret to vault: {0}".format(err), exc_info=True)

@@ -144,12 +183,14 @@ def get_san_list(body):
     return san_list


-def get_secret(url, token, mount, path):
+def get_secret(client, mount, path):
    """ retrieve existing data from mount path and return dictionary """
    result = {'data': {}}
    try:
-        client = hvac.Client(url=url, token=token)
+        if client.secrets.kv.default_kv_version == '1':
             result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount)
+        else:
+            result = client.secrets.kv.v2.read_secret_version(path=path, mount_point=mount)
    except ConnectionError:
        pass
    finally:
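The sanFilter option added above is applied with re.match, case-insensitively, to every SAN in the certificate before anything is written to Vault. A small standalone sketch of that check; the filter value and SAN list below are invented for illustration, and the real plugin logs and aborts rather than printing:

import re

san_filter = r'^[a-z0-9.-]+\.example\.com$'   # hypothetical filter value
san_list = ['api.example.com', 'evil.attacker.net']

for san in san_list:
    try:
        if not re.match(san_filter, san, flags=re.IGNORECASE):
            print('rejected SAN: {}'.format(san))  # the plugin logs this and exits
    except re.error:
        print('invalid regex filter: {}'.format(san_filter))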
@@ -18,4 +18,14 @@ def get_plugin_option(name, options):
     """
     for o in options:
         if o.get('name') == name:
-            return o['value']
+            return o.get('value', o.get('default'))
+
+
+def set_plugin_option(name, value, options):
+    """
+    Set value for option name for options dict.
+
+    :param options:
+    """
+    for o in options:
+        if o.get('name') == name:
+            o.update({'value': value})
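A quick illustration of the behaviour change in get_plugin_option, together with the new set_plugin_option; the options list below is hypothetical:

from lemur.plugins.utils import get_plugin_option, set_plugin_option

options = [
    {'name': 'accountNumber', 'default': '111111111111'},  # hypothetical option entry
]

# With no 'value' key present, the lookup now falls back to 'default' instead of raising KeyError.
assert get_plugin_option('accountNumber', options) == '111111111111'

set_plugin_option('accountNumber', '222222222222', options)
assert get_plugin_option('accountNumber', options) == '222222222222'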
@@ -6,6 +6,7 @@
 .. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
 """
 import arrow
+import copy

 from flask import current_app

@@ -21,6 +22,7 @@ from lemur.common.utils import find_matching_certificates_by_hash, parse_certifi
 from lemur.common.defaults import serial

 from lemur.plugins.base import plugins
+from lemur.plugins.utils import get_plugin_option, set_plugin_option


 def certificate_create(certificate, source):

@@ -256,3 +258,35 @@ def render(args):
     query = database.filter(query, Source, terms)

     return database.sort_and_page(query, Source, args)
+
+
+def add_aws_destination_to_sources(dst):
+    """
+    Given a destination, check whether it can be added as a source, and add it if it is not already one.
+    We identify qualified destinations based on the sync_as_source attribute of the plugin.
+    The destination's sync_as_source_name reveals the name of the suitable source plugin.
+    We rely on account numbers to avoid duplicates.
+    :return: True if the destination was added as a source, False otherwise
+    """
+    # a set of all account numbers already available as sources
+    src_accounts = set()
+    sources = get_all()
+    for src in sources:
+        src_accounts.add(get_plugin_option('accountNumber', src.options))
+
+    # check whether the destination qualifies and is not already registered
+    destination_plugin = plugins.get(dst.plugin_name)
+    account_number = get_plugin_option('accountNumber', dst.options)
+    if account_number is not None and \
+            destination_plugin.sync_as_source is not None and \
+            destination_plugin.sync_as_source and \
+            (account_number not in src_accounts):
+        src_options = copy.deepcopy(plugins.get(destination_plugin.sync_as_source_name).options)
+        set_plugin_option('accountNumber', account_number, src_options)
+        create(label=dst.label,
+               plugin_name=destination_plugin.sync_as_source_name,
+               options=src_options,
+               description=dst.description)
+        return True
+
+    return False
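A rough usage sketch for add_aws_destination_to_sources; the Destination stand-in, the plugin slug, and the module path comment are invented here purely to show the expected shape of the argument:

from collections import namedtuple

# Hypothetical stand-in for a Destination model instance.
Destination = namedtuple('Destination', ['label', 'plugin_name', 'options', 'description'])

dst = Destination(
    label='prod-account',
    plugin_name='aws-destination',  # assumed slug of an AWS destination plugin
    options=[{'name': 'accountNumber', 'value': '111111111111'}],
    description='Production AWS account',
)

# Returns True only when the plugin declares sync_as_source and the account
# number is not already registered as a source.
added = add_aws_destination_to_sources(dst)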
@@ -62,6 +62,19 @@
               a valid certificate.</p>
         </div>
       </div>
+      <div class="form-group"
+           ng-class="{'has-error': uploadForm.csr.$invalid&&uploadForm.csr.$dirty, 'has-success': !uploadForm.csr.$invalid&&uploadForm.csr.$dirty}">
+        <label class="control-label col-sm-2">
+          Certificate Signing Request (CSR)
+        </label>
+        <div class="col-sm-10">
+          <textarea name="csr" ng-model="certificate.csr" placeholder="PEM encoded string..."
+                    class="form-control"
+                    ng-pattern="/^-----BEGIN CERTIFICATE REQUEST-----/"></textarea>
+          <p ng-show="uploadForm.csr.$invalid && !uploadForm.csr.$pristine"
+             class="help-block">Enter a valid certificate signing request.</p>
+        </div>
+      </div>
       <div class="form-group"
            ng-class="{'has-error': uploadForm.owner.$invalid&&uploadform.intermediateCert.$dirty, 'has-success': !uploadForm.intermediateCert.$invalid&&uploadForm.intermediateCert.$dirty}">
         <label class="control-label col-sm-2">
@@ -0,0 +1,34 @@
'use strict';

angular.module('lemur')
  .controller('PendingCertificateUploadController', function ($scope, $uibModalInstance, PendingCertificateApi, PendingCertificateService, toaster, uploadId) {
    PendingCertificateApi.get(uploadId).then(function (pendingCertificate) {
      $scope.pendingCertificate = pendingCertificate;
    });

    $scope.upload = PendingCertificateService.upload;
    $scope.save = function (pendingCertificate) {
      PendingCertificateService.upload(pendingCertificate).then(
        function () {
          toaster.pop({
            type: 'success',
            title: pendingCertificate.name,
            body: 'Successfully uploaded!'
          });
          $uibModalInstance.close();
        },
        function (response) {
          toaster.pop({
            type: 'error',
            title: pendingCertificate.name,
            body: 'Failed to upload ' + response.data.message,
            timeout: 100000
          });
        });
    };

    $scope.cancel = function () {
      $uibModalInstance.dismiss('cancel');
    };

  });
@@ -0,0 +1,41 @@
<div class="modal-header">
  <button type="button" class="close" ng-click="cancel()" aria-label="Close"><span aria-hidden="true">×</span></button>
  <h3 class="modal-title">Import certificate <span class="text-muted"><small>{{ pendingCertificate.name }}</small></span></h3>
</div>

<div class="modal-body">
  <form name="uploadForm" class="form-horizontal" role="form" novalidate>
    <div class="form-group"
         ng-class="{'has-error': uploadForm.publicCert.$invalid, 'has-success': !uploadForm.publicCert.$invalid&&uploadForm.publicCert.$dirty}">
      <label class="control-label col-sm-2">
        Public Certificate
      </label>
      <div class="col-sm-10">
        <textarea name="publicCert" ng-model="pendingCertificate.body" placeholder="PEM encoded string..."
                  class="form-control" ng-pattern="/^-----BEGIN CERTIFICATE-----/" required></textarea>
        <p ng-show="uploadForm.publicCert.$invalid && !uploadForm.publicCert.$pristine" class="help-block">Enter
          a valid certificate.</p>
      </div>
    </div>
    <div class="form-group"
         ng-class="{'has-error': uploadForm.owner.$invalid&&uploadform.intermediateCert.$dirty, 'has-success': !uploadForm.intermediateCert.$invalid&&uploadForm.intermediateCert.$dirty}">
      <label class="control-label col-sm-2">
        Intermediate Certificate
      </label>
      <div class="col-sm-10">
        <textarea name="intermediateCert" ng-model="pendingCertificate.chain"
                  placeholder="PEM encoded string..." class="form-control"
                  ng-pattern="/^-----BEGIN CERTIFICATE-----/"></textarea>
        <p ng-show="uploadForm.intermediateCert.$invalid && !uploadForm.intermediateCert.$pristine"
           class="help-block">Enter a valid certificate.</p>
      </div>
    </div>
  </form>
</div>
<div class="modal-footer">
  <button type="submit" ng-click="save(pendingCertificate)" ng-disabled="uploadForm.$invalid" class="btn btn-success">
    Import
  </button>
  <button ng-click="cancel()" class="btn btn-danger">Cancel</button>
</div>
</div>
@@ -245,5 +245,9 @@ angular.module('lemur')
       return pending_certificate.customOperation('remove', null, {}, {'Content-Type': 'application/json'}, options);
     };

+    PendingCertificateService.upload = function (pending_certificate) {
+      return pending_certificate.customPOST({'body': pending_certificate.body, 'chain': pending_certificate.chain}, 'upload');
+    };
+
     return PendingCertificateService;
   });
@@ -99,4 +99,23 @@ angular.module('lemur')
         $scope.pendingCertificateTable.reload();
       });
     };

+    $scope.upload = function (pendingCertificateId) {
+      var uibModalInstance = $uibModal.open({
+        animation: true,
+        controller: 'PendingCertificateUploadController',
+        templateUrl: '/angular/pending_certificates/pending_certificate/upload.tpl.html',
+        size: 'lg',
+        backdrop: 'static',
+        resolve: {
+          uploadId: function () {
+            return pendingCertificateId;
+          }
+        }
+      });
+      uibModalInstance.result.then(function () {
+        $scope.pendingCertificateTable.reload();
+      });
+    };
+
   });
@@ -51,6 +51,7 @@
         <ul class="dropdown-menu">
           <li><a href ng-click="edit(pendingCertificate.id)">Edit</a></li>
           <li><a href ng-click="cancel(pendingCertificate.id)">Cancel</a></li>
+          <li><a href ng-click="upload(pendingCertificate.id)">Upload</a></li>
         </ul>
       </div>
     </div>
@@ -7,17 +7,18 @@ from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import hashes
 from flask import current_app
 from flask_principal import identity_changed, Identity
+from sqlalchemy.sql import text

 from lemur import create_app
 from lemur.common.utils import parse_private_key
 from lemur.database import db as _db
 from lemur.auth.service import create_token
-from lemur.tests.vectors import SAN_CERT_KEY, INTERMEDIATE_KEY
+from lemur.tests.vectors import SAN_CERT_KEY, INTERMEDIATE_KEY, ROOTCA_CERT_STR, ROOTCA_KEY

 from .factories import ApiKeyFactory, AuthorityFactory, NotificationFactory, DestinationFactory, \
     CertificateFactory, UserFactory, RoleFactory, SourceFactory, EndpointFactory, \
     RotationPolicyFactory, PendingCertificateFactory, AsyncAuthorityFactory, InvalidCertificateFactory, \
-    CryptoAuthorityFactory
+    CryptoAuthorityFactory, CACertificateFactory


 def pytest_runtest_setup(item):

@@ -55,6 +56,7 @@ def app(request):
 @pytest.yield_fixture(scope="session")
 def db(app, request):
     _db.drop_all()
+    _db.engine.execute(text('CREATE EXTENSION IF NOT EXISTS pg_trgm'))
     _db.create_all()

     _db.app = app

@@ -170,6 +172,25 @@ def pending_certificate(session):
     return p


+@pytest.fixture
+def pending_certificate_from_full_chain_ca(session):
+    u = UserFactory()
+    a = AuthorityFactory()
+    p = PendingCertificateFactory(user=u, authority=a)
+    session.commit()
+    return p
+
+
+@pytest.fixture
+def pending_certificate_from_partial_chain_ca(session):
+    u = UserFactory()
+    c = CACertificateFactory(body=ROOTCA_CERT_STR, private_key=ROOTCA_KEY, chain=None)
+    a = AuthorityFactory(authority_certificate=c)
+    p = PendingCertificateFactory(user=u, authority=a)
+    session.commit()
+    return p
+
+
 @pytest.fixture
 def invalid_certificate(session):
     u = UserFactory()
@@ -18,7 +18,7 @@ from lemur.domains.models import Domain


 from lemur.tests.vectors import VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, VALID_USER_HEADER_TOKEN, CSR_STR, \
-    INTERMEDIATE_CERT_STR, SAN_CERT_STR, SAN_CERT_KEY, ROOTCA_KEY, ROOTCA_CERT_STR
+    INTERMEDIATE_CERT_STR, SAN_CERT_STR, SAN_CERT_CSR, SAN_CERT_KEY, ROOTCA_KEY, ROOTCA_CERT_STR


 def test_get_or_increase_name(session, certificate):

@@ -284,6 +284,31 @@ def test_certificate_input_with_extensions(client, authority):
     assert not errors


+def test_certificate_input_schema_parse_csr(authority):
+    from lemur.certificates.schemas import CertificateInputSchema
+
+    test_san_dns = 'foobar.com'
+    extensions = {'sub_alt_names': {'names': x509.SubjectAlternativeName([x509.DNSName(test_san_dns)])}}
+    csr, private_key = create_csr(owner='joe@example.com', common_name='ACommonName', organization='test',
+                                  organizational_unit='Meters', country='NL', state='Noord-Holland',
+                                  location='Amsterdam', key_type='RSA2048', extensions=extensions)
+
+    input_data = {
+        'commonName': 'test.example.com',
+        'owner': 'jim@example.com',
+        'authority': {'id': authority.id},
+        'description': 'testtestest',
+        'csr': csr,
+        'dnsProvider': None,
+    }
+
+    data, errors = CertificateInputSchema().load(input_data)
+
+    for san in data['extensions']['sub_alt_names']['names']:
+        assert san.value == test_san_dns
+    assert not errors
+
+
 def test_certificate_out_of_range_date(client, authority):
     from lemur.certificates.schemas import CertificateInputSchema
     input_data = {

@@ -456,6 +481,7 @@ def test_certificate_upload_schema_ok(client):
         'body': SAN_CERT_STR,
         'privateKey': SAN_CERT_KEY,
         'chain': INTERMEDIATE_CERT_STR,
+        'csr': SAN_CERT_CSR,
         'external_id': '1234',
     }
     data, errors = CertificateUploadInputSchema().load(data)
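The schema test above relies on the CSR's subjectAltName extension being folded into the submitted extensions. As background, a minimal sketch of reading DNS SANs out of a PEM CSR with the cryptography package; the helper name is invented here and is not necessarily how Lemur's schema does it internally:

from cryptography import x509
from cryptography.hazmat.backends import default_backend


def dns_names_from_csr(pem_csr):
    """Return the DNS names in a PEM CSR's subjectAltName extension, or [] if absent."""
    csr = x509.load_pem_x509_csr(pem_csr.encode('utf-8'), default_backend())
    try:
        ext = csr.extensions.get_extension_for_class(x509.SubjectAlternativeName)
    except x509.ExtensionNotFound:
        return []
    return ext.value.get_values_for_type(x509.DNSName)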
@@ -2,6 +2,7 @@ import json

 import pytest

+from marshmallow import ValidationError
 from lemur.pending_certificates.views import *  # noqa
 from .vectors import CSR_STR, INTERMEDIATE_CERT_STR, VALID_ADMIN_API_TOKEN, VALID_ADMIN_HEADER_TOKEN, \
     VALID_USER_HEADER_TOKEN, WILDCARD_CERT_STR

@@ -50,3 +51,44 @@ def test_pending_cancel(client, pending_certificate, token, status):
     assert client.delete(api.url_for(PendingCertificates, pending_certificate_id=pending_certificate.id),
                          data=json.dumps({'note': "unit test", 'send_email': False}),
                          headers=token).status_code == status
+
+
+def test_pending_upload(pending_certificate_from_full_chain_ca):
+    from lemur.pending_certificates.service import upload
+    from lemur.certificates.service import get
+
+    cert = {'body': WILDCARD_CERT_STR,
+            'chain': None,
+            'external_id': None
+            }
+
+    pending_cert = upload(pending_certificate_from_full_chain_ca.id, **cert)
+    assert pending_cert.resolved
+    assert get(pending_cert.resolved_cert_id)
+
+
+def test_pending_upload_with_chain(pending_certificate_from_partial_chain_ca):
+    from lemur.pending_certificates.service import upload
+    from lemur.certificates.service import get
+
+    cert = {'body': WILDCARD_CERT_STR,
+            'chain': INTERMEDIATE_CERT_STR,
+            'external_id': None
+            }
+
+    pending_cert = upload(pending_certificate_from_partial_chain_ca.id, **cert)
+    assert pending_cert.resolved
+    assert get(pending_cert.resolved_cert_id)
+
+
+def test_invalid_pending_upload_with_chain(pending_certificate_from_partial_chain_ca):
+    from lemur.pending_certificates.service import upload
+
+    cert = {'body': WILDCARD_CERT_STR,
+            'chain': None,
+            'external_id': None
+            }
+    with pytest.raises(ValidationError) as err:
+        upload(pending_certificate_from_partial_chain_ca.id, **cert)
+    assert str(err.value).startswith(
+        'Incorrect chain certificate(s) provided: \'*.wild.example.org\' is not signed by \'LemurTrust Unittests Root CA 2018')
@@ -137,6 +137,26 @@ eMVHHbWm1CpGO294R+vMBv4jcuhIBOx63KZE4VaoJuaazF6TE5czDw==


 #: CN=san.example.org, issued by LemurTrust Unittests Class 1 CA 2018
+SAN_CERT_CSR = """\
+-----BEGIN CERTIFICATE REQUEST-----
+MIICvTCCAaUCAQAweDELMAkGA1UEBhMCRUUxDDAKBgNVBAgMA04vQTEOMAwGA1UE
+BwwFRWFydGgxGDAWBgNVBAoMD0RhbmllbCBTYW4gJiBjbzEXMBUGA1UECwwOS2Fy
+YXRlIExlc3NvbnMxGDAWBgNVBAMMD3Nhbi5leGFtcGxlLm9yZzCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMia9BcpypZUU9xJoknzdEp+AevQE93XSAyl
+IlXji80ZlYS/T/mVWtu6hNwz2IJDBFh6nPaHT1Ud/AI4YanDMa+fF4KJxzlkKPbY
+quWx4EOjTZ2sFBBCivwxlo1So8r5Hf4NZ9Ewu4AIma3zmk+dzxJTpnWbTIFJGsDG
+LwJO9iu6uqf79VdYkGELCusq3dyF2j2DNDiGHoRcQYFMMhDKR6uYmCTYvwjf0+sf
+6k1zk2EK1X+ZWUyjP+Nl2NB6bpL0TydF75fuplWROczceiO6BKO4YT2uNPdF4BAH
+p/kQCkqnjw5FCX7PONRT4wTW/AjDkt5WOgY+AB90zQBPxvXWbUMCAwEAAaAAMA0G
+CSqGSIb3DQEBCwUAA4IBAQAFYgEafwRmsqdK1i1xrLFYbNNLkzmAZyL+6gXUBVIJ
+TbGVVWSNNIcEmHIX8O9X4lN52qDYWOsxH/OKPVxpXqoHm/ztczFlte76wOYg+VAS
+yK8DwQRP/+n+j6J40o1cZwnilPWqHgee5zbIL7lpCVxuFDofWpskwP5PLbxibFq8
+4TWynhjKKUw4+q4h4iCHG3PQhbV0ExWOyqX05QyDtJdkEwgJUWz1m9caHU2Jl7kX
+5bWKOtXORpCYA7ed3WqktKQIxBD6vCVbQ+LuLZPYeWzGHYjfOejL6usD32KmNa2E
+ZhDsC0fjqSX0FJKz6gOhP88bkbbapyHuGB71o2dwhCKV
+-----END CERTIFICATE REQUEST-----
+"""
+
 SAN_CERT_STR = """\
 -----BEGIN CERTIFICATE-----
 MIIESjCCAzKgAwIBAgIRAK/y20+NLU2OgPo4KuJ8IzMwDQYJKoZIhvcNAQELBQAw
@@ -7,18 +7,18 @@
 aspy.yaml==1.2.0 # via pre-commit
 bleach==3.1.0 # via readme-renderer
 certifi==2019.3.9 # via requests
-cfgv==1.5.0 # via pre-commit
+cfgv==1.6.0 # via pre-commit
 chardet==3.0.4 # via requests
 docutils==0.14 # via readme-renderer
 flake8==3.5.0
-identify==1.4.0 # via pre-commit
+identify==1.4.1 # via pre-commit
 idna==2.8 # via requests
-importlib-metadata==0.8 # via pre-commit
+importlib-metadata==0.9 # via pre-commit
 invoke==1.2.0
 mccabe==0.6.1 # via flake8
 nodeenv==1.3.3
 pkginfo==1.5.0.1 # via twine
-pre-commit==1.14.4
+pre-commit==1.15.2
 pycodestyle==2.3.1 # via flake8
 pyflakes==1.6.0 # via flake8
 pygments==2.3.1 # via readme-renderer

@@ -30,7 +30,7 @@ six==1.12.0 # via bleach, cfgv, pre-commit, readme-renderer
 toml==0.10.0 # via pre-commit
 tqdm==4.31.1 # via twine
 twine==1.13.0
-urllib3==1.24.1 # via requests
+urllib3==1.24.2 # via requests
 virtualenv==16.4.3 # via pre-commit
 webencodings==0.5.1 # via bleach
 zipp==0.3.3 # via importlib-metadata
@@ -4,10 +4,10 @@
 #
 # pip-compile --output-file requirements-docs.txt requirements-docs.in -U --no-index
 #
-acme==0.32.0
+acme==0.33.1
 alabaster==0.7.12 # via sphinx
 alembic-autogenerate-enums==0.0.2
-alembic==1.0.8
+alembic==1.0.9
 amqp==2.4.2
 aniso8601==6.0.0
 arrow==0.13.1

@@ -15,14 +15,14 @@ asn1crypto==0.24.0
 asyncpool==1.0
 babel==2.6.0 # via sphinx
 bcrypt==3.1.6
-billiard==3.5.0.5
+billiard==3.6.0.0
 blinker==1.4
-boto3==1.9.120
+boto3==1.9.134
-botocore==1.12.120
+botocore==1.12.134
-celery[redis]==4.2.2
+celery[redis]==4.3.0
 certifi==2019.3.9
 certsrv==2.1.1
-cffi==1.12.2
+cffi==1.12.3
 chardet==3.0.4
 click==7.0
 cloudflare==2.1.0

@@ -42,43 +42,46 @@ flask-sqlalchemy==2.3.2
 flask==1.0.2
 future==0.17.1
 gunicorn==19.9.0
-hvac==0.7.2
+hvac==0.8.2
 idna==2.8
 imagesize==1.1.0 # via sphinx
 inflection==0.3.1
 itsdangerous==1.1.0
-jinja2==2.10
+javaobj-py3==0.2.4
+jinja2==2.10.1
 jmespath==0.9.4
 josepy==1.1.0
 jsonlines==1.2.0
-kombu==4.3.0
+kombu==4.5.0
 lockfile==0.12.2
-mako==1.0.8
+mako==1.0.9
 markupsafe==1.1.1
-marshmallow-sqlalchemy==0.16.1
+marshmallow-sqlalchemy==0.16.2
-marshmallow==2.19.1
+marshmallow==2.19.2
 mock==2.0.0
 ndg-httpsclient==0.5.1
 packaging==19.0 # via sphinx
 paramiko==2.4.2
 pbr==5.1.3
 pem==19.1.0
-psycopg2==2.7.7
+psycopg2==2.8.2
 pyasn1-modules==0.2.4
 pyasn1==0.4.5
 pycparser==2.19
+pycryptodomex==3.8.1
 pygments==2.3.1 # via sphinx
+pyjks==19.0.0
 pyjwt==1.7.1
 pynacl==1.3.0
 pyopenssl==19.0.0
-pyparsing==2.3.1 # via packaging
+pyparsing==2.4.0 # via packaging
 pyrfc3339==1.1
 python-dateutil==2.8.0
 python-editor==1.0.4
-pytz==2018.9
+pytz==2019.1
 pyyaml==5.1
 raven[flask]==6.10.0
-redis==2.10.6
+redis==3.2.1
 requests-toolbelt==0.9.1
 requests[security]==2.21.0
 retrying==1.3.3

@@ -86,13 +89,19 @@ s3transfer==0.2.0
 six==1.12.0
 snowballstemmer==1.2.1 # via sphinx
 sphinx-rtd-theme==0.4.3
-sphinx==1.8.5
+sphinx==2.0.1
+sphinxcontrib-applehelp==1.0.1 # via sphinx
+sphinxcontrib-devhelp==1.0.1 # via sphinx
+sphinxcontrib-htmlhelp==1.0.2 # via sphinx
 sphinxcontrib-httpdomain==1.7.0
-sphinxcontrib-websupport==1.1.0 # via sphinx
+sphinxcontrib-jsmath==1.0.1 # via sphinx
+sphinxcontrib-qthelp==1.0.2 # via sphinx
+sphinxcontrib-serializinghtml==1.1.3 # via sphinx
 sqlalchemy-utils==0.33.11
-sqlalchemy==1.3.1
+sqlalchemy==1.3.3
 tabulate==0.8.3
-urllib3==1.24.1
+twofish==0.3.0
+urllib3==1.24.2
 vine==1.3.0
-werkzeug==0.15.1
+werkzeug==0.15.2
 xmltodict==0.12.0
@@ -7,58 +7,63 @@
 asn1crypto==0.24.0 # via cryptography
 atomicwrites==1.3.0 # via pytest
 attrs==19.1.0 # via pytest
-aws-xray-sdk==0.95 # via moto
+aws-sam-translator==1.10.0 # via cfn-lint
-boto3==1.9.120 # via moto
+aws-xray-sdk==2.4.2 # via moto
+boto3==1.9.134 # via aws-sam-translator, moto
 boto==2.49.0 # via moto
-botocore==1.12.120 # via boto3, moto, s3transfer
+botocore==1.12.134 # via aws-xray-sdk, boto3, moto, s3transfer
 certifi==2019.3.9 # via requests
-cffi==1.12.2 # via cryptography
+cffi==1.12.3 # via cryptography
+cfn-lint==0.19.1 # via moto
 chardet==3.0.4 # via requests
 click==7.0 # via flask
 coverage==4.5.3
 cryptography==2.6.1 # via moto
 docker-pycreds==0.4.0 # via docker
-docker==3.7.1 # via moto
+docker==3.7.2 # via moto
 docutils==0.14 # via botocore
-ecdsa==0.13 # via python-jose
+ecdsa==0.13.2 # via python-jose
 factory-boy==2.11.1
-faker==1.0.4
+faker==1.0.5
 flask==1.0.2 # via pytest-flask
 freezegun==0.3.11
-future==0.17.1 # via python-jose
+future==0.17.1 # via aws-xray-sdk, python-jose
-idna==2.8 # via requests
+idna==2.8 # via moto, requests
 itsdangerous==1.1.0 # via flask
-jinja2==2.10 # via flask, moto
+jinja2==2.10.1 # via flask, moto
 jmespath==0.9.4 # via boto3, botocore
-jsondiff==1.1.1 # via moto
+jsondiff==1.1.2 # via moto
+jsonpatch==1.23 # via cfn-lint
 jsonpickle==1.1 # via aws-xray-sdk
+jsonpointer==2.0 # via jsonpatch
+jsonschema==2.6.0 # via aws-sam-translator, cfn-lint
 markupsafe==1.1.1 # via jinja2
 mock==2.0.0 # via moto
-more-itertools==6.0.0 # via pytest
+more-itertools==7.0.0 # via pytest
-moto==1.3.7
+moto==1.3.8
 nose==1.3.7
 pbr==5.1.3 # via mock
 pluggy==0.9.0 # via pytest
 py==1.8.0 # via pytest
-pyaml==18.11.0 # via moto
+pyasn1==0.4.5 # via rsa
 pycparser==2.19 # via cffi
-pycryptodome==3.8.0 # via python-jose
 pyflakes==2.1.1
 pytest-flask==0.14.0
-pytest-mock==1.10.2
+pytest-mock==1.10.4
-pytest==4.3.1
+pytest==4.4.1
 python-dateutil==2.8.0 # via botocore, faker, freezegun, moto
-python-jose==2.0.2 # via moto
+python-jose==3.0.1 # via moto
-pytz==2018.9 # via moto
+pytz==2019.1 # via moto
 pyyaml==5.1
 requests-mock==1.5.2
-requests==2.21.0 # via aws-xray-sdk, docker, moto, requests-mock, responses
+requests==2.21.0 # via cfn-lint, docker, moto, requests-mock, responses
 responses==0.10.6 # via moto
+rsa==4.0 # via python-jose
 s3transfer==0.2.0 # via boto3
-six==1.12.0 # via cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client
+six==1.12.0 # via aws-sam-translator, cfn-lint, cryptography, docker, docker-pycreds, faker, freezegun, mock, moto, pytest, python-dateutil, python-jose, requests-mock, responses, websocket-client
 text-unidecode==1.2 # via faker
-urllib3==1.24.1 # via botocore, requests
+urllib3==1.24.2 # via botocore, requests
 websocket-client==0.56.0 # via docker
-werkzeug==0.15.1 # via flask, moto, pytest-flask
+werkzeug==0.15.2 # via flask, moto, pytest-flask
 wrapt==1.11.1 # via aws-xray-sdk
 xmltodict==0.12.0 # via moto
@@ -27,7 +27,7 @@ gunicorn
 hvac # required for the vault destination plugin
 inflection
 jinja2
-kombu==4.3.0 # kombu 4.4.0 requires redis 3
+kombu
 lockfile
 marshmallow-sqlalchemy
 marshmallow

@@ -35,15 +35,16 @@ ndg-httpsclient
 paramiko # required for the SFTP destination plugin
 pem
 psycopg2
+pyjks >= 19 # pyjks < 19 depends on pycryptodome, which conflicts with dyn's usage of pycrypto
 pyjwt
 pyOpenSSL
+pyyaml>=4.2b1 #high severity alert
 python_ldap
 raven[flask]
-redis<3 # redis>=3 is not compatible with celery
+redis
 requests
 retrying
 six
 SQLAlchemy-Utils
 tabulate
 xmltodict
-pyyaml>=4.2b1 #high severity alert
@@ -4,23 +4,23 @@
 #
 # pip-compile --output-file requirements.txt requirements.in -U --no-index
 #
-acme==0.32.0
+acme==0.33.1
 alembic-autogenerate-enums==0.0.2
-alembic==1.0.8 # via flask-migrate
+alembic==1.0.9 # via flask-migrate
 amqp==2.4.2 # via kombu
 aniso8601==6.0.0 # via flask-restful
 arrow==0.13.1
 asn1crypto==0.24.0 # via cryptography
 asyncpool==1.0
 bcrypt==3.1.6 # via flask-bcrypt, paramiko
-billiard==3.5.0.5 # via celery
+billiard==3.6.0.0 # via celery
 blinker==1.4 # via flask-mail, flask-principal, raven
-boto3==1.9.120
+boto3==1.9.134
-botocore==1.12.120
+botocore==1.12.134
-celery[redis]==4.2.2
+celery[redis]==4.3.0
 certifi==2019.3.9
 certsrv==2.1.1
-cffi==1.12.2 # via bcrypt, cryptography, pynacl
+cffi==1.12.3 # via bcrypt, cryptography, pynacl
 chardet==3.0.4 # via requests
 click==7.0 # via flask
 cloudflare==2.1.0

@@ -40,29 +40,32 @@ flask-sqlalchemy==2.3.2
 flask==1.0.2
 future==0.17.1
 gunicorn==19.9.0
-hvac==0.7.2
+hvac==0.8.2
 idna==2.8 # via requests
 inflection==0.3.1
 itsdangerous==1.1.0 # via flask
-jinja2==2.10
+javaobj-py3==0.2.4 # via pyjks
+jinja2==2.10.1
 jmespath==0.9.4 # via boto3, botocore
 josepy==1.1.0 # via acme
 jsonlines==1.2.0 # via cloudflare
-kombu==4.3.0
+kombu==4.5.0
 lockfile==0.12.2
-mako==1.0.8 # via alembic
+mako==1.0.9 # via alembic
 markupsafe==1.1.1 # via jinja2, mako
-marshmallow-sqlalchemy==0.16.1
+marshmallow-sqlalchemy==0.16.2
-marshmallow==2.19.1
+marshmallow==2.19.2
 mock==2.0.0 # via acme
 ndg-httpsclient==0.5.1
 paramiko==2.4.2
 pbr==5.1.3 # via mock
 pem==19.1.0
-psycopg2==2.7.7
+psycopg2==2.8.2
-pyasn1-modules==0.2.4 # via python-ldap
+pyasn1-modules==0.2.4 # via pyjks, python-ldap
-pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, python-ldap
+pyasn1==0.4.5 # via ndg-httpsclient, paramiko, pyasn1-modules, pyjks, python-ldap
 pycparser==2.19 # via cffi
+pycryptodomex==3.8.1 # via pyjks
+pyjks==19.0.0
 pyjwt==1.7.1
 pynacl==1.3.0 # via paramiko
 pyopenssl==19.0.0

@@ -70,19 +73,20 @@ pyrfc3339==1.1 # via acme
 python-dateutil==2.8.0 # via alembic, arrow, botocore
 python-editor==1.0.4 # via alembic
 python-ldap==3.2.0
-pytz==2018.9 # via acme, celery, flask-restful, pyrfc3339
+pytz==2019.1 # via acme, celery, flask-restful, pyrfc3339
 pyyaml==5.1
 raven[flask]==6.10.0
-redis==2.10.6
+redis==3.2.1
 requests-toolbelt==0.9.1 # via acme
 requests[security]==2.21.0
 retrying==1.3.3
 s3transfer==0.2.0 # via boto3
 six==1.12.0
 sqlalchemy-utils==0.33.11
-sqlalchemy==1.3.1 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils
+sqlalchemy==1.3.3 # via alembic, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-utils
 tabulate==0.8.3
-urllib3==1.24.1 # via botocore, requests
+twofish==0.3.0 # via pyjks
-vine==1.3.0 # via amqp
+urllib3==1.24.2 # via botocore, requests
-werkzeug==0.15.1 # via flask
+vine==1.3.0 # via amqp, celery
+werkzeug==0.15.2 # via flask
 xmltodict==0.12.0
setup.py
@@ -143,8 +143,8 @@ setup(
             'aws_s3 = lemur.plugins.lemur_aws.plugin:S3DestinationPlugin',
             'email_notification = lemur.plugins.lemur_email.plugin:EmailNotificationPlugin',
             'slack_notification = lemur.plugins.lemur_slack.plugin:SlackNotificationPlugin',
-            'java_truststore_export = lemur.plugins.lemur_java.plugin:JavaTruststoreExportPlugin',
-            'java_keystore_export = lemur.plugins.lemur_java.plugin:JavaKeystoreExportPlugin',
+            'java_truststore_export = lemur.plugins.lemur_jks.plugin:JavaTruststoreExportPlugin',
+            'java_keystore_export = lemur.plugins.lemur_jks.plugin:JavaKeystoreExportPlugin',
             'openssl_export = lemur.plugins.lemur_openssl.plugin:OpenSSLExportPlugin',
             'atlas_metric = lemur.plugins.lemur_atlas.plugin:AtlasMetricPlugin',
             'kubernetes_destination = lemur.plugins.lemur_kubernetes.plugin:KubernetesDestinationPlugin',