Merge branch 'master' into vault_regex
fixed conflicts: lemur/plugins/lemur_vault_dest/plugin.py
@@ -18,7 +18,9 @@ class PluginManager(InstanceManager):
return sum(1 for i in self.all())
def all(self, version=1, plugin_type=None):
for plugin in sorted(super(PluginManager, self).all(), key=lambda x: x.get_title()):
for plugin in sorted(
super(PluginManager, self).all(), key=lambda x: x.get_title()
):
if not plugin.type == plugin_type and plugin_type:
continue
if not plugin.is_enabled():

@@ -36,29 +38,34 @@ class PluginManager(InstanceManager):
return plugin
current_app.logger.error(
"Unable to find slug: {} in self.all version 1: {} or version 2: {}".format(
slug, self.all(version=1), self.all(version=2))
slug, self.all(version=1), self.all(version=2)
)
)
raise KeyError(slug)
def first(self, func_name, *args, **kwargs):
version = kwargs.pop('version', 1)
version = kwargs.pop("version", 1)
for plugin in self.all(version=version):
try:
result = getattr(plugin, func_name)(*args, **kwargs)
except Exception as e:
current_app.logger.error('Error processing %s() on %r: %s', func_name, plugin.__class__, e, extra={
'func_arg': args,
'func_kwargs': kwargs,
}, exc_info=True)
current_app.logger.error(
"Error processing %s() on %r: %s",
func_name,
plugin.__class__,
e,
extra={"func_arg": args, "func_kwargs": kwargs},
exc_info=True,
)
continue
if result is not None:
return result
def register(self, cls):
self.add('%s.%s' % (cls.__module__, cls.__name__))
self.add("%s.%s" % (cls.__module__, cls.__name__))
return cls
def unregister(self, cls):
self.remove('%s.%s' % (cls.__module__, cls.__name__))
self.remove("%s.%s" % (cls.__module__, cls.__name__))
return cls
@@ -18,7 +18,7 @@ class PluginMount(type):
if new_cls.title is None:
new_cls.title = new_cls.__name__
if not new_cls.slug:
new_cls.slug = new_cls.title.replace(' ', '-').lower()
new_cls.slug = new_cls.title.replace(" ", "-").lower()
return new_cls

@@ -36,6 +36,7 @@ class IPlugin(local):
As a general rule all inherited methods should allow ``**kwargs`` to ensure
ease of future compatibility.
"""
# Generic plugin information
title = None
slug = None

@@ -72,7 +73,7 @@ class IPlugin(local):
Returns a string representing the configuration keyspace prefix for this plugin.
"""
if not self.conf_key:
self.conf_key = self.get_conf_title().lower().replace(' ', '_')
self.conf_key = self.get_conf_title().lower().replace(" ", "_")
return self.conf_key
def get_conf_title(self):

@@ -111,8 +112,8 @@ class IPlugin(local):
@staticmethod
def get_option(name, options):
for o in options:
if o.get('name') == name:
return o.get('value', o.get('default'))
if o.get("name") == name:
return o.get("value", o.get("default"))
class Plugin(IPlugin):

@@ -121,5 +122,6 @@ class Plugin(IPlugin):
control when or how the plugin gets instantiated, nor is it guaranteed that
it will happen, or happen more than once.
"""
__version__ = 1
__metaclass__ = PluginMount
@@ -10,10 +10,10 @@ from lemur.plugins.base import Plugin, plugins
class DestinationPlugin(Plugin):
type = 'destination'
type = "destination"
requires_key = True
sync_as_source = False
sync_as_source_name = ''
sync_as_source_name = ""
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
raise NotImplementedError

@@ -22,10 +22,10 @@ class DestinationPlugin(Plugin):
class ExportDestinationPlugin(DestinationPlugin):
default_options = [
{
'name': 'exportPlugin',
'type': 'export-plugin',
'required': True,
'helpMessage': 'Export plugin to use before sending data to destination.'
"name": "exportPlugin",
"type": "export-plugin",
"required": True,
"helpMessage": "Export plugin to use before sending data to destination.",
}
]

@@ -34,15 +34,17 @@ class ExportDestinationPlugin(DestinationPlugin):
return self.default_options + self.additional_options
def export(self, body, private_key, cert_chain, options):
export_plugin = self.get_option('exportPlugin', options)
export_plugin = self.get_option("exportPlugin", options)
if export_plugin:
plugin = plugins.get(export_plugin['slug'])
extension, passphrase, data = plugin.export(body, cert_chain, private_key, export_plugin['plugin_options'])
plugin = plugins.get(export_plugin["slug"])
extension, passphrase, data = plugin.export(
body, cert_chain, private_key, export_plugin["plugin_options"]
)
return [(extension, passphrase, data)]
data = body + '\n' + cert_chain + '\n' + private_key
return [('.pem', '', data)]
data = body + "\n" + cert_chain + "\n" + private_key
return [(".pem", "", data)]
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
raise NotImplementedError
@@ -14,7 +14,8 @@ class ExportPlugin(Plugin):
This is the base class from which all supported
exporters will inherit from.
"""
type = 'export'
type = "export"
requires_key = True
def export(self, body, chain, key, options, **kwargs):
@@ -14,7 +14,8 @@ class IssuerPlugin(Plugin):
This is the base class from which all of the supported
issuers will inherit from.
"""
type = 'issuer'
type = "issuer"
def create_certificate(self, csr, issuer_options):
raise NotImplementedError
@@ -10,7 +10,9 @@ from lemur.plugins.base import Plugin
class MetricPlugin(Plugin):
type = 'metric'
type = "metric"
def submit(self, metric_name, metric_type, metric_value, metric_tags=None, options=None):
def submit(
self, metric_name, metric_type, metric_value, metric_tags=None, options=None
):
raise NotImplementedError
@@ -14,7 +14,8 @@ class NotificationPlugin(Plugin):
This is the base class from which all of the supported
issuers will inherit from.
"""
type = 'notification'
type = "notification"
def send(self, notification_type, message, targets, options, **kwargs):
raise NotImplementedError

@@ -26,22 +27,23 @@ class ExpirationNotificationPlugin(NotificationPlugin):
It contains some default options that are needed for all expiration
notification plugins.
"""
default_options = [
{
'name': 'interval',
'type': 'int',
'required': True,
'validation': '^\d+$',
'helpMessage': 'Number of days to be alert before expiration.',
"name": "interval",
"type": "int",
"required": True,
"validation": "^\d+$",
"helpMessage": "Number of days to be alert before expiration.",
},
{
'name': 'unit',
'type': 'select',
'required': True,
'validation': '',
'available': ['days', 'weeks', 'months'],
'helpMessage': 'Interval unit',
}
"name": "unit",
"type": "select",
"required": True,
"validation": "",
"available": ["days", "weeks", "months"],
"helpMessage": "Interval unit",
},
]
@property
@@ -10,15 +10,15 @@ from lemur.plugins.base import Plugin
class SourcePlugin(Plugin):
type = 'source'
type = "source"
default_options = [
{
'name': 'pollRate',
'type': 'int',
'required': False,
'helpMessage': 'Rate in seconds to poll source for new information.',
'default': '60',
"name": "pollRate",
"type": "int",
"required": False,
"helpMessage": "Rate in seconds to poll source for new information.",
"default": "60",
}
]
@@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"
@@ -5,24 +5,24 @@ from flask import current_app
def cf_api_call():
cf_key = current_app.config.get('ACME_CLOUDFLARE_KEY', '')
cf_email = current_app.config.get('ACME_CLOUDFLARE_EMAIL', '')
cf_key = current_app.config.get("ACME_CLOUDFLARE_KEY", "")
cf_email = current_app.config.get("ACME_CLOUDFLARE_EMAIL", "")
return CloudFlare.CloudFlare(email=cf_email, token=cf_key)
def find_zone_id(host):
elements = host.split('.')
elements = host.split(".")
cf = cf_api_call()
n = 1
while n < 5:
n = n + 1
domain = '.'.join(elements[-n:])
domain = ".".join(elements[-n:])
current_app.logger.debug("Trying to get ID for zone {0}".format(domain))
try:
zone = cf.zones.get(params={'name': domain, 'per_page': 1})
zone = cf.zones.get(params={"name": domain, "per_page": 1})
except Exception as e:
current_app.logger.error("Cloudflare API error: %s" % e)
pass

@@ -31,10 +31,10 @@ def find_zone_id(host):
break
if len(zone) == 0:
current_app.logger.error('No zone found')
current_app.logger.error("No zone found")
return
else:
return zone[0]['id']
return zone[0]["id"]
def wait_for_dns_change(change_id, account_number=None):

@@ -42,8 +42,8 @@ def wait_for_dns_change(change_id, account_number=None):
zone_id, record_id = change_id
while True:
r = cf.zones.get(zone_id, record_id)
current_app.logger.debug("Record status: %s" % r['status'])
if r['status'] == 'active':
current_app.logger.debug("Record status: %s" % r["status"])
if r["status"] == "active":
break
time.sleep(1)
return

@@ -55,15 +55,19 @@ def create_txt_record(host, value, account_number):
if not zone_id:
return
txt_record = {'name': host, 'type': 'TXT', 'content': value}
txt_record = {"name": host, "type": "TXT", "content": value}
current_app.logger.debug("Creating TXT record {0} with value {1}".format(host, value))
current_app.logger.debug(
"Creating TXT record {0} with value {1}".format(host, value)
)
try:
r = cf.zones.dns_records.post(zone_id, data=txt_record)
except Exception as e:
current_app.logger.error('/zones.dns_records.post %s: %s' % (txt_record['name'], e))
return zone_id, r['id']
current_app.logger.error(
"/zones.dns_records.post %s: %s" % (txt_record["name"], e)
)
return zone_id, r["id"]
def delete_txt_record(change_ids, account_number, host, value):

@@ -74,4 +78,4 @@ def delete_txt_record(change_ids, account_number, host, value):
try:
cf.zones.dns_records.delete(zone_id, record_id)
except Exception as e:
current_app.logger.error('/zones.dns_records.post: %s' % e)
current_app.logger.error("/zones.dns_records.post: %s" % e)
@@ -5,7 +5,12 @@ import dns.exception
import dns.name
import dns.query
import dns.resolver
from dyn.tm.errors import DynectCreateError, DynectDeleteError, DynectGetError, DynectUpdateError
from dyn.tm.errors import (
DynectCreateError,
DynectDeleteError,
DynectGetError,
DynectUpdateError,
)
from dyn.tm.session import DynectSession
from dyn.tm.zones import Node, Zone, get_all_zones
from flask import current_app

@@ -16,13 +21,13 @@ from lemur.extensions import metrics, sentry
def get_dynect_session():
try:
dynect_session = DynectSession(
current_app.config.get('ACME_DYN_CUSTOMER_NAME', ''),
current_app.config.get('ACME_DYN_USERNAME', ''),
current_app.config.get('ACME_DYN_PASSWORD', ''),
current_app.config.get("ACME_DYN_CUSTOMER_NAME", ""),
current_app.config.get("ACME_DYN_USERNAME", ""),
current_app.config.get("ACME_DYN_PASSWORD", ""),
)
except Exception as e:
sentry.captureException()
metrics.send('get_dynect_session_fail', 'counter', 1)
metrics.send("get_dynect_session_fail", "counter", 1)
current_app.logger.debug("Unable to establish connection to Dyn", exc_info=True)
raise
return dynect_session

@@ -33,17 +38,17 @@ def _has_dns_propagated(name, token):
try:
dns_resolver = dns.resolver.Resolver()
dns_resolver.nameservers = [get_authoritative_nameserver(name)]
dns_response = dns_resolver.query(name, 'TXT')
dns_response = dns_resolver.query(name, "TXT")
for rdata in dns_response:
for txt_record in rdata.strings:
txt_records.append(txt_record.decode("utf-8"))
except dns.exception.DNSException:
metrics.send('has_dns_propagated_fail', 'counter', 1)
metrics.send("has_dns_propagated_fail", "counter", 1)
return False
for txt_record in txt_records:
if txt_record == token:
metrics.send('has_dns_propagated_success', 'counter', 1)
metrics.send("has_dns_propagated_success", "counter", 1)
return True
return False

@@ -56,18 +61,19 @@ def wait_for_dns_change(change_id, account_number=None):
status = _has_dns_propagated(fqdn, token)
current_app.logger.debug("Record status for fqdn: {}: {}".format(fqdn, status))
if status:
metrics.send('wait_for_dns_change_success', 'counter', 1)
metrics.send("wait_for_dns_change_success", "counter", 1)
break
time.sleep(10)
if not status:
# TODO: Delete associated DNS text record here
metrics.send('wait_for_dns_change_fail', 'counter', 1)
sentry.captureException(
extra={
"fqdn": str(fqdn), "txt_record": str(token)}
metrics.send("wait_for_dns_change_fail", "counter", 1)
sentry.captureException(extra={"fqdn": str(fqdn), "txt_record": str(token)})
metrics.send(
"wait_for_dns_change_error",
"counter",
1,
metric_tags={"fqdn": fqdn, "txt_record": token},
)
metrics.send('wait_for_dns_change_error', 'counter', 1,
metric_tags={'fqdn': fqdn, 'txt_record': token})
return

@@ -84,7 +90,7 @@ def get_zone_name(domain):
if z.name.count(".") > zone_name.count("."):
zone_name = z.name
if not zone_name:
metrics.send('dyn_no_zone_name', 'counter', 1)
metrics.send("dyn_no_zone_name", "counter", 1)
raise Exception("No Dyn zone found for domain: {}".format(domain))
return zone_name

@@ -101,23 +107,28 @@ def get_zones(account_number):
def create_txt_record(domain, token, account_number):
get_dynect_session()
zone_name = get_zone_name(domain)
zone_parts = len(zone_name.split('.'))
node_name = '.'.join(domain.split('.')[:-zone_parts])
zone_parts = len(zone_name.split("."))
node_name = ".".join(domain.split(".")[:-zone_parts])
fqdn = "{0}.{1}".format(node_name, zone_name)
zone = Zone(zone_name)
try:
zone.add_record(node_name, record_type='TXT', txtdata="\"{}\"".format(token), ttl=5)
zone.add_record(
node_name, record_type="TXT", txtdata='"{}"'.format(token), ttl=5
)
zone.publish()
current_app.logger.debug("TXT record created: {0}, token: {1}".format(fqdn, token))
current_app.logger.debug(
"TXT record created: {0}, token: {1}".format(fqdn, token)
)
except (DynectCreateError, DynectUpdateError) as e:
if "Cannot duplicate existing record data" in e.message:
current_app.logger.debug(
"Unable to add record. Domain: {}. Token: {}. "
"Record already exists: {}".format(domain, token, e), exc_info=True
"Record already exists: {}".format(domain, token, e),
exc_info=True,
)
else:
metrics.send('create_txt_record_error', 'counter', 1)
metrics.send("create_txt_record_error", "counter", 1)
sentry.captureException()
raise
@@ -132,18 +143,17 @@ def delete_txt_record(change_id, account_number, domain, token):
return
zone_name = get_zone_name(domain)
zone_parts = len(zone_name.split('.'))
node_name = '.'.join(domain.split('.')[:-zone_parts])
zone_parts = len(zone_name.split("."))
node_name = ".".join(domain.split(".")[:-zone_parts])
fqdn = "{0}.{1}".format(node_name, zone_name)
zone = Zone(zone_name)
node = Node(zone_name, fqdn)
try:
all_txt_records = node.get_all_records_by_type('TXT')
all_txt_records = node.get_all_records_by_type("TXT")
except DynectGetError:
sentry.captureException()
metrics.send('delete_txt_record_geterror', 'counter', 1)
metrics.send("delete_txt_record_geterror", "counter", 1)
# No Text Records remain or host is not in the zone anymore because all records have been deleted.
return
for txt_record in all_txt_records:

@@ -154,22 +164,36 @@ def delete_txt_record(change_id, account_number, domain, token):
except DynectDeleteError:
sentry.captureException(
extra={
"fqdn": str(fqdn), "zone_name": str(zone_name), "node_name": str(node_name),
"txt_record": str(txt_record.txtdata)}
"fqdn": str(fqdn),
"zone_name": str(zone_name),
"node_name": str(node_name),
"txt_record": str(txt_record.txtdata),
}
)
metrics.send(
"delete_txt_record_deleteerror",
"counter",
1,
metric_tags={"fqdn": fqdn, "txt_record": txt_record.txtdata},
)
metrics.send('delete_txt_record_deleteerror', 'counter', 1,
metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata})
try:
zone.publish()
except DynectUpdateError:
sentry.captureException(
extra={
"fqdn": str(fqdn), "zone_name": str(zone_name), "node_name": str(node_name),
"txt_record": str(txt_record.txtdata)}
"fqdn": str(fqdn),
"zone_name": str(zone_name),
"node_name": str(node_name),
"txt_record": str(txt_record.txtdata),
}
)
metrics.send(
"delete_txt_record_publish_error",
"counter",
1,
metric_tags={"fqdn": str(fqdn), "txt_record": str(txt_record.txtdata)},
)
metrics.send('delete_txt_record_publish_error', 'counter', 1,
metric_tags={'fqdn': str(fqdn), 'txt_record': str(txt_record.txtdata)})
def delete_acme_txt_records(domain):

@@ -181,18 +205,21 @@ def delete_acme_txt_records(domain):
if not domain.startswith(acme_challenge_string):
current_app.logger.debug(
"delete_acme_txt_records: Domain {} doesn't start with string {}. "
"Cowardly refusing to delete TXT records".format(domain, acme_challenge_string))
"Cowardly refusing to delete TXT records".format(
domain, acme_challenge_string
)
)
return
zone_name = get_zone_name(domain)
zone_parts = len(zone_name.split('.'))
node_name = '.'.join(domain.split('.')[:-zone_parts])
zone_parts = len(zone_name.split("."))
node_name = ".".join(domain.split(".")[:-zone_parts])
fqdn = "{0}.{1}".format(node_name, zone_name)
zone = Zone(zone_name)
node = Node(zone_name, fqdn)
all_txt_records = node.get_all_records_by_type('TXT')
all_txt_records = node.get_all_records_by_type("TXT")
for txt_record in all_txt_records:
current_app.logger.debug("Deleting TXT record name: {0}".format(fqdn))
try:

@@ -200,16 +227,23 @@ def delete_acme_txt_records(domain):
except DynectDeleteError:
sentry.captureException(
extra={
"fqdn": str(fqdn), "zone_name": str(zone_name), "node_name": str(node_name),
"txt_record": str(txt_record.txtdata)}
"fqdn": str(fqdn),
"zone_name": str(zone_name),
"node_name": str(node_name),
"txt_record": str(txt_record.txtdata),
}
)
metrics.send(
"delete_txt_record_deleteerror",
"counter",
1,
metric_tags={"fqdn": fqdn, "txt_record": txt_record.txtdata},
)
metrics.send('delete_txt_record_deleteerror', 'counter', 1,
metric_tags={'fqdn': fqdn, 'txt_record': txt_record.txtdata})
zone.publish()
def get_authoritative_nameserver(domain):
if current_app.config.get('ACME_DYN_GET_AUTHORATATIVE_NAMESERVER'):
if current_app.config.get("ACME_DYN_GET_AUTHORATATIVE_NAMESERVER"):
n = dns.name.from_text(domain)
depth = 2

@@ -220,7 +254,7 @@ def get_authoritative_nameserver(domain):
while not last:
s = n.split(depth)
last = s[0].to_unicode() == u'@'
last = s[0].to_unicode() == u"@"
sub = s[1]
query = dns.message.make_query(sub, dns.rdatatype.NS)

@@ -228,11 +262,11 @@ def get_authoritative_nameserver(domain):
rcode = response.rcode()
if rcode != dns.rcode.NOERROR:
metrics.send('get_authoritative_nameserver_error', 'counter', 1)
metrics.send("get_authoritative_nameserver_error", "counter", 1)
if rcode == dns.rcode.NXDOMAIN:
raise Exception('%s does not exist.' % sub)
raise Exception("%s does not exist." % sub)
else:
raise Exception('Error %s' % dns.rcode.to_text(rcode))
raise Exception("Error %s" % dns.rcode.to_text(rcode))
if len(response.authority) > 0:
rrset = response.authority[0]
@@ -17,7 +17,7 @@ import time
import OpenSSL.crypto
import josepy as jose
from acme import challenges, messages
from acme import challenges, errors, messages
from acme.client import BackwardsCompatibleClientV2, ClientNetwork
from acme.errors import PollError, TimeoutError, WildcardUnsupportedError
from acme.messages import Error as AcmeError

@@ -48,7 +48,7 @@ class AcmeHandler(object):
try:
self.all_dns_providers = dns_provider_service.get_all_dns_providers()
except Exception as e:
metrics.send('AcmeHandler_init_error', 'counter', 1)
metrics.send("AcmeHandler_init_error", "counter", 1)
sentry.captureException()
current_app.logger.error(f"Unable to fetch DNS Providers: {e}")
self.all_dns_providers = []

@@ -67,45 +67,60 @@ class AcmeHandler(object):
return host.replace("*.", "")
def maybe_add_extension(self, host, dns_provider_options):
if dns_provider_options and dns_provider_options.get("acme_challenge_extension"):
if dns_provider_options and dns_provider_options.get(
"acme_challenge_extension"
):
host = host + dns_provider_options.get("acme_challenge_extension")
return host
def start_dns_challenge(self, acme_client, account_number, host, dns_provider, order, dns_provider_options):
def start_dns_challenge(
self,
acme_client,
account_number,
host,
dns_provider,
order,
dns_provider_options,
):
current_app.logger.debug("Starting DNS challenge for {0}".format(host))
change_ids = []
host_to_validate = self.maybe_remove_wildcard(host)
dns_challenges = self.find_dns_challenge(host_to_validate, order.authorizations)
host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options)
host_to_validate = self.maybe_add_extension(
host_to_validate, dns_provider_options
)
if not dns_challenges:
sentry.captureException()
metrics.send('start_dns_challenge_error_no_dns_challenges', 'counter', 1)
metrics.send("start_dns_challenge_error_no_dns_challenges", "counter", 1)
raise Exception("Unable to determine DNS challenges from authorizations")
for dns_challenge in dns_challenges:
change_id = dns_provider.create_txt_record(
dns_challenge.validation_domain_name(host_to_validate),
dns_challenge.validation(acme_client.client.net.key),
account_number
account_number,
)
change_ids.append(change_id)
return AuthorizationRecord(
host,
order.authorizations,
dns_challenges,
change_ids
host, order.authorizations, dns_challenges, change_ids
)
def complete_dns_challenge(self, acme_client, authz_record):
current_app.logger.debug("Finalizing DNS challenge for {0}".format(authz_record.authz[0].body.identifier.value))
current_app.logger.debug(
"Finalizing DNS challenge for {0}".format(
authz_record.authz[0].body.identifier.value
)
)
dns_providers = self.dns_providers_for_domain.get(authz_record.host)
if not dns_providers:
metrics.send('complete_dns_challenge_error_no_dnsproviders', 'counter', 1)
raise Exception("No DNS providers found for domain: {}".format(authz_record.host))
metrics.send("complete_dns_challenge_error_no_dnsproviders", "counter", 1)
raise Exception(
"No DNS providers found for domain: {}".format(authz_record.host)
)
for dns_provider in dns_providers:
# Grab account number (For Route53)

@@ -114,13 +129,17 @@ class AcmeHandler(object):
dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)
for change_id in authz_record.change_id:
try:
dns_provider_plugin.wait_for_dns_change(change_id, account_number=account_number)
dns_provider_plugin.wait_for_dns_change(
change_id, account_number=account_number
)
except Exception:
metrics.send('complete_dns_challenge_error', 'counter', 1)
metrics.send("complete_dns_challenge_error", "counter", 1)
sentry.captureException()
current_app.logger.debug(
f"Unable to resolve DNS challenge for change_id: {change_id}, account_id: "
f"{account_number}", exc_info=True)
f"{account_number}",
exc_info=True,
)
raise
for dns_challenge in authz_record.dns_challenge:
@@ -129,11 +148,11 @@ class AcmeHandler(object):
verified = response.simple_verify(
dns_challenge.chall,
authz_record.host,
acme_client.client.net.key.public_key()
acme_client.client.net.key.public_key(),
)
if not verified:
metrics.send('complete_dns_challenge_verification_error', 'counter', 1)
metrics.send("complete_dns_challenge_verification_error", "counter", 1)
raise ValueError("Failed verification")
time.sleep(5)

@@ -152,16 +171,30 @@ class AcmeHandler(object):
except (AcmeError, TimeoutError):
sentry.captureException(extra={"order_url": str(order.uri)})
metrics.send('request_certificate_error', 'counter', 1)
current_app.logger.error(f"Unable to resolve Acme order: {order.uri}", exc_info=True)
metrics.send("request_certificate_error", "counter", 1)
current_app.logger.error(
f"Unable to resolve Acme order: {order.uri}", exc_info=True
)
raise
except errors.ValidationError:
if order.fullchain_pem:
orderr = order
else:
raise
pem_certificate = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_PEM,
OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,
orderr.fullchain_pem)).decode()
pem_certificate_chain = orderr.fullchain_pem[len(pem_certificate):].lstrip()
pem_certificate = OpenSSL.crypto.dump_certificate(
OpenSSL.crypto.FILETYPE_PEM,
OpenSSL.crypto.load_certificate(
OpenSSL.crypto.FILETYPE_PEM, orderr.fullchain_pem
),
).decode()
pem_certificate_chain = orderr.fullchain_pem[
len(pem_certificate) :  # noqa
].lstrip()
current_app.logger.debug("{0} {1}".format(type(pem_certificate), type(pem_certificate_chain)))
current_app.logger.debug(
"{0} {1}".format(type(pem_certificate), type(pem_certificate_chain))
)
return pem_certificate, pem_certificate_chain
def setup_acme_client(self, authority):

@@ -171,30 +204,40 @@ class AcmeHandler(object):
for option in json.loads(authority.options):
options[option["name"]] = option.get("value")
email = options.get('email', current_app.config.get('ACME_EMAIL'))
tel = options.get('telephone', current_app.config.get('ACME_TEL'))
directory_url = options.get('acme_url', current_app.config.get('ACME_DIRECTORY_URL'))
email = options.get("email", current_app.config.get("ACME_EMAIL"))
tel = options.get("telephone", current_app.config.get("ACME_TEL"))
directory_url = options.get(
"acme_url", current_app.config.get("ACME_DIRECTORY_URL")
)
existing_key = options.get('acme_private_key', current_app.config.get('ACME_PRIVATE_KEY'))
existing_regr = options.get('acme_regr', current_app.config.get('ACME_REGR'))
existing_key = options.get(
"acme_private_key", current_app.config.get("ACME_PRIVATE_KEY")
)
existing_regr = options.get("acme_regr", current_app.config.get("ACME_REGR"))
if existing_key and existing_regr:
# Reuse the same account for each certificate issuance
key = jose.JWK.json_loads(existing_key)
regr = messages.RegistrationResource.json_loads(existing_regr)
current_app.logger.debug("Connecting with directory at {0}".format(directory_url))
current_app.logger.debug(
"Connecting with directory at {0}".format(directory_url)
)
net = ClientNetwork(key, account=regr)
client = BackwardsCompatibleClientV2(net, key, directory_url)
return client, {}
else:
# Create an account for each certificate issuance
key = jose.JWKRSA(key=generate_private_key('RSA2048'))
key = jose.JWKRSA(key=generate_private_key("RSA2048"))
current_app.logger.debug("Connecting with directory at {0}".format(directory_url))
current_app.logger.debug(
"Connecting with directory at {0}".format(directory_url)
)
net = ClientNetwork(key, account=None, timeout=3600)
client = BackwardsCompatibleClientV2(net, key, directory_url)
registration = client.new_account_and_tos(messages.NewRegistration.from_data(email=email))
registration = client.new_account_and_tos(
messages.NewRegistration.from_data(email=email)
)
current_app.logger.debug("Connected: {0}".format(registration.uri))
return client, registration

@@ -207,9 +250,9 @@ class AcmeHandler(object):
"""
current_app.logger.debug("Fetching domains")
domains = [options['common_name']]
if options.get('extensions'):
for name in options['extensions']['sub_alt_names']['names']:
domains = [options["common_name"]]
if options.get("extensions"):
for name in options["extensions"]["sub_alt_names"]["names"]:
domains.append(name)
current_app.logger.debug("Got these domains: {0}".format(domains))

@@ -220,16 +263,22 @@ class AcmeHandler(object):
for domain in order_info.domains:
if not self.dns_providers_for_domain.get(domain):
metrics.send('get_authorizations_no_dns_provider_for_domain', 'counter', 1)
metrics.send(
"get_authorizations_no_dns_provider_for_domain", "counter", 1
)
raise Exception("No DNS providers found for domain: {}".format(domain))
for dns_provider in self.dns_providers_for_domain[domain]:
dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)
dns_provider_options = json.loads(dns_provider.credentials)
account_number = dns_provider_options.get("account_id")
authz_record = self.start_dns_challenge(acme_client, account_number, domain,
dns_provider_plugin,
order,
dns_provider.options)
authz_record = self.start_dns_challenge(
acme_client,
account_number,
domain,
dns_provider_plugin,
order,
dns_provider.options,
)
authorizations.append(authz_record)
return authorizations
@@ -263,16 +312,20 @@ class AcmeHandler(object):
dns_providers = self.dns_providers_for_domain.get(authz_record.host)
for dns_provider in dns_providers:
# Grab account number (For Route53)
dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)
dns_provider_plugin = self.get_dns_provider(
dns_provider.provider_type
)
dns_provider_options = json.loads(dns_provider.credentials)
account_number = dns_provider_options.get("account_id")
host_to_validate = self.maybe_remove_wildcard(authz_record.host)
host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options)
host_to_validate = self.maybe_add_extension(
host_to_validate, dns_provider_options
)
dns_provider_plugin.delete_txt_record(
authz_record.change_id,
account_number,
dns_challenge.validation_domain_name(host_to_validate),
dns_challenge.validation(acme_client.client.net.key)
dns_challenge.validation(acme_client.client.net.key),
)
return authorizations

@@ -297,7 +350,9 @@ class AcmeHandler(object):
account_number = dns_provider_options.get("account_id")
dns_challenges = authz_record.dns_challenge
host_to_validate = self.maybe_remove_wildcard(authz_record.host)
host_to_validate = self.maybe_add_extension(host_to_validate, dns_provider_options)
host_to_validate = self.maybe_add_extension(
host_to_validate, dns_provider_options
)
dns_provider_plugin = self.get_dns_provider(dns_provider.provider_type)
for dns_challenge in dns_challenges:
try:

@@ -305,21 +360,17 @@ class AcmeHandler(object):
authz_record.change_id,
account_number,
dns_challenge.validation_domain_name(host_to_validate),
dns_challenge.validation(acme_client.client.net.key)
dns_challenge.validation(acme_client.client.net.key),
)
except Exception as e:
# If this fails, it's most likely because the record doesn't exist (It was already cleaned up)
# or we're not authorized to modify it.
metrics.send('cleanup_dns_challenges_error', 'counter', 1)
metrics.send("cleanup_dns_challenges_error", "counter", 1)
sentry.captureException()
pass
def get_dns_provider(self, type):
provider_types = {
'cloudflare': cloudflare,
'dyn': dyn,
'route53': route53,
}
provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53}
provider = provider_types.get(type)
if not provider:
raise UnknownProvider("No such DNS provider: {}".format(type))

@@ -327,41 +378,43 @@ class AcmeHandler(object):
class ACMEIssuerPlugin(IssuerPlugin):
title = 'Acme'
slug = 'acme-issuer'
description = 'Enables the creation of certificates via ACME CAs (including Let\'s Encrypt)'
title = "Acme"
slug = "acme-issuer"
description = (
"Enables the creation of certificates via ACME CAs (including Let's Encrypt)"
)
version = acme.VERSION
author = 'Netflix'
author_url = 'https://github.com/netflix/lemur.git'
author = "Netflix"
author_url = "https://github.com/netflix/lemur.git"
options = [
{
'name': 'acme_url',
'type': 'str',
'required': True,
'validation': '/^http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+$/',
'helpMessage': 'Must be a valid web url starting with http[s]://',
"name": "acme_url",
"type": "str",
"required": True,
"validation": "/^http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+$/",
"helpMessage": "Must be a valid web url starting with http[s]://",
},
{
'name': 'telephone',
'type': 'str',
'default': '',
'helpMessage': 'Telephone to use'
"name": "telephone",
"type": "str",
"default": "",
"helpMessage": "Telephone to use",
},
{
'name': 'email',
'type': 'str',
'default': '',
'validation': '/^?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)$/',
'helpMessage': 'Email to use'
"name": "email",
"type": "str",
"default": "",
"validation": "/^?([-a-zA-Z0-9.`?{}]+@\w+\.\w+)$/",
"helpMessage": "Email to use",
},
{
'name': 'certificate',
'type': 'textarea',
'default': '',
'validation': '/^-----BEGIN CERTIFICATE-----/',
'helpMessage': 'Certificate to use'
"name": "certificate",
"type": "textarea",
"default": "",
"validation": "/^-----BEGIN CERTIFICATE-----/",
"helpMessage": "Certificate to use",
},
]
@@ -371,11 +424,7 @@ class ACMEIssuerPlugin(IssuerPlugin):
def get_dns_provider(self, type):
self.acme = AcmeHandler()
provider_types = {
'cloudflare': cloudflare,
'dyn': dyn,
'route53': route53,
}
provider_types = {"cloudflare": cloudflare, "dyn": dyn, "route53": route53}
provider = provider_types.get(type)
if not provider:
raise UnknownProvider("No such DNS provider: {}".format(type))

@@ -406,24 +455,31 @@ class ACMEIssuerPlugin(IssuerPlugin):
try:
order = acme_client.new_order(pending_cert.csr)
except WildcardUnsupportedError:
metrics.send('get_ordered_certificate_wildcard_unsupported', 'counter', 1)
raise Exception("The currently selected ACME CA endpoint does"
" not support issuing wildcard certificates.")
metrics.send("get_ordered_certificate_wildcard_unsupported", "counter", 1)
raise Exception(
"The currently selected ACME CA endpoint does"
" not support issuing wildcard certificates."
)
try:
authorizations = self.acme.get_authorizations(acme_client, order, order_info)
authorizations = self.acme.get_authorizations(
acme_client, order, order_info
)
except ClientError:
sentry.captureException()
metrics.send('get_ordered_certificate_error', 'counter', 1)
current_app.logger.error(f"Unable to resolve pending cert: {pending_cert.name}", exc_info=True)
metrics.send("get_ordered_certificate_error", "counter", 1)
current_app.logger.error(
f"Unable to resolve pending cert: {pending_cert.name}", exc_info=True
)
return False
authorizations = self.acme.finalize_authorizations(acme_client, authorizations)
pem_certificate, pem_certificate_chain = self.acme.request_certificate(
acme_client, authorizations, order)
acme_client, authorizations, order
)
cert = {
'body': "\n".join(str(pem_certificate).splitlines()),
'chain': "\n".join(str(pem_certificate_chain).splitlines()),
'external_id': str(pending_cert.external_id)
"body": "\n".join(str(pem_certificate).splitlines()),
"chain": "\n".join(str(pem_certificate_chain).splitlines()),
"external_id": str(pending_cert.external_id),
}
return cert

@@ -433,10 +489,14 @@ class ACMEIssuerPlugin(IssuerPlugin):
certs = []
for pending_cert in pending_certs:
try:
acme_client, registration = self.acme.setup_acme_client(pending_cert.authority)
acme_client, registration = self.acme.setup_acme_client(
pending_cert.authority
)
order_info = authorization_service.get(pending_cert.external_id)
if pending_cert.dns_provider_id:
dns_provider = dns_provider_service.get(pending_cert.dns_provider_id)
dns_provider = dns_provider_service.get(
pending_cert.dns_provider_id
)
for domain in order_info.domains:
# Currently, we only support specifying one DNS provider per certificate, even if that

@@ -450,70 +510,79 @@ class ACMEIssuerPlugin(IssuerPlugin):
order = acme_client.new_order(pending_cert.csr)
except WildcardUnsupportedError:
sentry.captureException()
metrics.send('get_ordered_certificates_wildcard_unsupported_error', 'counter', 1)
raise Exception("The currently selected ACME CA endpoint does"
" not support issuing wildcard certificates.")
metrics.send(
"get_ordered_certificates_wildcard_unsupported_error",
"counter",
1,
)
raise Exception(
"The currently selected ACME CA endpoint does"
" not support issuing wildcard certificates."
)
authorizations = self.acme.get_authorizations(acme_client, order, order_info)
authorizations = self.acme.get_authorizations(
acme_client, order, order_info
)
pending.append({
"acme_client": acme_client,
"authorizations": authorizations,
"pending_cert": pending_cert,
"order": order,
})
pending.append(
{
"acme_client": acme_client,
"authorizations": authorizations,
"pending_cert": pending_cert,
"order": order,
}
)
except (ClientError, ValueError, Exception) as e:
sentry.captureException()
metrics.send('get_ordered_certificates_pending_creation_error', 'counter', 1)
current_app.logger.error(f"Unable to resolve pending cert: {pending_cert}", exc_info=True)
metrics.send(
"get_ordered_certificates_pending_creation_error", "counter", 1
)
current_app.logger.error(
f"Unable to resolve pending cert: {pending_cert}", exc_info=True
)
error = e
if globals().get("order") and order:
error += f" Order uri: {order.uri}"
certs.append({
"cert": False,
"pending_cert": pending_cert,
"last_error": e,
})
certs.append(
{"cert": False, "pending_cert": pending_cert, "last_error": e}
)
for entry in pending:
try:
entry["authorizations"] = self.acme.finalize_authorizations(
entry["acme_client"],
entry["authorizations"],
entry["acme_client"], entry["authorizations"]
)
pem_certificate, pem_certificate_chain = self.acme.request_certificate(
entry["acme_client"],
entry["authorizations"],
entry["order"]
entry["acme_client"], entry["authorizations"], entry["order"]
)
cert = {
'body': "\n".join(str(pem_certificate).splitlines()),
'chain': "\n".join(str(pem_certificate_chain).splitlines()),
'external_id': str(entry["pending_cert"].external_id)
"body": "\n".join(str(pem_certificate).splitlines()),
"chain": "\n".join(str(pem_certificate_chain).splitlines()),
"external_id": str(entry["pending_cert"].external_id),
}
certs.append({
"cert": cert,
"pending_cert": entry["pending_cert"],
})
certs.append({"cert": cert, "pending_cert": entry["pending_cert"]})
except (PollError, AcmeError, Exception) as e:
sentry.captureException()
metrics.send('get_ordered_certificates_resolution_error', 'counter', 1)
metrics.send("get_ordered_certificates_resolution_error", "counter", 1)
order_url = order.uri
error = f"{e}. Order URI: {order_url}"
current_app.logger.error(
f"Unable to resolve pending cert: {pending_cert}. "
f"Check out {order_url} for more information.", exc_info=True)
certs.append({
"cert": False,
"pending_cert": entry["pending_cert"],
"last_error": error,
})
f"Check out {order_url} for more information.",
exc_info=True,
)
certs.append(
{
"cert": False,
"pending_cert": entry["pending_cert"],
"last_error": error,
}
)
# Ensure DNS records get deleted
self.acme.cleanup_dns_challenges(
entry["acme_client"],
entry["authorizations"],
entry["acme_client"], entry["authorizations"]
)
return certs
@@ -526,20 +595,26 @@ class ACMEIssuerPlugin(IssuerPlugin):
:return: :raise Exception:
"""
self.acme = AcmeHandler()
authority = issuer_options.get('authority')
create_immediately = issuer_options.get('create_immediately', False)
authority = issuer_options.get("authority")
create_immediately = issuer_options.get("create_immediately", False)
acme_client, registration = self.acme.setup_acme_client(authority)
dns_provider = issuer_options.get('dns_provider', {})
dns_provider = issuer_options.get("dns_provider", {})
if dns_provider:
dns_provider_options = dns_provider.options
credentials = json.loads(dns_provider.credentials)
current_app.logger.debug("Using DNS provider: {0}".format(dns_provider.provider_type))
dns_provider_plugin = __import__(dns_provider.provider_type, globals(), locals(), [], 1)
current_app.logger.debug(
"Using DNS provider: {0}".format(dns_provider.provider_type)
)
dns_provider_plugin = __import__(
dns_provider.provider_type, globals(), locals(), [], 1
)
account_number = credentials.get("account_id")
provider_type = dns_provider.provider_type
if provider_type == "route53" and not account_number:
error = "Route53 DNS Provider {} does not have an account number configured.".format(dns_provider.name)
error = "Route53 DNS Provider {} does not have an account number configured.".format(
dns_provider.name
)
current_app.logger.error(error)
raise InvalidConfiguration(error)
else:

@@ -558,16 +633,29 @@ class ACMEIssuerPlugin(IssuerPlugin):
else:
authz_domains.append(d.value)
dns_authorization = authorization_service.create(account_number, authz_domains,
provider_type)
dns_authorization = authorization_service.create(
account_number, authz_domains, provider_type
)
# Return id of the DNS Authorization
return None, None, dns_authorization.id
authorizations = self.acme.get_authorizations(acme_client, account_number, domains, dns_provider_plugin,
dns_provider_options)
self.acme.finalize_authorizations(acme_client, account_number, dns_provider_plugin, authorizations,
dns_provider_options)
pem_certificate, pem_certificate_chain = self.acme.request_certificate(acme_client, authorizations, csr)
authorizations = self.acme.get_authorizations(
acme_client,
account_number,
domains,
dns_provider_plugin,
dns_provider_options,
)
self.acme.finalize_authorizations(
acme_client,
account_number,
dns_provider_plugin,
authorizations,
dns_provider_options,
)
pem_certificate, pem_certificate_chain = self.acme.request_certificate(
acme_client, authorizations, csr
)
# TODO add external ID (if possible)
return pem_certificate, pem_certificate_chain, None

@@ -580,18 +668,18 @@ class ACMEIssuerPlugin(IssuerPlugin):
:param options:
:return:
"""
role = {'username': '', 'password': '', 'name': 'acme'}
plugin_options = options.get('plugin', {}).get('plugin_options')
role = {"username": "", "password": "", "name": "acme"}
plugin_options = options.get("plugin", {}).get("plugin_options")
if not plugin_options:
error = "Invalid options for lemur_acme plugin: {}".format(options)
current_app.logger.error(error)
raise InvalidConfiguration(error)
# Define static acme_root based off configuration variable by default. However, if user has passed a
# certificate, use this certificate as the root.
acme_root = current_app.config.get('ACME_ROOT')
acme_root = current_app.config.get("ACME_ROOT")
for option in plugin_options:
if option.get('name') == 'certificate':
acme_root = option.get('value')
if option.get("name") == "certificate":
acme_root = option.get("value")
return acme_root, "", [role]
def cancel_ordered_certificate(self, pending_cert, **kwargs):
@@ -3,7 +3,7 @@ import time
from lemur.plugins.lemur_aws.sts import sts_client
@sts_client('route53')
@sts_client("route53")
def wait_for_dns_change(change_id, client=None):
_, change_id = change_id

@@ -14,7 +14,7 @@ def wait_for_dns_change(change_id, client=None):
time.sleep(5)
@sts_client('route53')
@sts_client("route53")
def find_zone_id(domain, client=None):
paginator = client.get_paginator("list_hosted_zones")
zones = []

@@ -25,34 +25,35 @@ def find_zone_id(domain, client=None):
zones.append((zone["Name"], zone["Id"]))
if not zones:
raise ValueError(
"Unable to find a Route53 hosted zone for {}".format(domain)
)
raise ValueError("Unable to find a Route53 hosted zone for {}".format(domain))
return zones[0][1]
@sts_client('route53')
@sts_client("route53")
def get_zones(client=None):
paginator = client.get_paginator("list_hosted_zones")
zones = []
for page in paginator.paginate():
for zone in page["HostedZones"]:
zones.append(zone["Name"][:-1]) # We need [:-1] to strip out the trailing dot.
zones.append(
zone["Name"][:-1]
) # We need [:-1] to strip out the trailing dot.
return zones
@sts_client('route53')
@sts_client("route53")
def change_txt_record(action, zone_id, domain, value, client=None):
current_txt_records = []
try:
current_records = client.list_resource_record_sets(
HostedZoneId=zone_id,
StartRecordName=domain,
StartRecordType='TXT',
MaxItems="1")["ResourceRecordSets"]
StartRecordType="TXT",
MaxItems="1",
)["ResourceRecordSets"]
for record in current_records:
if record.get('Type') == 'TXT':
if record.get("Type") == "TXT":
current_txt_records.extend(record.get("ResourceRecords", []))
except Exception as e:
# Current Resource Record does not exist

@@ -72,7 +73,9 @@ def change_txt_record(action, zone_id, domain, value, client=None):
# If we want to delete one record out of many, we'll update the record to not include the deleted value instead.
# This allows us to support concurrent issuance.
current_txt_records = [
record for record in current_txt_records if not (record.get('Value') == '"{}"'.format(value))
record
for record in current_txt_records
if not (record.get("Value") == '"{}"'.format(value))
]
action = "UPSERT"

@@ -87,10 +90,10 @@ def change_txt_record(action, zone_id, domain, value, client=None):
"Type": "TXT",
"TTL": 300,
"ResourceRecords": current_txt_records,
}
},
}
]
}
},
)
return response["ChangeInfo"]["Id"]

@@ -98,11 +101,7 @@
def create_txt_record(host, value, account_number):
zone_id = find_zone_id(host, account_number=account_number)
change_id = change_txt_record(
"UPSERT",
zone_id,
host,
value,
account_number=account_number
"UPSERT", zone_id, host, value, account_number=account_number
)
return zone_id, change_id

@@ -113,11 +112,7 @@ def delete_txt_record(change_ids, account_number, host, value):
zone_id, _ = change_id
try:
change_txt_record(
"DELETE",
zone_id,
host,
value,
account_number=account_number
"DELETE", zone_id, host, value, account_number=account_number
)
except Exception as e:
if "but it was not found" in e.response.get("Error", {}).get("Message"):
@@ -6,8 +6,7 @@ from lemur.plugins.lemur_acme import plugin
class TestAcme(unittest.TestCase):
@patch('lemur.plugins.lemur_acme.plugin.dns_provider_service')
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
def setUp(self, mock_dns_provider_service):
self.ACMEIssuerPlugin = plugin.ACMEIssuerPlugin()
self.acme = plugin.AcmeHandler()

@@ -15,14 +14,17 @@ class TestAcme(unittest.TestCase):
mock_dns_provider.name = "cloudflare"
mock_dns_provider.credentials = "{}"
mock_dns_provider.provider_type = "cloudflare"
self.acme.dns_providers_for_domain = {"www.test.com": [mock_dns_provider],
"test.fakedomain.net": [mock_dns_provider]}
self.acme.dns_providers_for_domain = {
"www.test.com": [mock_dns_provider],
"test.fakedomain.net": [mock_dns_provider],
}
@patch('lemur.plugins.lemur_acme.plugin.len', return_value=1)
@patch("lemur.plugins.lemur_acme.plugin.len", return_value=1)
def test_find_dns_challenge(self, mock_len):
assert mock_len
from acme import challenges
c = challenges.DNS01()
mock_authz = Mock()

@@ -37,11 +39,13 @@ class TestAcme(unittest.TestCase):
a = plugin.AuthorizationRecord("host", "authz", "challenge", "id")
self.assertEqual(type(a), plugin.AuthorizationRecord)
@patch('acme.client.Client')
@patch('lemur.plugins.lemur_acme.plugin.current_app')
@patch('lemur.plugins.lemur_acme.plugin.len', return_value=1)
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge')
def test_start_dns_challenge(self, mock_find_dns_challenge, mock_len, mock_app, mock_acme):
@patch("acme.client.Client")
@patch("lemur.plugins.lemur_acme.plugin.current_app")
@patch("lemur.plugins.lemur_acme.plugin.len", return_value=1)
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge")
def test_start_dns_challenge(
self, mock_find_dns_challenge, mock_len, mock_app, mock_acme
):
assert mock_len
mock_order = Mock()
mock_app.logger.debug = Mock()

@@ -49,6 +53,7 @@ class TestAcme(unittest.TestCase):
mock_authz.body.resolved_combinations = []
mock_entry = MagicMock()
from acme import challenges
c = challenges.DNS01()
mock_entry.chall = TestAcme.test_complete_dns_challenge_fail
mock_authz.body.resolved_combinations.append(mock_entry)

@@ -60,13 +65,17 @@ class TestAcme(unittest.TestCase):
iterable = mock_find_dns_challenge.return_value
iterator = iter(values)
iterable.__iter__.return_value = iterator
result = self.acme.start_dns_challenge(mock_acme, "accountid", "host", mock_dns_provider, mock_order, {})
result = self.acme.start_dns_challenge(
mock_acme, "accountid", "host", mock_dns_provider, mock_order, {}
)
self.assertEqual(type(result), plugin.AuthorizationRecord)
@patch('acme.client.Client')
@patch('lemur.plugins.lemur_acme.plugin.current_app')
@patch('lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change')
def test_complete_dns_challenge_success(self, mock_wait_for_dns_change, mock_current_app, mock_acme):
@patch("acme.client.Client")
@patch("lemur.plugins.lemur_acme.plugin.current_app")
@patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change")
def test_complete_dns_challenge_success(
self, mock_wait_for_dns_change, mock_current_app, mock_acme
):
mock_dns_provider = Mock()
mock_dns_provider.wait_for_dns_change = Mock(return_value=True)
mock_authz = Mock()

@@ -84,10 +93,12 @@ class TestAcme(unittest.TestCase):
mock_authz.dns_challenge.append(dns_challenge)
self.acme.complete_dns_challenge(mock_acme, mock_authz)
@patch('acme.client.Client')
@patch('lemur.plugins.lemur_acme.plugin.current_app')
@patch('lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change')
def test_complete_dns_challenge_fail(self, mock_wait_for_dns_change, mock_current_app, mock_acme):
@patch("acme.client.Client")
@patch("lemur.plugins.lemur_acme.plugin.current_app")
@patch("lemur.plugins.lemur_acme.cloudflare.wait_for_dns_change")
def test_complete_dns_challenge_fail(
self, mock_wait_for_dns_change, mock_current_app, mock_acme
):
mock_dns_provider = Mock()
mock_dns_provider.wait_for_dns_change = Mock(return_value=True)

@@ -105,16 +116,22 @@ class TestAcme(unittest.TestCase):
dns_challenge = Mock()
mock_authz.dns_challenge.append(dns_challenge)
self.assertRaises(
ValueError,
self.acme.complete_dns_challenge(mock_acme, mock_authz)
ValueError, self.acme.complete_dns_challenge(mock_acme, mock_authz)
)
@patch('acme.client.Client')
@patch('OpenSSL.crypto', return_value="mock_cert")
@patch('josepy.util.ComparableX509')
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge')
@patch('lemur.plugins.lemur_acme.plugin.current_app')
def test_request_certificate(self, mock_current_app, mock_find_dns_challenge, mock_jose, mock_crypto, mock_acme):
@patch("acme.client.Client")
@patch("OpenSSL.crypto", return_value="mock_cert")
@patch("josepy.util.ComparableX509")
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.find_dns_challenge")
@patch("lemur.plugins.lemur_acme.plugin.current_app")
def test_request_certificate(
self,
mock_current_app,
mock_find_dns_challenge,
mock_jose,
mock_crypto,
mock_acme,
):
mock_cert_response = Mock()
mock_cert_response.body = "123"
mock_cert_response_full = [mock_cert_response, True]

@@ -124,7 +141,7 @@ class TestAcme(unittest.TestCase):
mock_authz_record.authz = Mock()
mock_authz.append(mock_authz_record)
mock_acme.fetch_chain = Mock(return_value="mock_chain")
mock_crypto.dump_certificate = Mock(return_value=b'chain')
mock_crypto.dump_certificate = Mock(return_value=b"chain")
mock_order = Mock()
self.acme.request_certificate(mock_acme, [], mock_order)

@@ -134,8 +151,8 @@ class TestAcme(unittest.TestCase):
||||
@ -134,8 +151,8 @@ class TestAcme(unittest.TestCase):
|
||||
with self.assertRaises(Exception):
|
||||
self.acme.setup_acme_client(mock_authority)
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.BackwardsCompatibleClientV2')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.current_app')
|
||||
@patch("lemur.plugins.lemur_acme.plugin.BackwardsCompatibleClientV2")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
def test_setup_acme_client_success(self, mock_current_app, mock_acme):
|
||||
mock_authority = Mock()
|
||||
mock_authority.options = '[{"name": "mock_name", "value": "mock_value"}]'
|
||||
@ -150,31 +167,29 @@ class TestAcme(unittest.TestCase):
|
||||
assert result_client
|
||||
assert result_registration
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.current_app')
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
def test_get_domains_single(self, mock_current_app):
|
||||
options = {
|
||||
"common_name": "test.netflix.net"
|
||||
}
|
||||
options = {"common_name": "test.netflix.net"}
|
||||
result = self.acme.get_domains(options)
|
||||
self.assertEqual(result, [options["common_name"]])
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.current_app')
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
def test_get_domains_multiple(self, mock_current_app):
|
||||
options = {
|
||||
"common_name": "test.netflix.net",
|
||||
"extensions": {
|
||||
"sub_alt_names": {
|
||||
"names": [
|
||||
"test2.netflix.net",
|
||||
"test3.netflix.net"
|
||||
]
|
||||
}
|
||||
}
|
||||
"sub_alt_names": {"names": ["test2.netflix.net", "test3.netflix.net"]}
|
||||
},
|
||||
}
|
||||
result = self.acme.get_domains(options)
|
||||
self.assertEqual(result, [options["common_name"], "test2.netflix.net", "test3.netflix.net"])
|
||||
self.assertEqual(
|
||||
result, [options["common_name"], "test2.netflix.net", "test3.netflix.net"]
|
||||
)
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.start_dns_challenge', return_value="test")
|
||||
@patch(
|
||||
"lemur.plugins.lemur_acme.plugin.AcmeHandler.start_dns_challenge",
|
||||
return_value="test",
|
||||
)
|
||||
def test_get_authorizations(self, mock_start_dns_challenge):
|
||||
mock_order = Mock()
|
||||
mock_order.body.identifiers = []
|
||||
@ -183,10 +198,15 @@ class TestAcme(unittest.TestCase):
|
||||
mock_order_info = Mock()
|
||||
mock_order_info.account_number = 1
|
||||
mock_order_info.domains = ["test.fakedomain.net"]
|
||||
result = self.acme.get_authorizations("acme_client", mock_order, mock_order_info)
|
||||
result = self.acme.get_authorizations(
|
||||
"acme_client", mock_order, mock_order_info
|
||||
)
|
||||
self.assertEqual(result, ["test"])
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.complete_dns_challenge', return_value="test")
|
||||
@patch(
|
||||
"lemur.plugins.lemur_acme.plugin.AcmeHandler.complete_dns_challenge",
|
||||
return_value="test",
|
||||
)
|
||||
def test_finalize_authorizations(self, mock_complete_dns_challenge):
|
||||
mock_authz = []
|
||||
mock_authz_record = MagicMock()
|
||||
@ -202,28 +222,28 @@ class TestAcme(unittest.TestCase):
|
||||
result = self.acme.finalize_authorizations(mock_acme_client, mock_authz)
|
||||
self.assertEqual(result, mock_authz)
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.current_app')
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
def test_create_authority(self, mock_current_app):
|
||||
mock_current_app.config = Mock()
|
||||
options = {
|
||||
"plugin": {
|
||||
"plugin_options": [{
|
||||
"name": "certificate",
|
||||
"value": "123"
|
||||
}]
|
||||
}
|
||||
"plugin": {"plugin_options": [{"name": "certificate", "value": "123"}]}
|
||||
}
|
||||
acme_root, b, role = self.ACMEIssuerPlugin.create_authority(options)
|
||||
self.assertEqual(acme_root, "123")
|
||||
self.assertEqual(b, "")
|
||||
self.assertEqual(role, [{'username': '', 'password': '', 'name': 'acme'}])
|
||||
self.assertEqual(role, [{"username": "", "password": "", "name": "acme"}])
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.current_app')
|
||||
@patch('lemur.plugins.lemur_acme.dyn.current_app')
|
||||
@patch('lemur.plugins.lemur_acme.cloudflare.current_app')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.dns_provider_service')
|
||||
def test_get_dns_provider(self, mock_dns_provider_service, mock_current_app_cloudflare, mock_current_app_dyn,
|
||||
mock_current_app):
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
@patch("lemur.plugins.lemur_acme.dyn.current_app")
|
||||
@patch("lemur.plugins.lemur_acme.cloudflare.current_app")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
|
||||
def test_get_dns_provider(
|
||||
self,
|
||||
mock_dns_provider_service,
|
||||
mock_current_app_cloudflare,
|
||||
mock_current_app_dyn,
|
||||
mock_current_app,
|
||||
):
|
||||
provider = plugin.ACMEIssuerPlugin()
|
||||
route53 = provider.get_dns_provider("route53")
|
||||
assert route53
|
||||
@ -232,16 +252,23 @@ class TestAcme(unittest.TestCase):
|
||||
dyn = provider.get_dns_provider("dyn")
|
||||
assert dyn
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.current_app')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.authorization_service')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.dns_provider_service')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate')
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.authorization_service")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate")
|
||||
def test_get_ordered_certificate(
|
||||
self, mock_request_certificate, mock_finalize_authorizations, mock_get_authorizations,
|
||||
mock_dns_provider_service, mock_authorization_service, mock_current_app, mock_acme):
|
||||
self,
|
||||
mock_request_certificate,
|
||||
mock_finalize_authorizations,
|
||||
mock_get_authorizations,
|
||||
mock_dns_provider_service,
|
||||
mock_authorization_service,
|
||||
mock_current_app,
|
||||
mock_acme,
|
||||
):
|
||||
mock_client = Mock()
|
||||
mock_acme.return_value = (mock_client, "")
|
||||
mock_request_certificate.return_value = ("pem_certificate", "chain")
|
||||
@ -253,24 +280,26 @@ class TestAcme(unittest.TestCase):
|
||||
provider.get_dns_provider = Mock()
|
||||
result = provider.get_ordered_certificate(mock_cert)
|
||||
self.assertEqual(
|
||||
result,
|
||||
{
|
||||
'body': "pem_certificate",
|
||||
'chain': "chain",
|
||||
'external_id': "1"
|
||||
}
|
||||
result, {"body": "pem_certificate", "chain": "chain", "external_id": "1"}
|
||||
)
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.current_app')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.authorization_service')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.dns_provider_service')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate')
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.authorization_service")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate")
|
||||
def test_get_ordered_certificates(
|
||||
self, mock_request_certificate, mock_finalize_authorizations, mock_get_authorizations,
|
||||
mock_dns_provider_service, mock_authorization_service, mock_current_app, mock_acme):
|
||||
self,
|
||||
mock_request_certificate,
|
||||
mock_finalize_authorizations,
|
||||
mock_get_authorizations,
|
||||
mock_dns_provider_service,
|
||||
mock_authorization_service,
|
||||
mock_current_app,
|
||||
mock_acme,
|
||||
):
|
||||
mock_client = Mock()
|
||||
mock_acme.return_value = (mock_client, "")
|
||||
mock_request_certificate.return_value = ("pem_certificate", "chain")
|
||||
@ -285,19 +314,32 @@ class TestAcme(unittest.TestCase):
|
||||
provider.get_dns_provider = Mock()
|
||||
result = provider.get_ordered_certificates([mock_cert, mock_cert2])
|
||||
self.assertEqual(len(result), 2)
|
||||
self.assertEqual(result[0]['cert'], {'body': 'pem_certificate', 'chain': 'chain', 'external_id': '1'})
|
||||
self.assertEqual(result[1]['cert'], {'body': 'pem_certificate', 'chain': 'chain', 'external_id': '2'})
|
||||
self.assertEqual(
|
||||
result[0]["cert"],
|
||||
{"body": "pem_certificate", "chain": "chain", "external_id": "1"},
|
||||
)
|
||||
self.assertEqual(
|
||||
result[1]["cert"],
|
||||
{"body": "pem_certificate", "chain": "chain", "external_id": "2"},
|
||||
)
|
||||
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.dns_provider_service')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.current_app')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate')
|
||||
@patch('lemur.plugins.lemur_acme.plugin.authorization_service')
|
||||
def test_create_certificate(self, mock_authorization_service, mock_request_certificate,
|
||||
mock_finalize_authorizations, mock_get_authorizations,
|
||||
mock_current_app, mock_dns_provider_service, mock_acme):
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.setup_acme_client")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.dns_provider_service")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.current_app")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.get_authorizations")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.finalize_authorizations")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.AcmeHandler.request_certificate")
|
||||
@patch("lemur.plugins.lemur_acme.plugin.authorization_service")
|
||||
def test_create_certificate(
|
||||
self,
|
||||
mock_authorization_service,
|
||||
mock_request_certificate,
|
||||
mock_finalize_authorizations,
|
||||
mock_get_authorizations,
|
||||
mock_current_app,
|
||||
mock_dns_provider_service,
|
||||
mock_acme,
|
||||
):
|
||||
provider = plugin.ACMEIssuerPlugin()
|
||||
mock_authority = Mock()
|
||||
|
||||
@ -310,9 +352,9 @@ class TestAcme(unittest.TestCase):
|
||||
mock_dns_provider_service.get.return_value = mock_dns_provider
|
||||
|
||||
issuer_options = {
|
||||
'authority': mock_authority,
|
||||
'dns_provider': mock_dns_provider,
|
||||
"common_name": "test.netflix.net"
|
||||
"authority": mock_authority,
|
||||
"dns_provider": mock_dns_provider,
|
||||
"common_name": "test.netflix.net",
|
||||
}
|
||||
csr = "123"
|
||||
mock_request_certificate.return_value = ("pem_certificate", "chain")
|
||||
|
@ -1,6 +1,5 @@
"""Set the version information."""
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"
@ -7,13 +7,13 @@ from flask import current_app


class ADCSIssuerPlugin(IssuerPlugin):
title = 'ADCS'
slug = 'adcs-issuer'
description = 'Enables the creation of certificates by ADCS (Active Directory Certificate Services)'
title = "ADCS"
slug = "adcs-issuer"
description = "Enables the creation of certificates by ADCS (Active Directory Certificate Services)"
version = ADCS.VERSION

author = 'sirferl'
author_url = 'https://github.com/sirferl/lemur'
author = "sirferl"
author_url = "https://github.com/sirferl/lemur"

def __init__(self, *args, **kwargs):
"""Initialize the issuer with the appropriate details."""
@ -30,66 +30,80 @@ class ADCSIssuerPlugin(IssuerPlugin):
|
||||
:param options:
|
||||
:return:
|
||||
"""
|
||||
adcs_root = current_app.config.get('ADCS_ROOT')
|
||||
adcs_issuing = current_app.config.get('ADCS_ISSUING')
|
||||
role = {'username': '', 'password': '', 'name': 'adcs'}
|
||||
adcs_root = current_app.config.get("ADCS_ROOT")
|
||||
adcs_issuing = current_app.config.get("ADCS_ISSUING")
|
||||
role = {"username": "", "password": "", "name": "adcs"}
|
||||
return adcs_root, adcs_issuing, [role]
|
||||
|
||||
def create_certificate(self, csr, issuer_options):
|
||||
adcs_server = current_app.config.get('ADCS_SERVER')
|
||||
adcs_user = current_app.config.get('ADCS_USER')
|
||||
adcs_pwd = current_app.config.get('ADCS_PWD')
|
||||
adcs_auth_method = current_app.config.get('ADCS_AUTH_METHOD')
|
||||
adcs_template = current_app.config.get('ADCS_TEMPLATE')
|
||||
ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method)
|
||||
adcs_server = current_app.config.get("ADCS_SERVER")
|
||||
adcs_user = current_app.config.get("ADCS_USER")
|
||||
adcs_pwd = current_app.config.get("ADCS_PWD")
|
||||
adcs_auth_method = current_app.config.get("ADCS_AUTH_METHOD")
|
||||
adcs_template = current_app.config.get("ADCS_TEMPLATE")
|
||||
ca_server = Certsrv(
|
||||
adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method
|
||||
)
|
||||
current_app.logger.info("Requesting CSR: {0}".format(csr))
|
||||
current_app.logger.info("Issuer options: {0}".format(issuer_options))
|
||||
cert, req_id = ca_server.get_cert(csr, adcs_template, encoding='b64').decode('utf-8').replace('\r\n', '\n')
|
||||
chain = ca_server.get_ca_cert(encoding='b64').decode('utf-8').replace('\r\n', '\n')
|
||||
cert, req_id = (
|
||||
ca_server.get_cert(csr, adcs_template, encoding="b64")
|
||||
.decode("utf-8")
|
||||
.replace("\r\n", "\n")
|
||||
)
|
||||
chain = (
|
||||
ca_server.get_ca_cert(encoding="b64").decode("utf-8").replace("\r\n", "\n")
|
||||
)
|
||||
return cert, chain, req_id
|
||||
|
||||
def revoke_certificate(self, certificate, comments):
|
||||
raise NotImplementedError('Not implemented\n', self, certificate, comments)
|
||||
raise NotImplementedError("Not implemented\n", self, certificate, comments)
|
||||
|
||||
def get_ordered_certificate(self, order_id):
|
||||
raise NotImplementedError('Not implemented\n', self, order_id)
|
||||
raise NotImplementedError("Not implemented\n", self, order_id)
|
||||
|
||||
def canceled_ordered_certificate(self, pending_cert, **kwargs):
|
||||
raise NotImplementedError('Not implemented\n', self, pending_cert, **kwargs)
|
||||
raise NotImplementedError("Not implemented\n", self, pending_cert, **kwargs)
|
||||
|
||||
|
||||
class ADCSSourcePlugin(SourcePlugin):
title = 'ADCS'
slug = 'adcs-source'
description = 'Enables the collection of certificates'
title = "ADCS"
slug = "adcs-source"
description = "Enables the collection of certificates"
|
||||
version = ADCS.VERSION
|
||||
|
||||
author = 'sirferl'
|
||||
author_url = 'https://github.com/sirferl/lemur'
|
||||
author = "sirferl"
|
||||
author_url = "https://github.com/sirferl/lemur"
|
||||
options = [
|
||||
{
|
||||
'name': 'dummy',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'validation': '/^[0-9]{12,12}$/',
|
||||
'helpMessage': 'Just to prevent error'
|
||||
"name": "dummy",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"validation": "/^[0-9]{12,12}$/",
|
||||
"helpMessage": "Just to prevent error",
|
||||
}
|
||||
]
|
||||
|
||||
def get_certificates(self, options, **kwargs):
|
||||
adcs_server = current_app.config.get('ADCS_SERVER')
|
||||
adcs_user = current_app.config.get('ADCS_USER')
|
||||
adcs_pwd = current_app.config.get('ADCS_PWD')
|
||||
adcs_auth_method = current_app.config.get('ADCS_AUTH_METHOD')
|
||||
adcs_start = current_app.config.get('ADCS_START')
|
||||
adcs_stop = current_app.config.get('ADCS_STOP')
|
||||
ca_server = Certsrv(adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method)
|
||||
adcs_server = current_app.config.get("ADCS_SERVER")
|
||||
adcs_user = current_app.config.get("ADCS_USER")
|
||||
adcs_pwd = current_app.config.get("ADCS_PWD")
|
||||
adcs_auth_method = current_app.config.get("ADCS_AUTH_METHOD")
|
||||
adcs_start = current_app.config.get("ADCS_START")
|
||||
adcs_stop = current_app.config.get("ADCS_STOP")
|
||||
ca_server = Certsrv(
|
||||
adcs_server, adcs_user, adcs_pwd, auth_method=adcs_auth_method
|
||||
)
|
||||
out_certlist = []
|
||||
for id in range(adcs_start, adcs_stop):
|
||||
try:
|
||||
cert = ca_server.get_existing_cert(id, encoding='b64').decode('utf-8').replace('\r\n', '\n')
|
||||
cert = (
|
||||
ca_server.get_existing_cert(id, encoding="b64")
|
||||
.decode("utf-8")
|
||||
.replace("\r\n", "\n")
|
||||
)
|
||||
except Exception as err:
|
||||
if '{0}'.format(err).find("CERTSRV_E_PROPERTY_EMPTY"):
|
||||
if "{0}".format(err).find("CERTSRV_E_PROPERTY_EMPTY"):
|
||||
# this error indicates end of certificate list(?), so we stop
|
||||
break
|
||||
else:
|
||||
@ -101,16 +115,16 @@ class ADCSSourcePlugin(SourcePlugin):
|
||||
# loop through extensions to see if we find "TLS Web Server Authentication"
|
||||
for e_id in range(0, pubkey.get_extension_count() - 1):
|
||||
try:
|
||||
extension = '{0}'.format(pubkey.get_extension(e_id))
|
||||
extension = "{0}".format(pubkey.get_extension(e_id))
|
||||
except Exception:
extension = ''
extension = ""
|
||||
if extension.find("TLS Web Server Authentication") != -1:
|
||||
out_certlist.append({
|
||||
'name': format(pubkey.get_subject().CN),
|
||||
'body': cert})
|
||||
out_certlist.append(
|
||||
{"name": format(pubkey.get_subject().CN), "body": cert}
|
||||
)
|
||||
break
|
||||
return out_certlist
|
||||
|
||||
def get_endpoints(self, options, **kwargs):
|
||||
# There are no endpoints in the ADCS
|
||||
raise NotImplementedError('Not implemented\n', self, options, **kwargs)
|
||||
raise NotImplementedError("Not implemented\n", self, options, **kwargs)
|
||||
|
@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"
@ -26,44 +26,41 @@ def millis_since_epoch():
|
||||
|
||||
|
||||
class AtlasMetricPlugin(MetricPlugin):
|
||||
title = 'Atlas'
|
||||
slug = 'atlas-metric'
|
||||
description = 'Adds support for sending key metrics to Atlas'
|
||||
title = "Atlas"
|
||||
slug = "atlas-metric"
|
||||
description = "Adds support for sending key metrics to Atlas"
|
||||
version = atlas.VERSION
|
||||
|
||||
author = 'Kevin Glisson'
|
||||
author_url = 'https://github.com/netflix/lemur'
|
||||
author = "Kevin Glisson"
|
||||
author_url = "https://github.com/netflix/lemur"
|
||||
|
||||
options = [
|
||||
{
|
||||
'name': 'sidecar_host',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'help_message': 'If no host is provided localhost is assumed',
|
||||
'default': 'localhost'
|
||||
"name": "sidecar_host",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"help_message": "If no host is provided localhost is assumed",
|
||||
"default": "localhost",
|
||||
},
|
||||
{
|
||||
'name': 'sidecar_port',
|
||||
'type': 'int',
|
||||
'required': False,
|
||||
'default': 8078
|
||||
}
|
||||
{"name": "sidecar_port", "type": "int", "required": False, "default": 8078},
|
||||
]
|
||||
|
||||
metric_data = {}
|
||||
sidecar_host = None
|
||||
sidecar_port = None
|
||||
|
||||
def submit(self, metric_name, metric_type, metric_value, metric_tags=None, options=None):
|
||||
def submit(
|
||||
self, metric_name, metric_type, metric_value, metric_tags=None, options=None
|
||||
):
|
||||
if not options:
|
||||
options = self.options
|
||||
|
||||
# TODO marshmallow schema?
|
||||
valid_types = ['COUNTER', 'GAUGE', 'TIMER']
|
||||
valid_types = ["COUNTER", "GAUGE", "TIMER"]
|
||||
if metric_type.upper() not in valid_types:
|
||||
raise Exception(
|
||||
"Invalid Metric Type for Atlas: '{metric}' choose from: {options}".format(
|
||||
metric=metric_type, options=','.join(valid_types)
|
||||
metric=metric_type, options=",".join(valid_types)
|
||||
)
|
||||
)
|
||||
|
||||
@ -73,31 +70,35 @@ class AtlasMetricPlugin(MetricPlugin):
|
||||
"Invalid Metric Tags for Atlas: Tags must be in dict format"
|
||||
)
|
||||
|
||||
if metric_value == "NaN" or isinstance(metric_value, int) or isinstance(metric_value, float):
|
||||
self.metric_data['value'] = metric_value
|
||||
if (
|
||||
metric_value == "NaN"
|
||||
or isinstance(metric_value, int)
|
||||
or isinstance(metric_value, float)
|
||||
):
|
||||
self.metric_data["value"] = metric_value
|
||||
else:
|
||||
raise Exception(
|
||||
"Invalid Metric Value for Atlas: Metric must be a number"
|
||||
)
|
||||
raise Exception("Invalid Metric Value for Atlas: Metric must be a number")
|
||||
|
||||
self.metric_data['type'] = metric_type.upper()
|
||||
self.metric_data['name'] = str(metric_name)
|
||||
self.metric_data['tags'] = metric_tags
|
||||
self.metric_data['timestamp'] = millis_since_epoch()
|
||||
self.metric_data["type"] = metric_type.upper()
|
||||
self.metric_data["name"] = str(metric_name)
|
||||
self.metric_data["tags"] = metric_tags
|
||||
self.metric_data["timestamp"] = millis_since_epoch()
|
||||
|
||||
self.sidecar_host = self.get_option('sidecar_host', options)
|
||||
self.sidecar_port = self.get_option('sidecar_port', options)
|
||||
self.sidecar_host = self.get_option("sidecar_host", options)
|
||||
self.sidecar_port = self.get_option("sidecar_port", options)
|
||||
|
||||
try:
|
||||
res = requests.post(
|
||||
'http://{host}:{port}/metrics'.format(
|
||||
host=self.sidecar_host,
|
||||
port=self.sidecar_port),
|
||||
data=json.dumps([self.metric_data])
|
||||
"http://{host}:{port}/metrics".format(
|
||||
host=self.sidecar_host, port=self.sidecar_port
|
||||
),
|
||||
data=json.dumps([self.metric_data]),
|
||||
)
|
||||
|
||||
if res.status_code != 200:
current_app.logger.warning("Failed to publish atlas metric. {0}".format(res.content))
current_app.logger.warning(
"Failed to publish atlas metric. {0}".format(res.content)
)
|
||||
|
||||
except ConnectionError:
|
||||
current_app.logger.warning(
|
||||
|
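For reference, the payload the Atlas plugin posts to the sidecar is a one-element JSON list built from `metric_data`; a hedged sketch of an equivalent standalone request follows (the metric name, tags, host and port are illustrative, matching the plugin's default `sidecar_host`/`sidecar_port` options):

    import json
    import time

    import requests

    metric = {
        "name": "certificates_issued",     # illustrative metric name
        "type": "COUNTER",                 # one of COUNTER, GAUGE, TIMER
        "value": 1,
        "tags": {"plugin": "lemur-acme"},  # must be a dict, per the validation above
        "timestamp": int(round(time.time() * 1000)),
    }

    res = requests.post(
        "http://localhost:8078/metrics",
        data=json.dumps([metric]),
    )
    assert res.status_code == 200
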
@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"
@ -8,16 +8,16 @@
from lemur.plugins.lemur_aws.sts import sts_client


@sts_client('ec2')
@sts_client("ec2")
def get_regions(**kwargs):
regions = kwargs['client'].describe_regions()
return [x['RegionName'] for x in regions['Regions']]
regions = kwargs["client"].describe_regions()
return [x["RegionName"] for x in regions["Regions"]]


@sts_client('ec2')
@sts_client("ec2")
def get_all_instances(**kwargs):
"""
Fetches all instance objects for a given account and region.
"""
paginator = kwargs['client'].get_paginator('describe_instances')
paginator = kwargs["client"].get_paginator("describe_instances")
return paginator.paginate()
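The `kwargs["client"]` / `kwargs.pop("client")` pattern used throughout these AWS modules relies on the `sts_client` decorator injecting a boto3 client for the requested account. A simplified sketch of that behaviour (the real decorator lives in `lemur.plugins.lemur_aws.sts` and also assumes an IAM role from `account_number`, which is omitted here):

    import functools

    import boto3

    def sts_client_sketch(service, service_type="client"):
        # Simplified stand-in: the real decorator assumes a role for
        # kwargs["account_number"] before building the client.
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                factory = boto3.client if service_type == "client" else boto3.resource
                kwargs["client"] = factory(service, region_name=kwargs.get("region"))
                return func(*args, **kwargs)
            return wrapper
        return decorator

    @sts_client_sketch("ec2")
    def get_regions_sketch(**kwargs):
        regions = kwargs["client"].describe_regions()
        return [x["RegionName"] for x in regions["Regions"]]
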
@ -27,15 +27,14 @@ def retry_throttled(exception):
|
||||
raise exception
|
||||
except Exception as e:
|
||||
current_app.logger.error("ELB retry_throttled triggered", exc_info=True)
|
||||
metrics.send('elb_retry', 'counter', 1,
|
||||
metric_tags={"exception": e})
|
||||
metrics.send("elb_retry", "counter", 1, metric_tags={"exception": e})
|
||||
sentry.captureException()
|
||||
|
||||
if isinstance(exception, botocore.exceptions.ClientError):
|
||||
if exception.response['Error']['Code'] == 'LoadBalancerNotFound':
|
||||
if exception.response["Error"]["Code"] == "LoadBalancerNotFound":
|
||||
return False
|
||||
|
||||
if exception.response['Error']['Code'] == 'CertificateNotFound':
|
||||
if exception.response["Error"]["Code"] == "CertificateNotFound":
|
||||
return False
|
||||
return True
|
||||
|
||||
@ -56,7 +55,7 @@ def is_valid(listener_tuple):
|
||||
:param listener_tuple:
|
||||
"""
|
||||
lb_port, i_port, lb_protocol, arn = listener_tuple
|
||||
if lb_protocol.lower() in ['ssl', 'https']:
|
||||
if lb_protocol.lower() in ["ssl", "https"]:
|
||||
if not arn:
|
||||
raise InvalidListener
|
||||
|
||||
@ -75,14 +74,14 @@ def get_all_elbs(**kwargs):
|
||||
while True:
|
||||
response = get_elbs(**kwargs)
|
||||
|
||||
elbs += response['LoadBalancerDescriptions']
|
||||
elbs += response["LoadBalancerDescriptions"]
|
||||
|
||||
if not response.get('NextMarker'):
|
||||
if not response.get("NextMarker"):
|
||||
return elbs
|
||||
else:
|
||||
kwargs.update(dict(Marker=response['NextMarker']))
|
||||
kwargs.update(dict(Marker=response["NextMarker"]))
|
||||
except Exception as e: # noqa
|
||||
metrics.send('get_all_elbs_error', 'counter', 1)
|
||||
metrics.send("get_all_elbs_error", "counter", 1)
|
||||
sentry.captureException()
|
||||
raise
|
||||
|
||||
@ -99,19 +98,19 @@ def get_all_elbs_v2(**kwargs):
|
||||
try:
|
||||
while True:
|
||||
response = get_elbs_v2(**kwargs)
|
||||
elbs += response['LoadBalancers']
|
||||
elbs += response["LoadBalancers"]
|
||||
|
||||
if not response.get('NextMarker'):
|
||||
if not response.get("NextMarker"):
|
||||
return elbs
|
||||
else:
|
||||
kwargs.update(dict(Marker=response['NextMarker']))
|
||||
kwargs.update(dict(Marker=response["NextMarker"]))
|
||||
except Exception as e: # noqa
|
||||
metrics.send('get_all_elbs_v2_error', 'counter', 1)
|
||||
metrics.send("get_all_elbs_v2_error", "counter", 1)
|
||||
sentry.captureException()
|
||||
raise
|
||||
|
||||
|
||||
@sts_client('elbv2')
|
||||
@sts_client("elbv2")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20)
|
||||
def get_listener_arn_from_endpoint(endpoint_name, endpoint_port, **kwargs):
|
||||
"""
|
||||
@ -121,38 +120,51 @@ def get_listener_arn_from_endpoint(endpoint_name, endpoint_port, **kwargs):
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
client = kwargs.pop('client')
|
||||
client = kwargs.pop("client")
|
||||
elbs = client.describe_load_balancers(Names=[endpoint_name])
|
||||
for elb in elbs['LoadBalancers']:
|
||||
listeners = client.describe_listeners(LoadBalancerArn=elb['LoadBalancerArn'])
|
||||
for listener in listeners['Listeners']:
|
||||
if listener['Port'] == endpoint_port:
|
||||
return listener['ListenerArn']
|
||||
for elb in elbs["LoadBalancers"]:
|
||||
listeners = client.describe_listeners(
|
||||
LoadBalancerArn=elb["LoadBalancerArn"]
|
||||
)
|
||||
for listener in listeners["Listeners"]:
|
||||
if listener["Port"] == endpoint_port:
|
||||
return listener["ListenerArn"]
|
||||
except Exception as e: # noqa
|
||||
metrics.send('get_listener_arn_from_endpoint_error', 'counter', 1,
|
||||
metric_tags={"error": e, "endpoint_name": endpoint_name, "endpoint_port": endpoint_port})
|
||||
sentry.captureException(extra={"endpoint_name": str(endpoint_name),
|
||||
"endpoint_port": str(endpoint_port)})
|
||||
metrics.send(
|
||||
"get_listener_arn_from_endpoint_error",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"error": e,
|
||||
"endpoint_name": endpoint_name,
|
||||
"endpoint_port": endpoint_port,
|
||||
},
|
||||
)
|
||||
sentry.captureException(
|
||||
extra={
|
||||
"endpoint_name": str(endpoint_name),
|
||||
"endpoint_port": str(endpoint_port),
|
||||
}
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
@sts_client('elb')
|
||||
@sts_client("elb")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20)
|
||||
def get_elbs(**kwargs):
|
||||
"""
|
||||
Fetches one page of elb objects for a given account and region.
|
||||
"""
|
||||
try:
|
||||
client = kwargs.pop('client')
|
||||
client = kwargs.pop("client")
|
||||
return client.describe_load_balancers(**kwargs)
|
||||
except Exception as e: # noqa
|
||||
metrics.send('get_elbs_error', 'counter', 1,
|
||||
metric_tags={"error": e})
|
||||
metrics.send("get_elbs_error", "counter", 1, metric_tags={"error": e})
|
||||
sentry.captureException()
|
||||
raise
|
||||
|
||||
|
||||
@sts_client('elbv2')
|
||||
@sts_client("elbv2")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20)
|
||||
def get_elbs_v2(**kwargs):
|
||||
"""
|
||||
@ -162,16 +174,15 @@ def get_elbs_v2(**kwargs):
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
client = kwargs.pop('client')
|
||||
client = kwargs.pop("client")
|
||||
return client.describe_load_balancers(**kwargs)
|
||||
except Exception as e: # noqa
|
||||
metrics.send('get_elbs_v2_error', 'counter', 1,
|
||||
metric_tags={"error": e})
|
||||
metrics.send("get_elbs_v2_error", "counter", 1, metric_tags={"error": e})
|
||||
sentry.captureException()
|
||||
raise
|
||||
|
||||
|
||||
@sts_client('elbv2')
|
||||
@sts_client("elbv2")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20)
|
||||
def describe_listeners_v2(**kwargs):
|
||||
"""
|
||||
@ -181,16 +192,17 @@ def describe_listeners_v2(**kwargs):
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
client = kwargs.pop('client')
|
||||
client = kwargs.pop("client")
|
||||
return client.describe_listeners(**kwargs)
|
||||
except Exception as e: # noqa
|
||||
metrics.send('describe_listeners_v2_error', 'counter', 1,
|
||||
metric_tags={"error": e})
|
||||
metrics.send(
|
||||
"describe_listeners_v2_error", "counter", 1, metric_tags={"error": e}
|
||||
)
|
||||
sentry.captureException()
|
||||
raise
|
||||
|
||||
|
||||
@sts_client('elb')
|
||||
@sts_client("elb")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20)
|
||||
def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs):
|
||||
"""
|
||||
@ -201,17 +213,30 @@ def describe_load_balancer_policies(load_balancer_name, policy_names, **kwargs):
|
||||
"""
|
||||
|
||||
try:
|
||||
return kwargs['client'].describe_load_balancer_policies(LoadBalancerName=load_balancer_name,
|
||||
PolicyNames=policy_names)
|
||||
return kwargs["client"].describe_load_balancer_policies(
|
||||
LoadBalancerName=load_balancer_name, PolicyNames=policy_names
|
||||
)
|
||||
except Exception as e: # noqa
|
||||
metrics.send('describe_load_balancer_policies_error', 'counter', 1,
|
||||
metric_tags={"load_balancer_name": load_balancer_name, "policy_names": policy_names, "error": e})
|
||||
sentry.captureException(extra={"load_balancer_name": str(load_balancer_name),
|
||||
"policy_names": str(policy_names)})
|
||||
metrics.send(
|
||||
"describe_load_balancer_policies_error",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={
|
||||
"load_balancer_name": load_balancer_name,
|
||||
"policy_names": policy_names,
|
||||
"error": e,
|
||||
},
|
||||
)
|
||||
sentry.captureException(
|
||||
extra={
|
||||
"load_balancer_name": str(load_balancer_name),
|
||||
"policy_names": str(policy_names),
|
||||
}
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
@sts_client('elbv2')
|
||||
@sts_client("elbv2")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20)
|
||||
def describe_ssl_policies_v2(policy_names, **kwargs):
|
||||
"""
|
||||
@ -221,15 +246,19 @@ def describe_ssl_policies_v2(policy_names, **kwargs):
|
||||
:return:
|
||||
"""
|
||||
try:
|
||||
return kwargs['client'].describe_ssl_policies(Names=policy_names)
|
||||
return kwargs["client"].describe_ssl_policies(Names=policy_names)
|
||||
except Exception as e: # noqa
|
||||
metrics.send('describe_ssl_policies_v2_error', 'counter', 1,
|
||||
metric_tags={"policy_names": policy_names, "error": e})
|
||||
metrics.send(
|
||||
"describe_ssl_policies_v2_error",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"policy_names": policy_names, "error": e},
|
||||
)
|
||||
sentry.captureException(extra={"policy_names": str(policy_names)})
|
||||
raise
|
||||
|
||||
|
||||
@sts_client('elb')
|
||||
@sts_client("elb")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20)
|
||||
def describe_load_balancer_types(policies, **kwargs):
|
||||
"""
|
||||
@ -238,10 +267,12 @@ def describe_load_balancer_types(policies, **kwargs):
|
||||
:param policies:
|
||||
:return:
|
||||
"""
|
||||
return kwargs['client'].describe_load_balancer_policy_types(PolicyTypeNames=policies)
|
||||
return kwargs["client"].describe_load_balancer_policy_types(
|
||||
PolicyTypeNames=policies
|
||||
)
|
||||
|
||||
|
||||
@sts_client('elb')
|
||||
@sts_client("elb")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20)
|
||||
def attach_certificate(name, port, certificate_id, **kwargs):
|
||||
"""
|
||||
@ -253,15 +284,19 @@ def attach_certificate(name, port, certificate_id, **kwargs):
|
||||
:param certificate_id:
|
||||
"""
|
||||
try:
|
||||
return kwargs['client'].set_load_balancer_listener_ssl_certificate(LoadBalancerName=name, LoadBalancerPort=port, SSLCertificateId=certificate_id)
|
||||
return kwargs["client"].set_load_balancer_listener_ssl_certificate(
|
||||
LoadBalancerName=name,
|
||||
LoadBalancerPort=port,
|
||||
SSLCertificateId=certificate_id,
|
||||
)
|
||||
except botocore.exceptions.ClientError as e:
|
||||
if e.response['Error']['Code'] == 'LoadBalancerNotFound':
|
||||
if e.response["Error"]["Code"] == "LoadBalancerNotFound":
|
||||
current_app.logger.warning("Loadbalancer does not exist.")
|
||||
else:
|
||||
raise e
|
||||
|
||||
|
||||
@sts_client('elbv2')
|
||||
@sts_client("elbv2")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=20)
|
||||
def attach_certificate_v2(listener_arn, port, certificates, **kwargs):
|
||||
"""
|
||||
@ -273,9 +308,11 @@ def attach_certificate_v2(listener_arn, port, certificates, **kwargs):
|
||||
:param certificates:
|
||||
"""
|
||||
try:
|
||||
return kwargs['client'].modify_listener(ListenerArn=listener_arn, Port=port, Certificates=certificates)
|
||||
return kwargs["client"].modify_listener(
|
||||
ListenerArn=listener_arn, Port=port, Certificates=certificates
|
||||
)
|
||||
except botocore.exceptions.ClientError as e:
|
||||
if e.response['Error']['Code'] == 'LoadBalancerNotFound':
|
||||
if e.response["Error"]["Code"] == "LoadBalancerNotFound":
|
||||
current_app.logger.warning("Loadbalancer does not exist.")
|
||||
else:
|
||||
raise e
|
||||
|
@ -21,10 +21,10 @@ def retry_throttled(exception):
:return:
"""
if isinstance(exception, botocore.exceptions.ClientError):
if exception.response['Error']['Code'] == 'NoSuchEntity':
if exception.response["Error"]["Code"] == "NoSuchEntity":
return False

metrics.send('iam_retry', 'counter', 1)
metrics.send("iam_retry", "counter", 1, metric_tags={"exception": str(exception)})
return True
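`retry_throttled` is a predicate for the `retrying` library: returning `False` stops the retry loop immediately, anything truthy keeps retrying. A hedged sketch of how the pieces fit together (the decorated function body is illustrative):

    import botocore.exceptions
    from retrying import retry

    def retry_throttled_sketch(exception):
        # Do not retry when the entity is known to be missing; retry everything else.
        if isinstance(exception, botocore.exceptions.ClientError):
            if exception.response["Error"]["Code"] == "NoSuchEntity":
                return False
        return True

    @retry(retry_on_exception=retry_throttled_sketch, wait_fixed=2000, stop_max_attempt_number=25)
    def flaky_call():
        # A throttled AWS call raising ClientError here would be retried
        # every 2 seconds, up to 25 attempts.
        ...
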
@ -47,12 +47,12 @@ def create_arn_from_cert(account_number, region, certificate_name):
:return:
"""
return "arn:aws:iam::{account_number}:server-certificate/{certificate_name}".format(
account_number=account_number,
certificate_name=certificate_name)
account_number=account_number, certificate_name=certificate_name
)


@sts_client('iam')
@retry(retry_on_exception=retry_throttled, wait_fixed=2000)
@sts_client("iam")
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=25)
def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs):
"""
Upload a certificate to AWS
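As a concrete example of the ARN built by `create_arn_from_cert` (the account number and certificate name below are made up):

    arn = "arn:aws:iam::{account_number}:server-certificate/{certificate_name}".format(
        account_number="123456789012", certificate_name="example.com-2018"
    )
    # -> "arn:aws:iam::123456789012:server-certificate/example.com-2018"
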
@ -65,13 +65,14 @@ def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs):
|
||||
:return:
|
||||
"""
|
||||
assert isinstance(private_key, str)
|
||||
client = kwargs.pop('client')
|
||||
client = kwargs.pop("client")
|
||||
|
||||
if not path or path == '/':
|
||||
path = '/'
|
||||
if not path or path == "/":
|
||||
path = "/"
|
||||
else:
|
||||
name = name + '-' + path.strip('/')
|
||||
name = name + "-" + path.strip("/")
|
||||
|
||||
metrics.send("upload_cert", "counter", 1, metric_tags={"name": name, "path": path})
|
||||
try:
|
||||
if cert_chain:
|
||||
return client.upload_server_certificate(
|
||||
@ -79,22 +80,22 @@ def upload_cert(name, body, private_key, path, cert_chain=None, **kwargs):
|
||||
ServerCertificateName=name,
|
||||
CertificateBody=str(body),
|
||||
PrivateKey=str(private_key),
|
||||
CertificateChain=str(cert_chain)
|
||||
CertificateChain=str(cert_chain),
|
||||
)
|
||||
else:
|
||||
return client.upload_server_certificate(
|
||||
Path=path,
|
||||
ServerCertificateName=name,
|
||||
CertificateBody=str(body),
|
||||
PrivateKey=str(private_key)
|
||||
PrivateKey=str(private_key),
|
||||
)
|
||||
except botocore.exceptions.ClientError as e:
|
||||
if e.response['Error']['Code'] != 'EntityAlreadyExists':
|
||||
if e.response["Error"]["Code"] != "EntityAlreadyExists":
|
||||
raise e
|
||||
|
||||
|
||||
@sts_client('iam')
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000)
|
||||
@sts_client("iam")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=25)
|
||||
def delete_cert(cert_name, **kwargs):
|
||||
"""
|
||||
Delete a certificate from AWS
|
||||
@ -102,37 +103,40 @@ def delete_cert(cert_name, **kwargs):
|
||||
:param cert_name:
|
||||
:return:
|
||||
"""
|
||||
client = kwargs.pop('client')
|
||||
client = kwargs.pop("client")
|
||||
metrics.send("delete_cert", "counter", 1, metric_tags={"cert_name": cert_name})
|
||||
try:
|
||||
client.delete_server_certificate(ServerCertificateName=cert_name)
|
||||
except botocore.exceptions.ClientError as e:
|
||||
if e.response['Error']['Code'] != 'NoSuchEntity':
|
||||
if e.response["Error"]["Code"] != "NoSuchEntity":
|
||||
raise e
|
||||
|
||||
|
||||
@sts_client('iam')
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000)
|
||||
@sts_client("iam")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=25)
|
||||
def get_certificate(name, **kwargs):
|
||||
"""
|
||||
Retrieves an SSL certificate.
|
||||
|
||||
:return:
|
||||
"""
|
||||
client = kwargs.pop('client')
|
||||
return client.get_server_certificate(
|
||||
ServerCertificateName=name
|
||||
)['ServerCertificate']
|
||||
client = kwargs.pop("client")
|
||||
metrics.send("get_certificate", "counter", 1, metric_tags={"name": name})
|
||||
return client.get_server_certificate(ServerCertificateName=name)[
|
||||
"ServerCertificate"
|
||||
]
|
||||
|
||||
|
||||
@sts_client('iam')
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000)
|
||||
@sts_client("iam")
|
||||
@retry(retry_on_exception=retry_throttled, wait_fixed=2000, stop_max_attempt_number=25)
|
||||
def get_certificates(**kwargs):
|
||||
"""
|
||||
Fetches one page of certificate objects for a given account.
|
||||
:param kwargs:
|
||||
:return:
|
||||
"""
|
||||
client = kwargs.pop('client')
|
||||
client = kwargs.pop("client")
|
||||
metrics.send("get_certificates", "counter", 1)
|
||||
return client.list_server_certificates(**kwargs)
|
||||
|
||||
|
||||
@ -141,16 +145,26 @@ def get_all_certificates(**kwargs):
|
||||
Use STS to fetch all of the SSL certificates from a given account
|
||||
"""
|
||||
certificates = []
|
||||
account_number = kwargs.get('account_number')
|
||||
account_number = kwargs.get("account_number")
|
||||
metrics.send(
|
||||
"get_all_certificates",
|
||||
"counter",
|
||||
1,
|
||||
metric_tags={"account_number": account_number},
|
||||
)
|
||||
|
||||
while True:
|
||||
response = get_certificates(**kwargs)
|
||||
metadata = response['ServerCertificateMetadataList']
|
||||
metadata = response["ServerCertificateMetadataList"]
|
||||
|
||||
for m in metadata:
|
||||
certificates.append(get_certificate(m['ServerCertificateName'], account_number=account_number))
|
||||
certificates.append(
|
||||
get_certificate(
|
||||
m["ServerCertificateName"], account_number=account_number
|
||||
)
|
||||
)
|
||||
|
||||
if not response.get('Marker'):
|
||||
if not response.get("Marker"):
|
||||
return certificates
|
||||
else:
|
||||
kwargs.update(dict(Marker=response['Marker']))
|
||||
kwargs.update(dict(Marker=response["Marker"]))
|
||||
|
@ -40,7 +40,7 @@ from lemur.plugins.lemur_aws import iam, s3, elb, ec2


def get_region_from_dns(dns):
return dns.split('.')[-4]
return dns.split(".")[-4]
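To see why index `[-4]` picks out the region, here is the split on an illustrative classic ELB DNS name (the hostname is made up):

    dns = "my-elb-1234567890.us-east-1.elb.amazonaws.com"
    parts = dns.split(".")
    # parts == ["my-elb-1234567890", "us-east-1", "elb", "amazonaws", "com"]
    assert parts[-4] == "us-east-1"
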
def format_elb_cipher_policy_v2(policy):
|
||||
@ -52,10 +52,10 @@ def format_elb_cipher_policy_v2(policy):
|
||||
ciphers = []
|
||||
name = None
|
||||
|
||||
for descr in policy['SslPolicies']:
|
||||
name = descr['Name']
|
||||
for cipher in descr['Ciphers']:
|
||||
ciphers.append(cipher['Name'])
|
||||
for descr in policy["SslPolicies"]:
|
||||
name = descr["Name"]
|
||||
for cipher in descr["Ciphers"]:
|
||||
ciphers.append(cipher["Name"])
|
||||
|
||||
return dict(name=name, ciphers=ciphers)
|
||||
|
||||
@ -68,14 +68,14 @@ def format_elb_cipher_policy(policy):
|
||||
"""
|
||||
ciphers = []
|
||||
name = None
|
||||
for descr in policy['PolicyDescriptions']:
|
||||
for attr in descr['PolicyAttributeDescriptions']:
|
||||
if attr['AttributeName'] == 'Reference-Security-Policy':
|
||||
name = attr['AttributeValue']
|
||||
for descr in policy["PolicyDescriptions"]:
|
||||
for attr in descr["PolicyAttributeDescriptions"]:
|
||||
if attr["AttributeName"] == "Reference-Security-Policy":
|
||||
name = attr["AttributeValue"]
|
||||
continue
|
||||
|
||||
if attr['AttributeValue'] == 'true':
|
||||
ciphers.append(attr['AttributeName'])
|
||||
if attr["AttributeValue"] == "true":
|
||||
ciphers.append(attr["AttributeName"])
|
||||
|
||||
return dict(name=name, ciphers=ciphers)
|
||||
|
||||
@ -89,25 +89,31 @@ def get_elb_endpoints(account_number, region, elb_dict):
|
||||
:return:
|
||||
"""
|
||||
endpoints = []
|
||||
for listener in elb_dict['ListenerDescriptions']:
|
||||
if not listener['Listener'].get('SSLCertificateId'):
|
||||
for listener in elb_dict["ListenerDescriptions"]:
|
||||
if not listener["Listener"].get("SSLCertificateId"):
|
||||
continue
|
||||
|
||||
if listener['Listener']['SSLCertificateId'] == 'Invalid-Certificate':
|
||||
if listener["Listener"]["SSLCertificateId"] == "Invalid-Certificate":
|
||||
continue
|
||||
|
||||
endpoint = dict(
|
||||
name=elb_dict['LoadBalancerName'],
|
||||
dnsname=elb_dict['DNSName'],
|
||||
type='elb',
|
||||
port=listener['Listener']['LoadBalancerPort'],
|
||||
certificate_name=iam.get_name_from_arn(listener['Listener']['SSLCertificateId'])
|
||||
name=elb_dict["LoadBalancerName"],
|
||||
dnsname=elb_dict["DNSName"],
|
||||
type="elb",
|
||||
port=listener["Listener"]["LoadBalancerPort"],
|
||||
certificate_name=iam.get_name_from_arn(
|
||||
listener["Listener"]["SSLCertificateId"]
|
||||
),
|
||||
)
|
||||
|
||||
if listener['PolicyNames']:
|
||||
policy = elb.describe_load_balancer_policies(elb_dict['LoadBalancerName'], listener['PolicyNames'],
|
||||
account_number=account_number, region=region)
|
||||
endpoint['policy'] = format_elb_cipher_policy(policy)
|
||||
if listener["PolicyNames"]:
|
||||
policy = elb.describe_load_balancer_policies(
|
||||
elb_dict["LoadBalancerName"],
|
||||
listener["PolicyNames"],
|
||||
account_number=account_number,
|
||||
region=region,
|
||||
)
|
||||
endpoint["policy"] = format_elb_cipher_policy(policy)
|
||||
|
||||
current_app.logger.debug("Found new endpoint. Endpoint: {}".format(endpoint))
|
||||
|
||||
@ -125,24 +131,29 @@ def get_elb_endpoints_v2(account_number, region, elb_dict):
|
||||
:return:
|
||||
"""
|
||||
endpoints = []
|
||||
listeners = elb.describe_listeners_v2(account_number=account_number, region=region,
|
||||
LoadBalancerArn=elb_dict['LoadBalancerArn'])
|
||||
for listener in listeners['Listeners']:
|
||||
if not listener.get('Certificates'):
|
||||
listeners = elb.describe_listeners_v2(
|
||||
account_number=account_number,
|
||||
region=region,
|
||||
LoadBalancerArn=elb_dict["LoadBalancerArn"],
|
||||
)
|
||||
for listener in listeners["Listeners"]:
|
||||
if not listener.get("Certificates"):
|
||||
continue
|
||||
|
||||
for certificate in listener['Certificates']:
|
||||
for certificate in listener["Certificates"]:
|
||||
endpoint = dict(
|
||||
name=elb_dict['LoadBalancerName'],
|
||||
dnsname=elb_dict['DNSName'],
|
||||
type='elbv2',
|
||||
port=listener['Port'],
|
||||
certificate_name=iam.get_name_from_arn(certificate['CertificateArn'])
|
||||
name=elb_dict["LoadBalancerName"],
|
||||
dnsname=elb_dict["DNSName"],
|
||||
type="elbv2",
|
||||
port=listener["Port"],
|
||||
certificate_name=iam.get_name_from_arn(certificate["CertificateArn"]),
|
||||
)
|
||||
|
||||
if listener['SslPolicy']:
|
||||
policy = elb.describe_ssl_policies_v2([listener['SslPolicy']], account_number=account_number, region=region)
|
||||
endpoint['policy'] = format_elb_cipher_policy_v2(policy)
|
||||
if listener["SslPolicy"]:
|
||||
policy = elb.describe_ssl_policies_v2(
|
||||
[listener["SslPolicy"]], account_number=account_number, region=region
|
||||
)
|
||||
endpoint["policy"] = format_elb_cipher_policy_v2(policy)
|
||||
|
||||
endpoints.append(endpoint)
|
||||
|
||||
@ -150,54 +161,70 @@ def get_elb_endpoints_v2(account_number, region, elb_dict):
|
||||
|
||||
|
||||
class AWSSourcePlugin(SourcePlugin):
|
||||
title = 'AWS'
|
||||
slug = 'aws-source'
|
||||
description = 'Discovers all SSL certificates and ELB endpoints in an AWS account'
|
||||
title = "AWS"
|
||||
slug = "aws-source"
|
||||
description = "Discovers all SSL certificates and ELB endpoints in an AWS account"
|
||||
version = aws.VERSION
|
||||
|
||||
author = 'Kevin Glisson'
|
||||
author_url = 'https://github.com/netflix/lemur'
|
||||
author = "Kevin Glisson"
|
||||
author_url = "https://github.com/netflix/lemur"
|
||||
|
||||
options = [
|
||||
{
|
||||
'name': 'accountNumber',
|
||||
'type': 'str',
|
||||
'required': True,
|
||||
'validation': '/^[0-9]{12,12}$/',
|
||||
'helpMessage': 'Must be a valid AWS account number!',
|
||||
"name": "accountNumber",
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"validation": "/^[0-9]{12,12}$/",
|
||||
"helpMessage": "Must be a valid AWS account number!",
|
||||
},
|
||||
{
|
||||
'name': 'regions',
|
||||
'type': 'str',
|
||||
'helpMessage': 'Comma separated list of regions to search in, if no region is specified we look in all regions.'
|
||||
"name": "regions",
|
||||
"type": "str",
|
||||
"helpMessage": "Comma separated list of regions to search in, if no region is specified we look in all regions.",
|
||||
},
|
||||
]
|
||||
|
||||
def get_certificates(self, options, **kwargs):
|
||||
cert_data = iam.get_all_certificates(account_number=self.get_option('accountNumber', options))
|
||||
return [dict(body=c['CertificateBody'], chain=c.get('CertificateChain'),
|
||||
name=c['ServerCertificateMetadata']['ServerCertificateName']) for c in cert_data]
|
||||
cert_data = iam.get_all_certificates(
|
||||
account_number=self.get_option("accountNumber", options)
|
||||
)
|
||||
return [
|
||||
dict(
|
||||
body=c["CertificateBody"],
|
||||
chain=c.get("CertificateChain"),
|
||||
name=c["ServerCertificateMetadata"]["ServerCertificateName"],
|
||||
)
|
||||
for c in cert_data
|
||||
]
|
||||
|
||||
def get_endpoints(self, options, **kwargs):
|
||||
endpoints = []
|
||||
account_number = self.get_option('accountNumber', options)
|
||||
regions = self.get_option('regions', options)
|
||||
account_number = self.get_option("accountNumber", options)
|
||||
regions = self.get_option("regions", options)
|
||||
|
||||
if not regions:
|
||||
regions = ec2.get_regions(account_number=account_number)
|
||||
else:
|
||||
regions = regions.split(',')
|
||||
regions = regions.split(",")
|
||||
|
||||
for region in regions:
|
||||
elbs = elb.get_all_elbs(account_number=account_number, region=region)
|
||||
current_app.logger.info("Describing classic load balancers in {0}-{1}".format(account_number, region))
|
||||
current_app.logger.info(
|
||||
"Describing classic load balancers in {0}-{1}".format(
|
||||
account_number, region
|
||||
)
|
||||
)
|
||||
|
||||
for e in elbs:
|
||||
endpoints.extend(get_elb_endpoints(account_number, region, e))
|
||||
|
||||
# fetch advanced ELBs
|
||||
elbs_v2 = elb.get_all_elbs_v2(account_number=account_number, region=region)
|
||||
current_app.logger.info("Describing advanced load balancers in {0}-{1}".format(account_number, region))
|
||||
current_app.logger.info(
|
||||
"Describing advanced load balancers in {0}-{1}".format(
|
||||
account_number, region
|
||||
)
|
||||
)
|
||||
|
||||
for e in elbs_v2:
|
||||
endpoints.extend(get_elb_endpoints_v2(account_number, region, e))
|
||||
@ -206,106 +233,125 @@ class AWSSourcePlugin(SourcePlugin):
|
||||
|
||||
def update_endpoint(self, endpoint, certificate):
|
||||
options = endpoint.source.options
|
||||
account_number = self.get_option('accountNumber', options)
|
||||
account_number = self.get_option("accountNumber", options)
|
||||
|
||||
# relies on the fact that region is included in DNS name
|
||||
region = get_region_from_dns(endpoint.dnsname)
|
||||
arn = iam.create_arn_from_cert(account_number, region, certificate.name)
|
||||
|
||||
if endpoint.type == 'elbv2':
|
||||
listener_arn = elb.get_listener_arn_from_endpoint(endpoint.name, endpoint.port,
|
||||
account_number=account_number, region=region)
|
||||
elb.attach_certificate_v2(listener_arn, endpoint.port, [{'CertificateArn': arn}],
|
||||
account_number=account_number, region=region)
|
||||
if endpoint.type == "elbv2":
|
||||
listener_arn = elb.get_listener_arn_from_endpoint(
|
||||
endpoint.name,
|
||||
endpoint.port,
|
||||
account_number=account_number,
|
||||
region=region,
|
||||
)
|
||||
elb.attach_certificate_v2(
|
||||
listener_arn,
|
||||
endpoint.port,
|
||||
[{"CertificateArn": arn}],
|
||||
account_number=account_number,
|
||||
region=region,
|
||||
)
|
||||
else:
|
||||
elb.attach_certificate(endpoint.name, endpoint.port, arn, account_number=account_number, region=region)
|
||||
elb.attach_certificate(
|
||||
endpoint.name,
|
||||
endpoint.port,
|
||||
arn,
|
||||
account_number=account_number,
|
||||
region=region,
|
||||
)
|
||||
|
||||
def clean(self, certificate, options, **kwargs):
|
||||
account_number = self.get_option('accountNumber', options)
|
||||
account_number = self.get_option("accountNumber", options)
|
||||
iam.delete_cert(certificate.name, account_number=account_number)
|
||||
|
||||
|
||||
class AWSDestinationPlugin(DestinationPlugin):
|
||||
title = 'AWS'
|
||||
slug = 'aws-destination'
|
||||
description = 'Allow the uploading of certificates to AWS IAM'
|
||||
title = "AWS"
|
||||
slug = "aws-destination"
|
||||
description = "Allow the uploading of certificates to AWS IAM"
|
||||
version = aws.VERSION
|
||||
sync_as_source = True
|
||||
sync_as_source_name = AWSSourcePlugin.slug
|
||||
|
||||
author = 'Kevin Glisson'
|
||||
author_url = 'https://github.com/netflix/lemur'
|
||||
author = "Kevin Glisson"
|
||||
author_url = "https://github.com/netflix/lemur"
|
||||
|
||||
options = [
|
||||
{
|
||||
'name': 'accountNumber',
|
||||
'type': 'str',
|
||||
'required': True,
|
||||
'validation': '[0-9]{12}',
|
||||
'helpMessage': 'Must be a valid AWS account number!',
|
||||
"name": "accountNumber",
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"validation": "[0-9]{12}",
|
||||
"helpMessage": "Must be a valid AWS account number!",
|
||||
},
|
||||
{
|
||||
'name': 'path',
|
||||
'type': 'str',
|
||||
'default': '/',
|
||||
'helpMessage': 'Path to upload certificate.'
|
||||
}
|
||||
"name": "path",
|
||||
"type": "str",
|
||||
"default": "/",
|
||||
"helpMessage": "Path to upload certificate.",
|
||||
},
|
||||
]
|
||||
|
||||
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
|
||||
iam.upload_cert(name, body, private_key,
|
||||
self.get_option('path', options),
|
||||
cert_chain=cert_chain,
|
||||
account_number=self.get_option('accountNumber', options))
|
||||
iam.upload_cert(
|
||||
name,
|
||||
body,
|
||||
private_key,
|
||||
self.get_option("path", options),
|
||||
cert_chain=cert_chain,
|
||||
account_number=self.get_option("accountNumber", options),
|
||||
)
|
||||
|
||||
def deploy(self, elb_name, account, region, certificate):
|
||||
pass
|
||||
|
||||
|
||||
class S3DestinationPlugin(ExportDestinationPlugin):
|
||||
title = 'AWS-S3'
|
||||
slug = 'aws-s3'
|
||||
description = 'Allow the uploading of certificates to Amazon S3'
|
||||
title = "AWS-S3"
|
||||
slug = "aws-s3"
|
||||
description = "Allow the uploading of certificates to Amazon S3"
|
||||
|
||||
author = 'Mikhail Khodorovskiy, Harm Weites <harm@weites.com>'
|
||||
author_url = 'https://github.com/Netflix/lemur'
|
||||
author = "Mikhail Khodorovskiy, Harm Weites <harm@weites.com>"
|
||||
author_url = "https://github.com/Netflix/lemur"
|
||||
|
||||
additional_options = [
|
||||
{
|
||||
'name': 'bucket',
|
||||
'type': 'str',
|
||||
'required': True,
|
||||
'validation': '[0-9a-z.-]{3,63}',
|
||||
'helpMessage': 'Must be a valid S3 bucket name!',
|
||||
"name": "bucket",
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"validation": "[0-9a-z.-]{3,63}",
|
||||
"helpMessage": "Must be a valid S3 bucket name!",
|
||||
},
|
||||
{
|
||||
'name': 'accountNumber',
|
||||
'type': 'str',
|
||||
'required': True,
|
||||
'validation': '[0-9]{12}',
|
||||
'helpMessage': 'A valid AWS account number with permission to access S3',
|
||||
"name": "accountNumber",
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"validation": "[0-9]{12}",
|
||||
"helpMessage": "A valid AWS account number with permission to access S3",
|
||||
},
|
||||
{
|
||||
'name': 'region',
|
||||
'type': 'str',
|
||||
'default': 'us-east-1',
|
||||
'required': False,
|
||||
'helpMessage': 'Region bucket exists',
|
||||
'available': ['us-east-1', 'us-west-2', 'eu-west-1']
|
||||
"name": "region",
|
||||
"type": "str",
|
||||
"default": "us-east-1",
|
||||
"required": False,
|
||||
"helpMessage": "Region bucket exists",
|
||||
"available": ["us-east-1", "us-west-2", "eu-west-1"],
|
||||
},
|
||||
{
|
||||
'name': 'encrypt',
|
||||
'type': 'bool',
|
||||
'required': False,
|
||||
'helpMessage': 'Enable server side encryption',
|
||||
'default': True
|
||||
"name": "encrypt",
|
||||
"type": "bool",
|
||||
"required": False,
|
||||
"helpMessage": "Enable server side encryption",
|
||||
"default": True,
|
||||
},
|
||||
{
|
||||
'name': 'prefix',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'Must be a valid S3 object prefix!',
|
||||
}
|
||||
"name": "prefix",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "Must be a valid S3 object prefix!",
|
||||
},
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@ -316,13 +362,12 @@ class S3DestinationPlugin(ExportDestinationPlugin):

        for ext, passphrase, data in files:
            s3.put(
                self.get_option('bucket', options),
                self.get_option('region', options),
                '{prefix}/{name}.{extension}'.format(
                    prefix=self.get_option('prefix', options),
                    name=name,
                    extension=ext),
                self.get_option("bucket", options),
                self.get_option("region", options),
                "{prefix}/{name}.{extension}".format(
                    prefix=self.get_option("prefix", options), name=name, extension=ext
                ),
                data,
                self.get_option('encrypt', options),
                account_number=self.get_option('accountNumber', options)
                self.get_option("encrypt", options),
                account_number=self.get_option("accountNumber", options),
            )
@ -10,28 +10,26 @@ from flask import current_app
from .sts import sts_client


@sts_client('s3', service_type='resource')
@sts_client("s3", service_type="resource")
def put(bucket_name, region, prefix, data, encrypt, **kwargs):
    """
    Use STS to write to an S3 bucket
    """
    bucket = kwargs['resource'].Bucket(bucket_name)
    current_app.logger.debug('Persisting data to S3. Bucket: {0} Prefix: {1}'.format(bucket_name, prefix))
    bucket = kwargs["resource"].Bucket(bucket_name)
    current_app.logger.debug(
        "Persisting data to S3. Bucket: {0} Prefix: {1}".format(bucket_name, prefix)
    )

    # get data ready for writing
    if isinstance(data, str):
        data = data.encode('utf-8')
        data = data.encode("utf-8")

    if encrypt:
        bucket.put_object(
            Key=prefix,
            Body=data,
            ACL='bucket-owner-full-control',
            ServerSideEncryption='AES256'
            ACL="bucket-owner-full-control",
            ServerSideEncryption="AES256",
        )
    else:
        bucket.put_object(
            Key=prefix,
            Body=data,
            ACL='bucket-owner-full-control'
        )
        bucket.put_object(Key=prefix, Body=data, ACL="bucket-owner-full-control")
@ -13,46 +13,42 @@ from botocore.config import Config
|
||||
from flask import current_app
|
||||
|
||||
|
||||
config = Config(
|
||||
retries=dict(
|
||||
max_attempts=20
|
||||
)
|
||||
)
|
||||
config = Config(retries=dict(max_attempts=20))
|
||||
|
||||
|
||||
def sts_client(service, service_type='client'):
|
||||
def sts_client(service, service_type="client"):
|
||||
def decorator(f):
|
||||
@wraps(f)
|
||||
def decorated_function(*args, **kwargs):
|
||||
sts = boto3.client('sts', config=config)
|
||||
arn = 'arn:aws:iam::{0}:role/{1}'.format(
|
||||
kwargs.pop('account_number'),
|
||||
current_app.config.get('LEMUR_INSTANCE_PROFILE', 'Lemur')
|
||||
sts = boto3.client("sts", config=config)
|
||||
arn = "arn:aws:iam::{0}:role/{1}".format(
|
||||
kwargs.pop("account_number"),
|
||||
current_app.config.get("LEMUR_INSTANCE_PROFILE", "Lemur"),
|
||||
)
|
||||
|
||||
# TODO add user specific information to RoleSessionName
|
||||
role = sts.assume_role(RoleArn=arn, RoleSessionName='lemur')
|
||||
role = sts.assume_role(RoleArn=arn, RoleSessionName="lemur")
|
||||
|
||||
if service_type == 'client':
|
||||
if service_type == "client":
|
||||
client = boto3.client(
|
||||
service,
|
||||
region_name=kwargs.pop('region', 'us-east-1'),
|
||||
aws_access_key_id=role['Credentials']['AccessKeyId'],
|
||||
aws_secret_access_key=role['Credentials']['SecretAccessKey'],
|
||||
aws_session_token=role['Credentials']['SessionToken'],
|
||||
config=config
|
||||
region_name=kwargs.pop("region", "us-east-1"),
|
||||
aws_access_key_id=role["Credentials"]["AccessKeyId"],
|
||||
aws_secret_access_key=role["Credentials"]["SecretAccessKey"],
|
||||
aws_session_token=role["Credentials"]["SessionToken"],
|
||||
config=config,
|
||||
)
|
||||
kwargs['client'] = client
|
||||
elif service_type == 'resource':
|
||||
kwargs["client"] = client
|
||||
elif service_type == "resource":
|
||||
resource = boto3.resource(
|
||||
service,
|
||||
region_name=kwargs.pop('region', 'us-east-1'),
|
||||
aws_access_key_id=role['Credentials']['AccessKeyId'],
|
||||
aws_secret_access_key=role['Credentials']['SecretAccessKey'],
|
||||
aws_session_token=role['Credentials']['SessionToken'],
|
||||
config=config
|
||||
region_name=kwargs.pop("region", "us-east-1"),
|
||||
aws_access_key_id=role["Credentials"]["AccessKeyId"],
|
||||
aws_secret_access_key=role["Credentials"]["SecretAccessKey"],
|
||||
aws_session_token=role["Credentials"]["SessionToken"],
|
||||
config=config,
|
||||
)
|
||||
kwargs['resource'] = resource
|
||||
kwargs["resource"] = resource
|
||||
return f(*args, **kwargs)
|
||||
|
||||
return decorated_function
|
||||
|
@ -6,23 +6,24 @@ from moto import mock_sts, mock_elb
@mock_elb()
def test_get_all_elbs(app, aws_credentials):
    from lemur.plugins.lemur_aws.elb import get_all_elbs

    client = boto3.client('elb', region_name='us-east-1')

    elbs = get_all_elbs(account_number='123456789012', region='us-east-1')
    client = boto3.client("elb", region_name="us-east-1")

    elbs = get_all_elbs(account_number="123456789012", region="us-east-1")
    assert not elbs

    client.create_load_balancer(
        LoadBalancerName='example-lb',
        LoadBalancerName="example-lb",
        Listeners=[
            {
                'Protocol': 'string',
                'LoadBalancerPort': 443,
                'InstanceProtocol': 'tcp',
                'InstancePort': 5443,
                'SSLCertificateId': 'tcp'
                "Protocol": "string",
                "LoadBalancerPort": 443,
                "InstanceProtocol": "tcp",
                "InstancePort": 5443,
                "SSLCertificateId": "tcp",
            }
        ]
        ],
    )

    elbs = get_all_elbs(account_number='123456789012', region='us-east-1')
    elbs = get_all_elbs(account_number="123456789012", region="us-east-1")
    assert elbs
@ -6,15 +6,21 @@ from lemur.tests.vectors import EXTERNAL_VALID_STR, SAN_CERT_KEY

def test_get_name_from_arn():
    from lemur.plugins.lemur_aws.iam import get_name_from_arn
    arn = 'arn:aws:iam::123456789012:server-certificate/tttt2.netflixtest.net-NetflixInc-20150624-20150625'
    assert get_name_from_arn(arn) == 'tttt2.netflixtest.net-NetflixInc-20150624-20150625'

    arn = "arn:aws:iam::123456789012:server-certificate/tttt2.netflixtest.net-NetflixInc-20150624-20150625"
    assert (
        get_name_from_arn(arn) == "tttt2.netflixtest.net-NetflixInc-20150624-20150625"
    )


@pytest.mark.skipif(True, reason="this fails because moto is not currently returning what boto does")
@pytest.mark.skipif(
    True, reason="this fails because moto is not currently returning what boto does"
)
@mock_sts()
@mock_iam()
def test_get_all_server_certs(app):
    from lemur.plugins.lemur_aws.iam import upload_cert, get_all_certificates
    upload_cert('123456789012', 'testCert', EXTERNAL_VALID_STR, SAN_CERT_KEY)
    certs = get_all_certificates('123456789012')

    upload_cert("123456789012", "testCert", EXTERNAL_VALID_STR, SAN_CERT_KEY)
    certs = get_all_certificates("123456789012")
    assert len(certs) == 1
@ -1,6 +1,5 @@

def test_get_certificates(app):
    from lemur.plugins.base import plugins

    p = plugins.get('aws-s3')
    p = plugins.get("aws-s3")
    assert p
@ -1,5 +1,4 @@
try:
    VERSION = __import__('pkg_resources') \
        .get_distribution(__name__).version
    VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
    VERSION = 'unknown'
    VERSION = "unknown"
@ -24,13 +24,13 @@ from lemur.extensions import metrics


class CfsslIssuerPlugin(IssuerPlugin):
    title = 'CFSSL'
    slug = 'cfssl-issuer'
    description = 'Enables the creation of certificates by CFSSL private CA'
    title = "CFSSL"
    slug = "cfssl-issuer"
    description = "Enables the creation of certificates by CFSSL private CA"
    version = cfssl.VERSION

    author = 'Charles Hendrie'
    author_url = 'https://github.com/netflix/lemur.git'
    author = "Charles Hendrie"
    author_url = "https://github.com/netflix/lemur.git"

    def __init__(self, *args, **kwargs):
        self.session = requests.Session()
@ -44,15 +44,17 @@ class CfsslIssuerPlugin(IssuerPlugin):
        :param issuer_options:
        :return:
        """
        current_app.logger.info("Requesting a new cfssl certificate with csr: {0}".format(csr))
        current_app.logger.info(
            "Requesting a new cfssl certificate with csr: {0}".format(csr)
        )

        url = "{0}{1}".format(current_app.config.get('CFSSL_URL'), '/api/v1/cfssl/sign')
        url = "{0}{1}".format(current_app.config.get("CFSSL_URL"), "/api/v1/cfssl/sign")

        data = {'certificate_request': csr}
        data = {"certificate_request": csr}
        data = json.dumps(data)

        try:
            hex_key = current_app.config.get('CFSSL_KEY')
            hex_key = current_app.config.get("CFSSL_KEY")
            key = bytes.fromhex(hex_key)
        except (ValueError, NameError):
            # unable to find CFSSL_KEY in config, continue using normal sign method
@ -60,22 +62,33 @@ class CfsslIssuerPlugin(IssuerPlugin):
|
||||
else:
|
||||
data = data.encode()
|
||||
|
||||
token = base64.b64encode(hmac.new(key, data, digestmod=hashlib.sha256).digest())
|
||||
token = base64.b64encode(
|
||||
hmac.new(key, data, digestmod=hashlib.sha256).digest()
|
||||
)
|
||||
data = base64.b64encode(data)
|
||||
|
||||
data = json.dumps({'token': token.decode('utf-8'), 'request': data.decode('utf-8')})
|
||||
data = json.dumps(
|
||||
{"token": token.decode("utf-8"), "request": data.decode("utf-8")}
|
||||
)
|
||||
|
||||
url = "{0}{1}".format(current_app.config.get('CFSSL_URL'), '/api/v1/cfssl/authsign')
|
||||
response = self.session.post(url, data=data.encode(encoding='utf_8', errors='strict'))
|
||||
url = "{0}{1}".format(
|
||||
current_app.config.get("CFSSL_URL"), "/api/v1/cfssl/authsign"
|
||||
)
|
||||
response = self.session.post(
|
||||
url, data=data.encode(encoding="utf_8", errors="strict")
|
||||
)
|
||||
if response.status_code > 399:
|
||||
metrics.send('cfssl_create_certificate_failure', 'counter', 1)
|
||||
raise Exception(
|
||||
"Error creating cert. Please check your CFSSL API server")
|
||||
response_json = json.loads(response.content.decode('utf_8'))
|
||||
cert = response_json['result']['certificate']
|
||||
metrics.send("cfssl_create_certificate_failure", "counter", 1)
|
||||
raise Exception("Error creating cert. Please check your CFSSL API server")
|
||||
response_json = json.loads(response.content.decode("utf_8"))
|
||||
cert = response_json["result"]["certificate"]
|
||||
parsed_cert = parse_certificate(cert)
|
||||
metrics.send('cfssl_create_certificate_success', 'counter', 1)
|
||||
return cert, current_app.config.get('CFSSL_INTERMEDIATE'), parsed_cert.serial_number
|
||||
metrics.send("cfssl_create_certificate_success", "counter", 1)
|
||||
return (
|
||||
cert,
|
||||
current_app.config.get("CFSSL_INTERMEDIATE"),
|
||||
parsed_cert.serial_number,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def create_authority(options):
|
||||
@ -86,22 +99,26 @@ class CfsslIssuerPlugin(IssuerPlugin):
|
||||
:param options:
|
||||
:return:
|
||||
"""
|
||||
role = {'username': '', 'password': '', 'name': 'cfssl'}
|
||||
return current_app.config.get('CFSSL_ROOT'), "", [role]
|
||||
role = {"username": "", "password": "", "name": "cfssl"}
|
||||
return current_app.config.get("CFSSL_ROOT"), "", [role]
|
||||
|
||||
def revoke_certificate(self, certificate, comments):
|
||||
"""Revoke a CFSSL certificate."""
|
||||
base_url = current_app.config.get('CFSSL_URL')
|
||||
create_url = '{0}/api/v1/cfssl/revoke'.format(base_url)
|
||||
data = '{"serial": "' + certificate.external_id + '","authority_key_id": "' + \
|
||||
get_authority_key(certificate.body) + \
|
||||
'", "reason": "superseded"}'
|
||||
base_url = current_app.config.get("CFSSL_URL")
|
||||
create_url = "{0}/api/v1/cfssl/revoke".format(base_url)
|
||||
data = (
|
||||
'{"serial": "'
|
||||
+ certificate.external_id
|
||||
+ '","authority_key_id": "'
|
||||
+ get_authority_key(certificate.body)
|
||||
+ '", "reason": "superseded"}'
|
||||
)
|
||||
current_app.logger.debug("Revoking cert: {0}".format(data))
|
||||
response = self.session.post(
|
||||
create_url, data=data.encode(encoding='utf_8', errors='strict'))
|
||||
create_url, data=data.encode(encoding="utf_8", errors="strict")
|
||||
)
|
||||
if response.status_code > 399:
|
||||
metrics.send('cfssl_revoke_certificate_failure', 'counter', 1)
|
||||
raise Exception(
|
||||
"Error revoking cert. Please check your CFSSL API server")
|
||||
metrics.send('cfssl_revoke_certificate_success', 'counter', 1)
|
||||
metrics.send("cfssl_revoke_certificate_failure", "counter", 1)
|
||||
raise Exception("Error revoking cert. Please check your CFSSL API server")
|
||||
metrics.send("cfssl_revoke_certificate_success", "counter", 1)
|
||||
return response.json()
|
||||
|
@ -1,6 +1,5 @@

def test_get_certificates(app):
    from lemur.plugins.base import plugins

    p = plugins.get('cfssl-issuer')
    p = plugins.get("cfssl-issuer")
    assert p
@ -1,5 +1,4 @@
try:
    VERSION = __import__('pkg_resources') \
        .get_distribution(__name__).version
    VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
    VERSION = 'unknown'
    VERSION = "unknown"
@ -22,7 +22,7 @@ from lemur.certificates.service import create_csr


def build_certificate_authority(options):
    options['certificate_authority'] = True
    options["certificate_authority"] = True
    csr, private_key = create_csr(**options)
    cert_pem, chain_cert_pem = issue_certificate(csr, options, private_key)
@ -30,24 +30,32 @@ def build_certificate_authority(options):
|
||||
|
||||
|
||||
def issue_certificate(csr, options, private_key=None):
|
||||
csr = x509.load_pem_x509_csr(csr.encode('utf-8'), default_backend())
|
||||
csr = x509.load_pem_x509_csr(csr.encode("utf-8"), default_backend())
|
||||
|
||||
if options.get("parent"):
|
||||
# creating intermediate authorities will have options['parent'] to specify the issuer
|
||||
# creating certificates will have options['authority'] to specify the issuer
|
||||
# This works around that by making sure options['authority'] can be referenced for either
|
||||
options['authority'] = options['parent']
|
||||
options["authority"] = options["parent"]
|
||||
|
||||
if options.get("authority"):
|
||||
# Issue certificate signed by an existing lemur_certificates authority
|
||||
issuer_subject = options['authority'].authority_certificate.subject
|
||||
assert private_key is None, "Private would be ignored, authority key used instead"
|
||||
private_key = options['authority'].authority_certificate.private_key
|
||||
chain_cert_pem = options['authority'].authority_certificate.body
|
||||
authority_key_identifier_public = options['authority'].authority_certificate.public_key
|
||||
authority_key_identifier_subject = x509.SubjectKeyIdentifier.from_public_key(authority_key_identifier_public)
|
||||
issuer_subject = options["authority"].authority_certificate.subject
|
||||
assert (
|
||||
private_key is None
|
||||
), "Private would be ignored, authority key used instead"
|
||||
private_key = options["authority"].authority_certificate.private_key
|
||||
chain_cert_pem = options["authority"].authority_certificate.body
|
||||
authority_key_identifier_public = options[
|
||||
"authority"
|
||||
].authority_certificate.public_key
|
||||
authority_key_identifier_subject = x509.SubjectKeyIdentifier.from_public_key(
|
||||
authority_key_identifier_public
|
||||
)
|
||||
authority_key_identifier_issuer = issuer_subject
|
||||
authority_key_identifier_serial = int(options['authority'].authority_certificate.serial)
|
||||
authority_key_identifier_serial = int(
|
||||
options["authority"].authority_certificate.serial
|
||||
)
|
||||
# TODO figure out a better way to increment serial
|
||||
# New authorities have a value at options['serial_number'] that is being ignored here.
|
||||
serial = int(uuid.uuid4())
|
||||
@ -58,7 +66,7 @@ def issue_certificate(csr, options, private_key=None):
        authority_key_identifier_public = csr.public_key()
        authority_key_identifier_subject = None
        authority_key_identifier_issuer = csr.subject
        authority_key_identifier_serial = options['serial_number']
        authority_key_identifier_serial = options["serial_number"]
        # TODO figure out a better way to increment serial
        serial = int(uuid.uuid4())
@ -68,19 +76,20 @@ def issue_certificate(csr, options, private_key=None):
|
||||
issuer_name=issuer_subject,
|
||||
subject_name=csr.subject,
|
||||
public_key=csr.public_key(),
|
||||
not_valid_before=options['validity_start'],
|
||||
not_valid_after=options['validity_end'],
|
||||
not_valid_before=options["validity_start"],
|
||||
not_valid_after=options["validity_end"],
|
||||
serial_number=serial,
|
||||
extensions=extensions)
|
||||
extensions=extensions,
|
||||
)
|
||||
|
||||
for k, v in options.get('extensions', {}).items():
|
||||
if k == 'authority_key_identifier':
|
||||
for k, v in options.get("extensions", {}).items():
|
||||
if k == "authority_key_identifier":
|
||||
# One or both of these options may be present inside the aki extension
|
||||
(authority_key_identifier, authority_identifier) = (False, False)
|
||||
for k2, v2 in v.items():
|
||||
if k2 == 'use_key_identifier' and v2:
|
||||
if k2 == "use_key_identifier" and v2:
|
||||
authority_key_identifier = True
|
||||
if k2 == 'use_authority_cert' and v2:
|
||||
if k2 == "use_authority_cert" and v2:
|
||||
authority_identifier = True
|
||||
if authority_key_identifier:
|
||||
if authority_key_identifier_subject:
|
||||
@ -89,13 +98,21 @@ def issue_certificate(csr, options, private_key=None):
|
||||
# but the digest of the ski is at just ski.digest. Until that library is fixed,
|
||||
# this function won't work. The second line has the same result.
|
||||
# aki = x509.AuthorityKeyIdentifier.from_issuer_subject_key_identifier(authority_key_identifier_subject)
|
||||
aki = x509.AuthorityKeyIdentifier(authority_key_identifier_subject.digest, None, None)
|
||||
aki = x509.AuthorityKeyIdentifier(
|
||||
authority_key_identifier_subject.digest, None, None
|
||||
)
|
||||
else:
|
||||
aki = x509.AuthorityKeyIdentifier.from_issuer_public_key(authority_key_identifier_public)
|
||||
aki = x509.AuthorityKeyIdentifier.from_issuer_public_key(
|
||||
authority_key_identifier_public
|
||||
)
|
||||
elif authority_identifier:
|
||||
aki = x509.AuthorityKeyIdentifier(None, [x509.DirectoryName(authority_key_identifier_issuer)], authority_key_identifier_serial)
|
||||
aki = x509.AuthorityKeyIdentifier(
|
||||
None,
|
||||
[x509.DirectoryName(authority_key_identifier_issuer)],
|
||||
authority_key_identifier_serial,
|
||||
)
|
||||
builder = builder.add_extension(aki, critical=False)
|
||||
if k == 'certificate_info_access':
|
||||
if k == "certificate_info_access":
|
||||
# FIXME: Implement the AuthorityInformationAccess extension
|
||||
# descriptions = [
|
||||
# x509.AccessDescription(x509.oid.AuthorityInformationAccessOID.OCSP, x509.UniformResourceIdentifier(u"http://FIXME")),
|
||||
@ -108,7 +125,7 @@ def issue_certificate(csr, options, private_key=None):
            #     critical=False
            # )
            pass
        if k == 'crl_distribution_points':
        if k == "crl_distribution_points":
            # FIXME: Implement the CRLDistributionPoints extension
            # FIXME: Not implemented in lemur/schemas.py yet https://github.com/Netflix/lemur/issues/662
            pass
@ -116,20 +133,24 @@ def issue_certificate(csr, options, private_key=None):
|
||||
private_key = parse_private_key(private_key)
|
||||
|
||||
cert = builder.sign(private_key, hashes.SHA256(), default_backend())
|
||||
cert_pem = cert.public_bytes(
|
||||
encoding=serialization.Encoding.PEM
|
||||
).decode('utf-8')
|
||||
cert_pem = cert.public_bytes(encoding=serialization.Encoding.PEM).decode("utf-8")
|
||||
|
||||
return cert_pem, chain_cert_pem
|
||||
|
||||
|
||||
def normalize_extensions(csr):
|
||||
try:
|
||||
san_extension = csr.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
|
||||
san_extension = csr.extensions.get_extension_for_oid(
|
||||
x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME
|
||||
)
|
||||
san_dnsnames = san_extension.value.get_values_for_type(x509.DNSName)
|
||||
except x509.extensions.ExtensionNotFound:
|
||||
san_dnsnames = []
|
||||
san_extension = x509.Extension(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME, True, x509.SubjectAlternativeName(san_dnsnames))
|
||||
san_extension = x509.Extension(
|
||||
x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME,
|
||||
True,
|
||||
x509.SubjectAlternativeName(san_dnsnames),
|
||||
)
|
||||
|
||||
common_name = csr.subject.get_attributes_for_oid(x509.oid.NameOID.COMMON_NAME)
|
||||
common_name = common_name[0].value
|
||||
@ -149,7 +170,11 @@ def normalize_extensions(csr):
    for san in san_extension.value:
        general_names.append(san)

    san_extension = x509.Extension(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME, True, x509.SubjectAlternativeName(general_names))
    san_extension = x509.Extension(
        x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME,
        True,
        x509.SubjectAlternativeName(general_names),
    )

    # Remove original san extension from CSR and add new SAN extension
    extensions = list(filter(filter_san_extensions, csr.extensions._extensions))
@ -166,13 +191,13 @@ def filter_san_extensions(ext):
|
||||
|
||||
|
||||
class CryptographyIssuerPlugin(IssuerPlugin):
|
||||
title = 'Cryptography'
|
||||
slug = 'cryptography-issuer'
|
||||
description = 'Enables the creation and signing of self-signed certificates'
|
||||
title = "Cryptography"
|
||||
slug = "cryptography-issuer"
|
||||
description = "Enables the creation and signing of self-signed certificates"
|
||||
version = cryptography_issuer.VERSION
|
||||
|
||||
author = 'Kevin Glisson'
|
||||
author_url = 'https://github.com/netflix/lemur.git'
|
||||
author = "Kevin Glisson"
|
||||
author_url = "https://github.com/netflix/lemur.git"
|
||||
|
||||
def create_certificate(self, csr, options):
|
||||
"""
|
||||
@ -182,7 +207,9 @@ class CryptographyIssuerPlugin(IssuerPlugin):
|
||||
:param options:
|
||||
:return: :raise Exception:
|
||||
"""
|
||||
current_app.logger.debug("Issuing new cryptography certificate with options: {0}".format(options))
|
||||
current_app.logger.debug(
|
||||
"Issuing new cryptography certificate with options: {0}".format(options)
|
||||
)
|
||||
cert_pem, chain_cert_pem = issue_certificate(csr, options)
|
||||
return cert_pem, chain_cert_pem, None
|
||||
|
||||
@ -195,10 +222,12 @@ class CryptographyIssuerPlugin(IssuerPlugin):
|
||||
:param options:
|
||||
:return:
|
||||
"""
|
||||
current_app.logger.debug("Issuing new cryptography authority with options: {0}".format(options))
|
||||
current_app.logger.debug(
|
||||
"Issuing new cryptography authority with options: {0}".format(options)
|
||||
)
|
||||
cert_pem, private_key, chain_cert_pem = build_certificate_authority(options)
|
||||
roles = [
|
||||
{'username': '', 'password': '', 'name': options['name'] + '_admin'},
|
||||
{'username': '', 'password': '', 'name': options['name'] + '_operator'}
|
||||
{"username": "", "password": "", "name": options["name"] + "_admin"},
|
||||
{"username": "", "password": "", "name": options["name"] + "_operator"},
|
||||
]
|
||||
return cert_pem, private_key, chain_cert_pem, roles
|
||||
|
@ -5,24 +5,24 @@ def test_build_certificate_authority():
|
||||
from lemur.plugins.lemur_cryptography.plugin import build_certificate_authority
|
||||
|
||||
options = {
|
||||
'key_type': 'RSA2048',
|
||||
'country': 'US',
|
||||
'state': 'CA',
|
||||
'location': 'Example place',
|
||||
'organization': 'Example, Inc.',
|
||||
'organizational_unit': 'Example Unit',
|
||||
'common_name': 'Example ROOT',
|
||||
'validity_start': arrow.get('2016-12-01').datetime,
|
||||
'validity_end': arrow.get('2016-12-02').datetime,
|
||||
'first_serial': 1,
|
||||
'serial_number': 1,
|
||||
'owner': 'owner@example.com'
|
||||
"key_type": "RSA2048",
|
||||
"country": "US",
|
||||
"state": "CA",
|
||||
"location": "Example place",
|
||||
"organization": "Example, Inc.",
|
||||
"organizational_unit": "Example Unit",
|
||||
"common_name": "Example ROOT",
|
||||
"validity_start": arrow.get("2016-12-01").datetime,
|
||||
"validity_end": arrow.get("2016-12-02").datetime,
|
||||
"first_serial": 1,
|
||||
"serial_number": 1,
|
||||
"owner": "owner@example.com",
|
||||
}
|
||||
cert_pem, private_key_pem, chain_cert_pem = build_certificate_authority(options)
|
||||
|
||||
assert cert_pem
|
||||
assert private_key_pem
|
||||
assert chain_cert_pem == ''
|
||||
assert chain_cert_pem == ""
|
||||
|
||||
|
||||
def test_issue_certificate(authority):
|
||||
@ -30,10 +30,10 @@ def test_issue_certificate(authority):
    from lemur.plugins.lemur_cryptography.plugin import issue_certificate

    options = {
        'common_name': 'Example.com',
        'authority': authority,
        'validity_start': arrow.get('2016-12-01').datetime,
        'validity_end': arrow.get('2016-12-02').datetime
        "common_name": "Example.com",
        "authority": authority,
        "validity_start": arrow.get("2016-12-01").datetime,
        "validity_end": arrow.get("2016-12-02").datetime,
    }
    cert_pem, chain_cert_pem = issue_certificate(CSR_STR, options)
    assert cert_pem
@ -1,5 +1,4 @@
try:
    VERSION = __import__('pkg_resources') \
        .get_distribution(__name__).version
    VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
    VERSION = 'unknown'
    VERSION = "unknown"
@ -43,38 +43,30 @@ def create_csr(cert, chain, csr_tmp, key):
|
||||
assert isinstance(key, str)
|
||||
|
||||
with mktempfile() as key_tmp:
|
||||
with open(key_tmp, 'w') as f:
|
||||
with open(key_tmp, "w") as f:
|
||||
f.write(key)
|
||||
|
||||
with mktempfile() as cert_tmp:
|
||||
with open(cert_tmp, 'w') as f:
|
||||
with open(cert_tmp, "w") as f:
|
||||
if chain:
|
||||
f.writelines([cert.strip() + "\n", chain.strip() + "\n"])
|
||||
else:
|
||||
f.writelines([cert.strip() + "\n"])
|
||||
|
||||
output = subprocess.check_output([
|
||||
"openssl",
|
||||
"x509",
|
||||
"-x509toreq",
|
||||
"-in", cert_tmp,
|
||||
"-signkey", key_tmp,
|
||||
])
|
||||
subprocess.run([
|
||||
"openssl",
|
||||
"req",
|
||||
"-out", csr_tmp
|
||||
], input=output)
|
||||
output = subprocess.check_output(
|
||||
["openssl", "x509", "-x509toreq", "-in", cert_tmp, "-signkey", key_tmp]
|
||||
)
|
||||
subprocess.run(["openssl", "req", "-out", csr_tmp], input=output)
|
||||
|
||||
|
||||
class CSRExportPlugin(ExportPlugin):
|
||||
title = 'CSR'
|
||||
slug = 'openssl-csr'
|
||||
description = 'Exports a CSR'
|
||||
title = "CSR"
|
||||
slug = "openssl-csr"
|
||||
description = "Exports a CSR"
|
||||
version = csr.VERSION
|
||||
|
||||
author = 'jchuong'
|
||||
author_url = 'https://github.com/jchuong'
|
||||
author = "jchuong"
|
||||
author_url = "https://github.com/jchuong"
|
||||
|
||||
def export(self, body, chain, key, options, **kwargs):
|
||||
"""
|
||||
@ -93,7 +85,7 @@ class CSRExportPlugin(ExportPlugin):
|
||||
create_csr(body, chain, output_tmp, key)
|
||||
extension = "csr"
|
||||
|
||||
with open(output_tmp, 'rb') as f:
|
||||
with open(output_tmp, "rb") as f:
|
||||
raw = f.read()
|
||||
# passphrase is None
|
||||
return extension, None, raw
|
||||
|
@ -4,7 +4,8 @@ from lemur.tests.vectors import INTERNAL_PRIVATE_KEY_A_STR, INTERNAL_CERTIFICATE

def test_export_certificate_to_csr(app):
    from lemur.plugins.base import plugins
    p = plugins.get('openssl-csr')

    p = plugins.get("openssl-csr")
    options = []
    with pytest.raises(Exception):
        p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
@ -1,5 +1,4 @@
try:
    VERSION = __import__('pkg_resources') \
        .get_distribution(__name__).version
    VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
    VERSION = 'unknown'
    VERSION = "unknown"
@ -40,7 +40,7 @@ def log_status_code(r, *args, **kwargs):
    :param kwargs:
    :return:
    """
    metrics.send('digicert_status_code_{}'.format(r.status_code), 'counter', 1)
    metrics.send("digicert_status_code_{}".format(r.status_code), "counter", 1)


def signature_hash(signing_algorithm):
@ -50,18 +50,18 @@ def signature_hash(signing_algorithm):
    :return: str digicert specific algorithm string
    """
    if not signing_algorithm:
        return current_app.config.get('DIGICERT_DEFAULT_SIGNING_ALGORITHM', 'sha256')
        return current_app.config.get("DIGICERT_DEFAULT_SIGNING_ALGORITHM", "sha256")

    if signing_algorithm == 'sha256WithRSA':
        return 'sha256'
    if signing_algorithm == "sha256WithRSA":
        return "sha256"

    elif signing_algorithm == 'sha384WithRSA':
        return 'sha384'
    elif signing_algorithm == "sha384WithRSA":
        return "sha384"

    elif signing_algorithm == 'sha512WithRSA':
        return 'sha512'
    elif signing_algorithm == "sha512WithRSA":
        return "sha512"

    raise Exception('Unsupported signing algorithm.')
    raise Exception("Unsupported signing algorithm.")


def determine_validity_years(end_date):
@ -79,8 +79,9 @@ def determine_validity_years(end_date):
    elif end_date < now.replace(years=+3):
        return 3

    raise Exception("DigiCert issued certificates cannot exceed three"
                    " years in validity")
    raise Exception(
        "DigiCert issued certificates cannot exceed three" " years in validity"
    )
def get_additional_names(options):
|
||||
@ -92,8 +93,8 @@ def get_additional_names(options):
|
||||
"""
|
||||
names = []
|
||||
# add SANs if present
|
||||
if options.get('extensions'):
|
||||
for san in options['extensions']['sub_alt_names']['names']:
|
||||
if options.get("extensions"):
|
||||
for san in options["extensions"]["sub_alt_names"]["names"]:
|
||||
if isinstance(san, x509.DNSName):
|
||||
names.append(san.value)
|
||||
return names
|
||||
@ -106,31 +107,33 @@ def map_fields(options, csr):
|
||||
:param csr:
|
||||
:return: dict or valid DigiCert options
|
||||
"""
|
||||
if not options.get('validity_years'):
|
||||
if not options.get('validity_end'):
|
||||
options['validity_years'] = current_app.config.get('DIGICERT_DEFAULT_VALIDITY', 1)
|
||||
if not options.get("validity_years"):
|
||||
if not options.get("validity_end"):
|
||||
options["validity_years"] = current_app.config.get(
|
||||
"DIGICERT_DEFAULT_VALIDITY", 1
|
||||
)
|
||||
|
||||
data = dict(certificate={
|
||||
"common_name": options['common_name'],
|
||||
"csr": csr,
|
||||
"signature_hash":
|
||||
signature_hash(options.get('signing_algorithm')),
|
||||
}, organization={
|
||||
"id": current_app.config.get("DIGICERT_ORG_ID")
|
||||
})
|
||||
data = dict(
|
||||
certificate={
|
||||
"common_name": options["common_name"],
|
||||
"csr": csr,
|
||||
"signature_hash": signature_hash(options.get("signing_algorithm")),
|
||||
},
|
||||
organization={"id": current_app.config.get("DIGICERT_ORG_ID")},
|
||||
)
|
||||
|
||||
data['certificate']['dns_names'] = get_additional_names(options)
|
||||
data["certificate"]["dns_names"] = get_additional_names(options)
|
||||
|
||||
if options.get('validity_years'):
|
||||
data['validity_years'] = options['validity_years']
|
||||
if options.get("validity_years"):
|
||||
data["validity_years"] = options["validity_years"]
|
||||
else:
|
||||
data['custom_expiration_date'] = options['validity_end'].format('YYYY-MM-DD')
|
||||
data["custom_expiration_date"] = options["validity_end"].format("YYYY-MM-DD")
|
||||
|
||||
if current_app.config.get('DIGICERT_PRIVATE', False):
|
||||
if 'product' in data:
|
||||
data['product']['type_hint'] = 'private'
|
||||
if current_app.config.get("DIGICERT_PRIVATE", False):
|
||||
if "product" in data:
|
||||
data["product"]["type_hint"] = "private"
|
||||
else:
|
||||
data['product'] = dict(type_hint='private')
|
||||
data["product"] = dict(type_hint="private")
|
||||
|
||||
return data
|
||||
|
||||
@ -143,26 +146,30 @@ def map_cis_fields(options, csr):
|
||||
:param csr:
|
||||
:return:
|
||||
"""
|
||||
if not options.get('validity_years'):
|
||||
if not options.get('validity_end'):
|
||||
options['validity_end'] = arrow.utcnow().replace(years=current_app.config.get('DIGICERT_DEFAULT_VALIDITY', 1))
|
||||
options['validity_years'] = determine_validity_years(options['validity_end'])
|
||||
if not options.get("validity_years"):
|
||||
if not options.get("validity_end"):
|
||||
options["validity_end"] = arrow.utcnow().replace(
|
||||
years=current_app.config.get("DIGICERT_DEFAULT_VALIDITY", 1)
|
||||
)
|
||||
options["validity_years"] = determine_validity_years(options["validity_end"])
|
||||
else:
|
||||
options['validity_end'] = arrow.utcnow().replace(years=options['validity_years'])
|
||||
options["validity_end"] = arrow.utcnow().replace(
|
||||
years=options["validity_years"]
|
||||
)
|
||||
|
||||
data = {
|
||||
"profile_name": current_app.config.get('DIGICERT_CIS_PROFILE_NAME'),
|
||||
"common_name": options['common_name'],
|
||||
"profile_name": current_app.config.get("DIGICERT_CIS_PROFILE_NAME"),
|
||||
"common_name": options["common_name"],
|
||||
"additional_dns_names": get_additional_names(options),
|
||||
"csr": csr,
|
||||
"signature_hash": signature_hash(options.get('signing_algorithm')),
|
||||
"signature_hash": signature_hash(options.get("signing_algorithm")),
|
||||
"validity": {
|
||||
"valid_to": options['validity_end'].format('YYYY-MM-DDTHH:MM') + 'Z'
|
||||
"valid_to": options["validity_end"].format("YYYY-MM-DDTHH:MM") + "Z"
|
||||
},
|
||||
"organization": {
|
||||
"name": options['organization'],
|
||||
"units": [options['organizational_unit']]
|
||||
}
|
||||
"name": options["organization"],
|
||||
"units": [options["organizational_unit"]],
|
||||
},
|
||||
}
|
||||
|
||||
return data
|
||||
@ -175,7 +182,7 @@ def handle_response(response):
    :return:
    """
    if response.status_code > 399:
        raise Exception(response.json()['errors'][0]['message'])
        raise Exception(response.json()["errors"][0]["message"])

    return response.json()
@ -187,7 +194,7 @@ def handle_cis_response(response):
    :return:
    """
    if response.status_code > 399:
        raise Exception(response.json()['errors'][0]['message'])
        raise Exception(response.text)

    return response.json()
@ -197,19 +204,17 @@ def get_certificate_id(session, base_url, order_id):
    """Retrieve certificate order id from Digicert API."""
    order_url = "{0}/services/v2/order/certificate/{1}".format(base_url, order_id)
    response_data = handle_response(session.get(order_url))
    if response_data['status'] != 'issued':
    if response_data["status"] != "issued":
        raise Exception("Order not in issued state.")

    return response_data['certificate']['id']
    return response_data["certificate"]["id"]


@retry(stop_max_attempt_number=10, wait_fixed=10000)
def get_cis_certificate(session, base_url, order_id):
    """Retrieve certificate order id from Digicert API."""
    certificate_url = '{0}/platform/cis/certificate/{1}'.format(base_url, order_id)
    session.headers.update(
        {'Accept': 'application/x-pem-file'}
    )
    certificate_url = "{0}/platform/cis/certificate/{1}".format(base_url, order_id)
    session.headers.update({"Accept": "application/x-pem-file"})
    response = session.get(certificate_url)

    if response.status_code == 404:
|
||||
|
||||
class DigiCertSourcePlugin(SourcePlugin):
|
||||
"""Wrap the Digicert Certifcate API."""
|
||||
title = 'DigiCert'
|
||||
slug = 'digicert-source'
|
||||
|
||||
title = "DigiCert"
|
||||
slug = "digicert-source"
|
||||
description = "Enables the use of Digicert as a source of existing certificates."
|
||||
version = digicert.VERSION
|
||||
|
||||
author = 'Kevin Glisson'
|
||||
author_url = 'https://github.com/netflix/lemur.git'
|
||||
author = "Kevin Glisson"
|
||||
author_url = "https://github.com/netflix/lemur.git"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize source with appropriate details."""
|
||||
required_vars = [
|
||||
'DIGICERT_API_KEY',
|
||||
'DIGICERT_URL',
|
||||
'DIGICERT_ORG_ID',
|
||||
'DIGICERT_ROOT',
|
||||
"DIGICERT_API_KEY",
|
||||
"DIGICERT_URL",
|
||||
"DIGICERT_ORG_ID",
|
||||
"DIGICERT_ROOT",
|
||||
]
|
||||
validate_conf(current_app, required_vars)
|
||||
|
||||
self.session = requests.Session()
|
||||
self.session.headers.update(
|
||||
{
|
||||
'X-DC-DEVKEY': current_app.config['DIGICERT_API_KEY'],
|
||||
'Content-Type': 'application/json'
|
||||
"X-DC-DEVKEY": current_app.config["DIGICERT_API_KEY"],
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
)
|
||||
|
||||
@ -256,22 +262,23 @@ class DigiCertSourcePlugin(SourcePlugin):
|
||||
|
||||
class DigiCertIssuerPlugin(IssuerPlugin):
|
||||
"""Wrap the Digicert Issuer API."""
|
||||
title = 'DigiCert'
|
||||
slug = 'digicert-issuer'
|
||||
|
||||
title = "DigiCert"
|
||||
slug = "digicert-issuer"
|
||||
description = "Enables the creation of certificates by the DigiCert REST API."
|
||||
version = digicert.VERSION
|
||||
|
||||
author = 'Kevin Glisson'
|
||||
author_url = 'https://github.com/netflix/lemur.git'
|
||||
author = "Kevin Glisson"
|
||||
author_url = "https://github.com/netflix/lemur.git"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize the issuer with the appropriate details."""
|
||||
required_vars = [
|
||||
'DIGICERT_API_KEY',
|
||||
'DIGICERT_URL',
|
||||
'DIGICERT_ORG_ID',
|
||||
'DIGICERT_ORDER_TYPE',
|
||||
'DIGICERT_ROOT',
|
||||
"DIGICERT_API_KEY",
|
||||
"DIGICERT_URL",
|
||||
"DIGICERT_ORG_ID",
|
||||
"DIGICERT_ORDER_TYPE",
|
||||
"DIGICERT_ROOT",
|
||||
]
|
||||
|
||||
validate_conf(current_app, required_vars)
|
||||
@ -279,8 +286,8 @@ class DigiCertIssuerPlugin(IssuerPlugin):
        self.session = requests.Session()
        self.session.headers.update(
            {
                'X-DC-DEVKEY': current_app.config['DIGICERT_API_KEY'],
                'Content-Type': 'application/json'
                "X-DC-DEVKEY": current_app.config["DIGICERT_API_KEY"],
                "Content-Type": "application/json",
            }
        )
@ -295,69 +302,93 @@ class DigiCertIssuerPlugin(IssuerPlugin):
|
||||
:param issuer_options:
|
||||
:return: :raise Exception:
|
||||
"""
|
||||
base_url = current_app.config.get('DIGICERT_URL')
|
||||
cert_type = current_app.config.get('DIGICERT_ORDER_TYPE')
|
||||
base_url = current_app.config.get("DIGICERT_URL")
|
||||
cert_type = current_app.config.get("DIGICERT_ORDER_TYPE")
|
||||
|
||||
# make certificate request
|
||||
determinator_url = "{0}/services/v2/order/certificate/{1}".format(base_url, cert_type)
|
||||
determinator_url = "{0}/services/v2/order/certificate/{1}".format(
|
||||
base_url, cert_type
|
||||
)
|
||||
data = map_fields(issuer_options, csr)
|
||||
response = self.session.post(determinator_url, data=json.dumps(data))
|
||||
|
||||
if response.status_code > 399:
|
||||
raise Exception(response.json()['errors'][0]['message'])
|
||||
raise Exception(response.json()["errors"][0]["message"])
|
||||
|
||||
order_id = response.json()['id']
|
||||
order_id = response.json()["id"]
|
||||
|
||||
certificate_id = get_certificate_id(self.session, base_url, order_id)
|
||||
|
||||
# retrieve certificate
|
||||
certificate_url = "{0}/services/v2/certificate/{1}/download/format/pem_all".format(base_url, certificate_id)
|
||||
end_entity, intermediate, root = pem.parse(self.session.get(certificate_url).content)
|
||||
return "\n".join(str(end_entity).splitlines()), "\n".join(str(intermediate).splitlines()), certificate_id
|
||||
certificate_url = "{0}/services/v2/certificate/{1}/download/format/pem_all".format(
|
||||
base_url, certificate_id
|
||||
)
|
||||
end_entity, intermediate, root = pem.parse(
|
||||
self.session.get(certificate_url).content
|
||||
)
|
||||
return (
|
||||
"\n".join(str(end_entity).splitlines()),
|
||||
"\n".join(str(intermediate).splitlines()),
|
||||
certificate_id,
|
||||
)
|
||||
|
||||
def revoke_certificate(self, certificate, comments):
|
||||
"""Revoke a Digicert certificate."""
|
||||
base_url = current_app.config.get('DIGICERT_URL')
|
||||
base_url = current_app.config.get("DIGICERT_URL")
|
||||
|
||||
# make certificate revoke request
|
||||
create_url = '{0}/services/v2/certificate/{1}/revoke'.format(base_url, certificate.external_id)
|
||||
metrics.send('digicert_revoke_certificate', 'counter', 1)
|
||||
response = self.session.put(create_url, data=json.dumps({'comments': comments}))
|
||||
create_url = "{0}/services/v2/certificate/{1}/revoke".format(
|
||||
base_url, certificate.external_id
|
||||
)
|
||||
metrics.send("digicert_revoke_certificate", "counter", 1)
|
||||
response = self.session.put(create_url, data=json.dumps({"comments": comments}))
|
||||
return handle_response(response)
|
||||
|
||||
def get_ordered_certificate(self, pending_cert):
|
||||
""" Retrieve a certificate via order id """
|
||||
order_id = pending_cert.external_id
|
||||
base_url = current_app.config.get('DIGICERT_URL')
|
||||
base_url = current_app.config.get("DIGICERT_URL")
|
||||
try:
|
||||
certificate_id = get_certificate_id(self.session, base_url, order_id)
|
||||
except Exception as ex:
|
||||
return None
|
||||
certificate_url = "{0}/services/v2/certificate/{1}/download/format/pem_all".format(base_url, certificate_id)
|
||||
end_entity, intermediate, root = pem.parse(self.session.get(certificate_url).content)
|
||||
cert = {'body': "\n".join(str(end_entity).splitlines()),
|
||||
'chain': "\n".join(str(intermediate).splitlines()),
|
||||
'external_id': str(certificate_id)}
|
||||
certificate_url = "{0}/services/v2/certificate/{1}/download/format/pem_all".format(
|
||||
base_url, certificate_id
|
||||
)
|
||||
end_entity, intermediate, root = pem.parse(
|
||||
self.session.get(certificate_url).content
|
||||
)
|
||||
cert = {
|
||||
"body": "\n".join(str(end_entity).splitlines()),
|
||||
"chain": "\n".join(str(intermediate).splitlines()),
|
||||
"external_id": str(certificate_id),
|
||||
}
|
||||
return cert
|
||||
|
||||
def cancel_ordered_certificate(self, pending_cert, **kwargs):
|
||||
""" Set the certificate order to canceled """
|
||||
base_url = current_app.config.get('DIGICERT_URL')
|
||||
api_url = "{0}/services/v2/order/certificate/{1}/status".format(base_url, pending_cert.external_id)
|
||||
payload = {
|
||||
'status': 'CANCELED',
|
||||
'note': kwargs.get('note')
|
||||
}
|
||||
base_url = current_app.config.get("DIGICERT_URL")
|
||||
api_url = "{0}/services/v2/order/certificate/{1}/status".format(
|
||||
base_url, pending_cert.external_id
|
||||
)
|
||||
payload = {"status": "CANCELED", "note": kwargs.get("note")}
|
||||
response = self.session.put(api_url, data=json.dumps(payload))
|
||||
if response.status_code == 404:
|
||||
# not well documented by Digicert, but either the certificate does not exist or we
|
||||
# don't own that order (someone else's order id!). Either way, we can just ignore it
|
||||
# and have it removed from Lemur
|
||||
current_app.logger.warning(
|
||||
"Digicert Plugin tried to cancel pending certificate {0} but it does not exist!".format(pending_cert.name))
|
||||
"Digicert Plugin tried to cancel pending certificate {0} but it does not exist!".format(
|
||||
pending_cert.name
|
||||
)
|
||||
)
|
||||
elif response.status_code != 204:
|
||||
current_app.logger.debug("{0} code {1}".format(response.status_code, response.content))
|
||||
raise Exception("Failed to cancel pending certificate {0}".format(pending_cert.name))
|
||||
current_app.logger.debug(
|
||||
"{0} code {1}".format(response.status_code, response.content)
|
||||
)
|
||||
raise Exception(
|
||||
"Failed to cancel pending certificate {0}".format(pending_cert.name)
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def create_authority(options):
|
||||
@ -370,72 +401,81 @@ class DigiCertIssuerPlugin(IssuerPlugin):
|
||||
:param options:
|
||||
:return:
|
||||
"""
|
||||
role = {'username': '', 'password': '', 'name': 'digicert'}
|
||||
return current_app.config.get('DIGICERT_ROOT'), "", [role]
|
||||
role = {"username": "", "password": "", "name": "digicert"}
|
||||
return current_app.config.get("DIGICERT_ROOT"), "", [role]
|
||||
|
||||
|
||||
class DigiCertCISSourcePlugin(SourcePlugin):
|
||||
"""Wrap the Digicert CIS Certifcate API."""
|
||||
title = 'DigiCert'
|
||||
slug = 'digicert-cis-source'
|
||||
|
||||
title = "DigiCert"
|
||||
slug = "digicert-cis-source"
|
||||
description = "Enables the use of Digicert as a source of existing certificates."
|
||||
version = digicert.VERSION
|
||||
|
||||
author = 'Kevin Glisson'
|
||||
author_url = 'https://github.com/netflix/lemur.git'
|
||||
author = "Kevin Glisson"
|
||||
author_url = "https://github.com/netflix/lemur.git"
|
||||
|
||||
additional_options = []
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize source with appropriate details."""
|
||||
required_vars = [
|
||||
'DIGICERT_CIS_API_KEY',
|
||||
'DIGICERT_CIS_URL',
|
||||
'DIGICERT_CIS_ROOT',
|
||||
'DIGICERT_CIS_INTERMEDIATE',
|
||||
'DIGICERT_CIS_PROFILE_NAME'
|
||||
"DIGICERT_CIS_API_KEY",
|
||||
"DIGICERT_CIS_URL",
|
||||
"DIGICERT_CIS_ROOT",
|
||||
"DIGICERT_CIS_INTERMEDIATE",
|
||||
"DIGICERT_CIS_PROFILE_NAME",
|
||||
]
|
||||
validate_conf(current_app, required_vars)
|
||||
|
||||
self.session = requests.Session()
|
||||
self.session.headers.update(
|
||||
{
|
||||
'X-DC-DEVKEY': current_app.config['DIGICERT_CIS_API_KEY'],
|
||||
'Content-Type': 'application/json'
|
||||
"X-DC-DEVKEY": current_app.config["DIGICERT_CIS_API_KEY"],
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
)
|
||||
|
||||
self.session.hooks = dict(response=log_status_code)
|
||||
|
||||
a = requests.adapters.HTTPAdapter(max_retries=3)
|
||||
self.session.mount('https://', a)
|
||||
self.session.mount("https://", a)
|
||||
|
||||
super(DigiCertCISSourcePlugin, self).__init__(*args, **kwargs)
|
||||
|
||||
def get_certificates(self, options, **kwargs):
|
||||
"""Fetch all Digicert certificates."""
|
||||
base_url = current_app.config.get('DIGICERT_CIS_URL')
|
||||
base_url = current_app.config.get("DIGICERT_CIS_URL")
|
||||
|
||||
# make request
|
||||
search_url = '{0}/platform/cis/certificate/search'.format(base_url)
|
||||
search_url = "{0}/platform/cis/certificate/search".format(base_url)
|
||||
|
||||
certs = []
|
||||
page = 1
|
||||
|
||||
while True:
|
||||
response = self.session.get(search_url, params={'status': ['issued'], 'page': page})
|
||||
response = self.session.get(
|
||||
search_url, params={"status": ["issued"], "page": page}
|
||||
)
|
||||
data = handle_cis_response(response)
|
||||
|
||||
for c in data['certificates']:
|
||||
download_url = '{0}/platform/cis/certificate/{1}'.format(base_url, c['id'])
|
||||
for c in data["certificates"]:
|
||||
download_url = "{0}/platform/cis/certificate/{1}".format(
|
||||
base_url, c["id"]
|
||||
)
|
||||
certificate = self.session.get(download_url)
|
||||
|
||||
# normalize serial
|
||||
serial = str(int(c['serial_number'], 16))
|
||||
cert = {'body': certificate.content, 'serial': serial, 'external_id': c['id']}
|
||||
serial = str(int(c["serial_number"], 16))
|
||||
cert = {
|
||||
"body": certificate.content,
|
||||
"serial": serial,
|
||||
"external_id": c["id"],
|
||||
}
|
||||
certs.append(cert)
|
||||
|
||||
if page == data['total_pages']:
|
||||
if page == data["total_pages"]:
|
||||
break
|
||||
|
||||
page += 1
|
||||
@ -444,22 +484,23 @@ class DigiCertCISSourcePlugin(SourcePlugin):
|
||||
|
||||
class DigiCertCISIssuerPlugin(IssuerPlugin):
|
||||
"""Wrap the Digicert Certificate Issuing API."""
|
||||
title = 'DigiCert CIS'
|
||||
slug = 'digicert-cis-issuer'
|
||||
|
||||
title = "DigiCert CIS"
|
||||
slug = "digicert-cis-issuer"
|
||||
description = "Enables the creation of certificates by the DigiCert CIS REST API."
|
||||
version = digicert.VERSION
|
||||
|
||||
author = 'Kevin Glisson'
|
||||
author_url = 'https://github.com/netflix/lemur.git'
|
||||
author = "Kevin Glisson"
|
||||
author_url = "https://github.com/netflix/lemur.git"
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
"""Initialize the issuer with the appropriate details."""
|
||||
required_vars = [
|
||||
'DIGICERT_CIS_API_KEY',
|
||||
'DIGICERT_CIS_URL',
|
||||
'DIGICERT_CIS_ROOT',
|
||||
'DIGICERT_CIS_INTERMEDIATE',
|
||||
'DIGICERT_CIS_PROFILE_NAME'
|
||||
"DIGICERT_CIS_API_KEY",
|
||||
"DIGICERT_CIS_URL",
|
||||
"DIGICERT_CIS_ROOT",
|
||||
"DIGICERT_CIS_INTERMEDIATE",
|
||||
"DIGICERT_CIS_PROFILE_NAME",
|
||||
]
|
||||
|
||||
validate_conf(current_app, required_vars)
|
||||
@ -467,8 +508,8 @@ class DigiCertCISIssuerPlugin(IssuerPlugin):
        self.session = requests.Session()
        self.session.headers.update(
            {
                'X-DC-DEVKEY': current_app.config['DIGICERT_CIS_API_KEY'],
                'Content-Type': 'application/json'
                "X-DC-DEVKEY": current_app.config["DIGICERT_CIS_API_KEY"],
                "Content-Type": "application/json",
            }
        )
@ -478,41 +519,51 @@ class DigiCertCISIssuerPlugin(IssuerPlugin):
|
||||
|
||||
def create_certificate(self, csr, issuer_options):
|
||||
"""Create a DigiCert certificate."""
|
||||
base_url = current_app.config.get('DIGICERT_CIS_URL')
|
||||
base_url = current_app.config.get("DIGICERT_CIS_URL")
|
||||
|
||||
# make certificate request
|
||||
create_url = '{0}/platform/cis/certificate'.format(base_url)
|
||||
create_url = "{0}/platform/cis/certificate".format(base_url)
|
||||
|
||||
data = map_cis_fields(issuer_options, csr)
|
||||
response = self.session.post(create_url, data=json.dumps(data))
|
||||
data = handle_cis_response(response)
|
||||
|
||||
# retrieve certificate
|
||||
certificate_pem = get_cis_certificate(self.session, base_url, data['id'])
|
||||
certificate_pem = get_cis_certificate(self.session, base_url, data["id"])
|
||||
|
||||
self.session.headers.pop('Accept')
|
||||
self.session.headers.pop("Accept")
|
||||
end_entity = pem.parse(certificate_pem)[0]
|
||||
|
||||
if 'ECC' in issuer_options['key_type']:
|
||||
return "\n".join(str(end_entity).splitlines()), current_app.config.get('DIGICERT_ECC_CIS_INTERMEDIATE'), data['id']
|
||||
if "ECC" in issuer_options["key_type"]:
|
||||
return (
|
||||
"\n".join(str(end_entity).splitlines()),
|
||||
current_app.config.get("DIGICERT_ECC_CIS_INTERMEDIATE"),
|
||||
data["id"],
|
||||
)
|
||||
|
||||
# By default return RSA
|
||||
return "\n".join(str(end_entity).splitlines()), current_app.config.get('DIGICERT_CIS_INTERMEDIATE'), data['id']
|
||||
return (
|
||||
"\n".join(str(end_entity).splitlines()),
|
||||
current_app.config.get("DIGICERT_CIS_INTERMEDIATE"),
|
||||
data["id"],
|
||||
)
|
||||
|
||||
def revoke_certificate(self, certificate, comments):
|
||||
"""Revoke a Digicert certificate."""
|
||||
base_url = current_app.config.get('DIGICERT_CIS_URL')
|
||||
base_url = current_app.config.get("DIGICERT_CIS_URL")
|
||||
|
||||
# make certificate revoke request
|
||||
revoke_url = '{0}/platform/cis/certificate/{1}/revoke'.format(base_url, certificate.external_id)
|
||||
metrics.send('digicert_revoke_certificate_success', 'counter', 1)
|
||||
response = self.session.put(revoke_url, data=json.dumps({'comments': comments}))
|
||||
revoke_url = "{0}/platform/cis/certificate/{1}/revoke".format(
|
||||
base_url, certificate.external_id
|
||||
)
|
||||
metrics.send("digicert_revoke_certificate_success", "counter", 1)
|
||||
response = self.session.put(revoke_url, data=json.dumps({"comments": comments}))
|
||||
|
||||
if response.status_code != 204:
|
||||
metrics.send('digicert_revoke_certificate_failure', 'counter', 1)
|
||||
raise Exception('Failed to revoke certificate.')
|
||||
metrics.send("digicert_revoke_certificate_failure", "counter", 1)
|
||||
raise Exception("Failed to revoke certificate.")
|
||||
|
||||
metrics.send('digicert_revoke_certificate_success', 'counter', 1)
|
||||
metrics.send("digicert_revoke_certificate_success", "counter", 1)
|
||||
|
||||
@staticmethod
|
||||
def create_authority(options):
|
||||
@ -525,5 +576,5 @@ class DigiCertCISIssuerPlugin(IssuerPlugin):
|
||||
:param options:
|
||||
:return:
|
||||
"""
|
||||
role = {'username': '', 'password': '', 'name': 'digicert'}
|
||||
return current_app.config.get('DIGICERT_CIS_ROOT'), "", [role]
|
||||
role = {"username": "", "password": "", "name": "digicert"}
|
||||
return current_app.config.get("DIGICERT_CIS_ROOT"), "", [role]
|
||||
|
@ -13,144 +13,129 @@ from cryptography import x509
|
||||
def test_map_fields_with_validity_end_and_start(app):
|
||||
from lemur.plugins.lemur_digicert.plugin import map_fields
|
||||
|
||||
names = [u'one.example.com', u'two.example.com', u'three.example.com']
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
|
||||
options = {
|
||||
'common_name': 'example.com',
|
||||
'owner': 'bob@example.com',
|
||||
'description': 'test certificate',
|
||||
'extensions': {
|
||||
'sub_alt_names': {
|
||||
'names': [x509.DNSName(x) for x in names]
|
||||
}
|
||||
},
|
||||
'validity_end': arrow.get(2017, 5, 7),
|
||||
'validity_start': arrow.get(2016, 10, 30)
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"validity_end": arrow.get(2017, 5, 7),
|
||||
"validity_start": arrow.get(2016, 10, 30),
|
||||
}
|
||||
|
||||
data = map_fields(options, CSR_STR)
|
||||
|
||||
assert data == {
|
||||
'certificate': {
|
||||
'csr': CSR_STR,
|
||||
'common_name': 'example.com',
|
||||
'dns_names': names,
|
||||
'signature_hash': 'sha256'
|
||||
"certificate": {
|
||||
"csr": CSR_STR,
|
||||
"common_name": "example.com",
|
||||
"dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
},
|
||||
'organization': {'id': 111111},
|
||||
'custom_expiration_date': arrow.get(2017, 5, 7).format('YYYY-MM-DD')
|
||||
"organization": {"id": 111111},
|
||||
"custom_expiration_date": arrow.get(2017, 5, 7).format("YYYY-MM-DD"),
|
||||
}
|
||||
|
||||
|
||||
def test_map_fields_with_validity_years(app):
|
||||
from lemur.plugins.lemur_digicert.plugin import map_fields
|
||||
|
||||
names = [u'one.example.com', u'two.example.com', u'three.example.com']
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
|
||||
options = {
|
||||
'common_name': 'example.com',
|
||||
'owner': 'bob@example.com',
|
||||
'description': 'test certificate',
|
||||
'extensions': {
|
||||
'sub_alt_names': {
|
||||
'names': [x509.DNSName(x) for x in names]
|
||||
}
|
||||
},
|
||||
'validity_years': 2,
|
||||
'validity_end': arrow.get(2017, 10, 30)
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"validity_years": 2,
|
||||
"validity_end": arrow.get(2017, 10, 30),
|
||||
}
|
||||
|
||||
data = map_fields(options, CSR_STR)
|
||||
|
||||
assert data == {
|
||||
'certificate': {
|
||||
'csr': CSR_STR,
|
||||
'common_name': 'example.com',
|
||||
'dns_names': names,
|
||||
'signature_hash': 'sha256'
|
||||
"certificate": {
|
||||
"csr": CSR_STR,
|
||||
"common_name": "example.com",
|
||||
"dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
},
|
||||
'organization': {'id': 111111},
|
||||
'validity_years': 2
|
||||
"organization": {"id": 111111},
|
||||
"validity_years": 2,
|
||||
}
|
||||
|
||||
|
||||
def test_map_cis_fields(app):
|
||||
from lemur.plugins.lemur_digicert.plugin import map_cis_fields
|
||||
|
||||
names = [u'one.example.com', u'two.example.com', u'three.example.com']
|
||||
names = [u"one.example.com", u"two.example.com", u"three.example.com"]
|
||||
|
||||
options = {
|
||||
'common_name': 'example.com',
|
||||
'owner': 'bob@example.com',
|
||||
'description': 'test certificate',
|
||||
'extensions': {
|
||||
'sub_alt_names': {
|
||||
'names': [x509.DNSName(x) for x in names]
|
||||
}
|
||||
},
|
||||
'organization': 'Example, Inc.',
|
||||
'organizational_unit': 'Example Org',
|
||||
'validity_end': arrow.get(2017, 5, 7),
|
||||
'validity_start': arrow.get(2016, 10, 30)
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"organization": "Example, Inc.",
|
||||
"organizational_unit": "Example Org",
|
||||
"validity_end": arrow.get(2017, 5, 7),
|
||||
"validity_start": arrow.get(2016, 10, 30),
|
||||
}
|
||||
|
||||
data = map_cis_fields(options, CSR_STR)
|
||||
|
||||
assert data == {
|
||||
'common_name': 'example.com',
|
||||
'csr': CSR_STR,
|
||||
'additional_dns_names': names,
|
||||
'signature_hash': 'sha256',
|
||||
'organization': {'name': 'Example, Inc.', 'units': ['Example Org']},
|
||||
'validity': {
|
||||
'valid_to': arrow.get(2017, 5, 7).format('YYYY-MM-DDTHH:MM') + 'Z'
|
||||
"common_name": "example.com",
|
||||
"csr": CSR_STR,
|
||||
"additional_dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
"organization": {"name": "Example, Inc.", "units": ["Example Org"]},
|
||||
"validity": {
|
||||
"valid_to": arrow.get(2017, 5, 7).format("YYYY-MM-DDTHH:MM") + "Z"
|
||||
},
|
||||
'profile_name': None
|
||||
"profile_name": None,
|
||||
}
|
||||
|
||||
options = {
|
||||
'common_name': 'example.com',
|
||||
'owner': 'bob@example.com',
|
||||
'description': 'test certificate',
|
||||
'extensions': {
|
||||
'sub_alt_names': {
|
||||
'names': [x509.DNSName(x) for x in names]
|
||||
}
|
||||
},
|
||||
'organization': 'Example, Inc.',
|
||||
'organizational_unit': 'Example Org',
|
||||
'validity_years': 2
|
||||
"common_name": "example.com",
|
||||
"owner": "bob@example.com",
|
||||
"description": "test certificate",
|
||||
"extensions": {"sub_alt_names": {"names": [x509.DNSName(x) for x in names]}},
|
||||
"organization": "Example, Inc.",
|
||||
"organizational_unit": "Example Org",
|
||||
"validity_years": 2,
|
||||
}
|
||||
|
||||
with freeze_time(time_to_freeze=arrow.get(2016, 11, 3).datetime):
|
||||
data = map_cis_fields(options, CSR_STR)
|
||||
|
||||
assert data == {
|
||||
'common_name': 'example.com',
|
||||
'csr': CSR_STR,
|
||||
'additional_dns_names': names,
|
||||
'signature_hash': 'sha256',
|
||||
'organization': {'name': 'Example, Inc.', 'units': ['Example Org']},
|
||||
'validity': {
|
||||
'valid_to': arrow.get(2018, 11, 3).format('YYYY-MM-DDTHH:MM') + 'Z'
|
||||
"common_name": "example.com",
|
||||
"csr": CSR_STR,
|
||||
"additional_dns_names": names,
|
||||
"signature_hash": "sha256",
|
||||
"organization": {"name": "Example, Inc.", "units": ["Example Org"]},
|
||||
"validity": {
|
||||
"valid_to": arrow.get(2018, 11, 3).format("YYYY-MM-DDTHH:MM") + "Z"
|
||||
},
|
||||
'profile_name': None
|
||||
"profile_name": None,
|
||||
}
|
||||
|
||||
|
||||
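The frozen-time assertion above pins down the date arithmetic: a 2-year validity requested on 2016-11-03 must surface as a 2018-11-03 valid_to. A small sketch of that conversion, assuming arrow and freezegun as already used in these tests; valid_to_from_years is an illustrative helper, not part of the plugin:

import arrow
from freezegun import freeze_time

def valid_to_from_years(years):
    # shift "now" by the requested number of years and format it the way
    # the CIS payload above expects
    return arrow.utcnow().shift(years=years).format("YYYY-MM-DDTHH:MM") + "Z"

with freeze_time(time_to_freeze=arrow.get(2016, 11, 3).datetime):
    expected = arrow.get(2018, 11, 3).format("YYYY-MM-DDTHH:MM") + "Z"
    assert valid_to_from_years(2) == expected
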
def test_signature_hash(app):
from lemur.plugins.lemur_digicert.plugin import signature_hash

assert signature_hash(None) == 'sha256'
assert signature_hash('sha256WithRSA') == 'sha256'
assert signature_hash('sha384WithRSA') == 'sha384'
assert signature_hash('sha512WithRSA') == 'sha512'
assert signature_hash(None) == "sha256"
assert signature_hash("sha256WithRSA") == "sha256"
assert signature_hash("sha384WithRSA") == "sha384"
assert signature_hash("sha512WithRSA") == "sha512"

with pytest.raises(Exception):
signature_hash('sdfdsf')
signature_hash("sdfdsf")

def test_issuer_plugin_create_certificate(certificate_="""\
|
||||
def test_issuer_plugin_create_certificate(
|
||||
certificate_="""\
|
||||
-----BEGIN CERTIFICATE-----
|
||||
abc
|
||||
-----END CERTIFICATE-----
|
||||
@ -160,7 +145,8 @@ def
|
||||
-----BEGIN CERTIFICATE-----
|
||||
ghi
|
||||
-----END CERTIFICATE-----
|
||||
"""):
|
||||
"""
|
||||
):
|
||||
import requests_mock
|
||||
from lemur.plugins.lemur_digicert.plugin import DigiCertIssuerPlugin
|
||||
|
||||
@ -168,12 +154,26 @@ ghi
|
||||
|
||||
subject = DigiCertIssuerPlugin()
|
||||
adapter = requests_mock.Adapter()
|
||||
adapter.register_uri('POST', 'mock://www.digicert.com/services/v2/order/certificate/ssl_plus', text=json.dumps({'id': 'id123'}))
|
||||
adapter.register_uri('GET', 'mock://www.digicert.com/services/v2/order/certificate/id123', text=json.dumps({'status': 'issued', 'certificate': {'id': 'cert123'}}))
|
||||
adapter.register_uri('GET', 'mock://www.digicert.com/services/v2/certificate/cert123/download/format/pem_all', text=pem_fixture)
|
||||
subject.session.mount('mock', adapter)
|
||||
adapter.register_uri(
|
||||
"POST",
|
||||
"mock://www.digicert.com/services/v2/order/certificate/ssl_plus",
|
||||
text=json.dumps({"id": "id123"}),
|
||||
)
|
||||
adapter.register_uri(
|
||||
"GET",
|
||||
"mock://www.digicert.com/services/v2/order/certificate/id123",
|
||||
text=json.dumps({"status": "issued", "certificate": {"id": "cert123"}}),
|
||||
)
|
||||
adapter.register_uri(
|
||||
"GET",
|
||||
"mock://www.digicert.com/services/v2/certificate/cert123/download/format/pem_all",
|
||||
text=pem_fixture,
|
||||
)
|
||||
subject.session.mount("mock", adapter)
|
||||
|
||||
cert, intermediate, external_id = subject.create_certificate("", {'common_name': 'test.com'})
|
||||
cert, intermediate, external_id = subject.create_certificate(
|
||||
"", {"common_name": "test.com"}
|
||||
)
|
||||
|
||||
assert cert == "-----BEGIN CERTIFICATE-----\nabc\n-----END CERTIFICATE-----"
|
||||
assert intermediate == "-----BEGIN CERTIFICATE-----\ndef\n-----END CERTIFICATE-----"
|
||||
@ -187,10 +187,18 @@ def test_cancel_ordered_certificate(mock_pending_cert):
|
||||
mock_pending_cert.external_id = 1234
|
||||
subject = DigiCertIssuerPlugin()
|
||||
adapter = requests_mock.Adapter()
|
||||
adapter.register_uri('PUT', 'mock://www.digicert.com/services/v2/order/certificate/1234/status', status_code=204)
|
||||
adapter.register_uri('PUT', 'mock://www.digicert.com/services/v2/order/certificate/111/status', status_code=404)
|
||||
subject.session.mount('mock', adapter)
|
||||
data = {'note': 'Test'}
|
||||
adapter.register_uri(
|
||||
"PUT",
|
||||
"mock://www.digicert.com/services/v2/order/certificate/1234/status",
|
||||
status_code=204,
|
||||
)
|
||||
adapter.register_uri(
|
||||
"PUT",
|
||||
"mock://www.digicert.com/services/v2/order/certificate/111/status",
|
||||
status_code=404,
|
||||
)
|
||||
subject.session.mount("mock", adapter)
|
||||
data = {"note": "Test"}
|
||||
subject.cancel_ordered_certificate(mock_pending_cert, **data)
|
||||
|
||||
# A non-existing order id, does not raise exception because if it doesn't exist, then it doesn't matter
|
||||
|
@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"

@ -27,8 +27,10 @@ def render_html(template_name, message):
:param message:
:return:
"""
template = env.get_template('{}.html'.format(template_name))
return template.render(dict(message=message, hostname=current_app.config.get('LEMUR_HOSTNAME')))
template = env.get_template("{}.html".format(template_name))
return template.render(
dict(message=message, hostname=current_app.config.get("LEMUR_HOSTNAME"))
)

def send_via_smtp(subject, body, targets):
@ -40,7 +42,9 @@ def send_via_smtp(subject, body, targets):
:param targets:
:return:
"""
msg = Message(subject, recipients=targets, sender=current_app.config.get("LEMUR_EMAIL"))
msg = Message(
subject, recipients=targets, sender=current_app.config.get("LEMUR_EMAIL")
)
msg.body = "" # kinda a weird api for sending html emails
msg.html = body
smtp_mail.send(msg)
@ -54,65 +58,55 @@ def send_via_ses(subject, body, targets):
:param targets:
:return:
"""
client = boto3.client('ses', region_name='us-east-1')
client = boto3.client("ses", region_name="us-east-1")
client.send_email(
Source=current_app.config.get('LEMUR_EMAIL'),
Destination={
'ToAddresses': targets
},
Source=current_app.config.get("LEMUR_EMAIL"),
Destination={"ToAddresses": targets},
Message={
'Subject': {
'Data': subject,
'Charset': 'UTF-8'
},
'Body': {
'Html': {
'Data': body,
'Charset': 'UTF-8'
}
}
}
"Subject": {"Data": subject, "Charset": "UTF-8"},
"Body": {"Html": {"Data": body, "Charset": "UTF-8"}},
},
)

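The helpers above only need a handful of Flask settings. A hedged sketch of the relevant configuration, with placeholder values rather than Lemur defaults:

LEMUR_HOSTNAME = "lemur.example.com"   # used by render_html() for certificate links
LEMUR_EMAIL = "lemur@example.com"      # From: address passed to SES and smtp_mail
LEMUR_EMAIL_SENDER = "smtp"            # "ses" or "smtp"; anything else is rejected below
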
class EmailNotificationPlugin(ExpirationNotificationPlugin):
title = 'Email'
slug = 'email-notification'
description = 'Sends expiration email notifications'
title = "Email"
slug = "email-notification"
description = "Sends expiration email notifications"
version = email.VERSION

author = 'Kevin Glisson'
author_url = 'https://github.com/netflix/lemur'
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur"

additional_options = [
{
'name': 'recipients',
'type': 'str',
'required': True,
'validation': '^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$',
'helpMessage': 'Comma delimited list of email addresses',
},
"name": "recipients",
"type": "str",
"required": True,
"validation": "^([\w+-.%]+@[\w-.]+\.[A-Za-z]{2,4},?)+$",
"helpMessage": "Comma delimited list of email addresses",
}
]

def __init__(self, *args, **kwargs):
"""Initialize the plugin with the appropriate details."""
sender = current_app.config.get('LEMUR_EMAIL_SENDER', 'ses').lower()
sender = current_app.config.get("LEMUR_EMAIL_SENDER", "ses").lower()

if sender not in ['ses', 'smtp']:
raise InvalidConfiguration('Email sender type {0} is not recognized.')
if sender not in ["ses", "smtp"]:
raise InvalidConfiguration("Email sender type {0} is not recognized.")

@staticmethod
def send(notification_type, message, targets, options, **kwargs):

subject = 'Lemur: {0} Notification'.format(notification_type.capitalize())
subject = "Lemur: {0} Notification".format(notification_type.capitalize())

data = {'options': options, 'certificates': message}
data = {"options": options, "certificates": message}
body = render_html(notification_type, data)

s_type = current_app.config.get("LEMUR_EMAIL_SENDER", 'ses').lower()
s_type = current_app.config.get("LEMUR_EMAIL_SENDER", "ses").lower()

if s_type == 'ses':
if s_type == "ses":
send_via_ses(subject, body, targets)

elif s_type == 'smtp':
elif s_type == "smtp":
send_via_smtp(subject, body, targets)

@ -5,22 +5,24 @@ from jinja2 import Environment, FileSystemLoader, select_autoescape
from lemur.plugins.utils import get_plugin_option

loader = FileSystemLoader(searchpath=os.path.dirname(os.path.realpath(__file__)))
env = Environment(loader=loader, # nosec: potentially dangerous types esc.
autoescape=select_autoescape(['html', 'xml']))
env = Environment(
loader=loader, # nosec: potentially dangerous types esc.
autoescape=select_autoescape(["html", "xml"]),
)

def human_time(time):
return arrow.get(time).format('dddd, MMMM D, YYYY')
return arrow.get(time).format("dddd, MMMM D, YYYY")

def interval(options):
return get_plugin_option('interval', options)
return get_plugin_option("interval", options)

def unit(options):
return get_plugin_option('unit', options)
return get_plugin_option("unit", options)

env.filters['time'] = human_time
env.filters['interval'] = interval
env.filters['unit'] = unit
env.filters["time"] = human_time
env.filters["interval"] = interval
env.filters["unit"] = unit

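A short sketch of how the time/interval/unit filters registered above can be exercised; the inline template string is illustrative only and is not one of the bundled notification templates:

snippet = env.from_string(
    "Expires {{ message.validityEnd | time }}, "
    "checked every {{ message.options | interval }} {{ message.options | unit }}"
)
print(snippet.render(message={
    "validityEnd": "2017-05-07",
    "options": [{"name": "interval", "value": 10}, {"name": "unit", "value": "days"}],
}))
# Expires Sunday, May 7, 2017, checked every 10 days
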
@ -13,21 +13,24 @@ def test_render(certificate, endpoint):
|
||||
new_cert.replaces.append(certificate)
|
||||
|
||||
data = {
|
||||
'certificates': [certificate_notification_output_schema.dump(certificate).data],
|
||||
'options': [{'name': 'interval', 'value': 10}, {'name': 'unit', 'value': 'days'}]
|
||||
"certificates": [certificate_notification_output_schema.dump(certificate).data],
|
||||
"options": [
|
||||
{"name": "interval", "value": 10},
|
||||
{"name": "unit", "value": "days"},
|
||||
],
|
||||
}
|
||||
|
||||
template = env.get_template('{}.html'.format('expiration'))
|
||||
template = env.get_template("{}.html".format("expiration"))
|
||||
|
||||
body = template.render(dict(message=data, hostname='lemur.test.example.com'))
|
||||
body = template.render(dict(message=data, hostname="lemur.test.example.com"))
|
||||
|
||||
template = env.get_template('{}.html'.format('rotation'))
|
||||
template = env.get_template("{}.html".format("rotation"))
|
||||
|
||||
certificate.endpoints.append(endpoint)
|
||||
|
||||
body = template.render(
|
||||
dict(
|
||||
certificate=certificate_notification_output_schema.dump(certificate).data,
|
||||
hostname='lemur.test.example.com'
|
||||
hostname="lemur.test.example.com",
|
||||
)
|
||||
)
|
||||
|
@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"

@ -31,10 +31,10 @@ def create_truststore(cert, chain, alias, passphrase):
|
||||
entries = []
|
||||
for idx, cert_bytes in enumerate(cert_chain_as_der(cert, chain)):
|
||||
# The original cert gets name <ALIAS>_cert, first chain element is <ALIAS>_cert_1, etc.
|
||||
cert_alias = alias + '_cert' + ('_{}'.format(idx) if idx else '')
|
||||
cert_alias = alias + "_cert" + ("_{}".format(idx) if idx else "")
|
||||
entries.append(TrustedCertEntry.new(cert_alias, cert_bytes))
|
||||
|
||||
return KeyStore.new('jks', entries).saves(passphrase)
|
||||
return KeyStore.new("jks", entries).saves(passphrase)
|
||||
|
||||
|
||||
def create_keystore(cert, chain, key, alias, passphrase):
|
||||
@ -42,36 +42,36 @@ def create_keystore(cert, chain, key, alias, passphrase):
|
||||
key_bytes = parse_private_key(key).private_bytes(
|
||||
encoding=serialization.Encoding.DER,
|
||||
format=serialization.PrivateFormat.PKCS8,
|
||||
encryption_algorithm=serialization.NoEncryption()
|
||||
encryption_algorithm=serialization.NoEncryption(),
|
||||
)
|
||||
entry = PrivateKeyEntry.new(alias, certs_bytes, key_bytes)
|
||||
|
||||
return KeyStore.new('jks', [entry]).saves(passphrase)
|
||||
return KeyStore.new("jks", [entry]).saves(passphrase)
|
||||
|
||||
|
||||
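A quick illustration of the alias scheme used by create_truststore() above (the original certificate becomes <alias>_cert, chain elements get a numeric suffix); the alias mirrors the one used in the tests further down:

alias = "azurediamond"
for idx in range(3):
    cert_alias = alias + "_cert" + ("_{}".format(idx) if idx else "")
    print(cert_alias)
# azurediamond_cert
# azurediamond_cert_1
# azurediamond_cert_2
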
class JavaTruststoreExportPlugin(ExportPlugin):
|
||||
title = 'Java Truststore (JKS)'
|
||||
slug = 'java-truststore-jks'
|
||||
description = 'Generates a JKS truststore'
|
||||
title = "Java Truststore (JKS)"
|
||||
slug = "java-truststore-jks"
|
||||
description = "Generates a JKS truststore"
|
||||
requires_key = False
|
||||
version = jks.VERSION
|
||||
|
||||
author = 'Marti Raudsepp'
|
||||
author_url = 'https://github.com/intgr'
|
||||
author = "Marti Raudsepp"
|
||||
author_url = "https://github.com/intgr"
|
||||
|
||||
options = [
|
||||
{
|
||||
'name': 'alias',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'Enter the alias you wish to use for the truststore.',
|
||||
"name": "alias",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "Enter the alias you wish to use for the truststore.",
|
||||
},
|
||||
{
|
||||
'name': 'passphrase',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.',
|
||||
'validation': ''
|
||||
"name": "passphrase",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.",
|
||||
"validation": "",
|
||||
},
|
||||
]
|
||||
|
||||
@ -80,44 +80,44 @@ class JavaTruststoreExportPlugin(ExportPlugin):
|
||||
Generates a Java Truststore
|
||||
"""
|
||||
|
||||
if self.get_option('alias', options):
|
||||
alias = self.get_option('alias', options)
|
||||
if self.get_option("alias", options):
|
||||
alias = self.get_option("alias", options)
|
||||
else:
|
||||
alias = common_name(parse_certificate(body))
|
||||
|
||||
if self.get_option('passphrase', options):
|
||||
passphrase = self.get_option('passphrase', options)
|
||||
if self.get_option("passphrase", options):
|
||||
passphrase = self.get_option("passphrase", options)
|
||||
else:
|
||||
passphrase = Fernet.generate_key().decode('utf-8')
|
||||
passphrase = Fernet.generate_key().decode("utf-8")
|
||||
|
||||
raw = create_truststore(body, chain, alias, passphrase)
|
||||
|
||||
return 'jks', passphrase, raw
|
||||
return "jks", passphrase, raw
|
||||
|
||||
|
||||
class JavaKeystoreExportPlugin(ExportPlugin):
|
||||
title = 'Java Keystore (JKS)'
|
||||
slug = 'java-keystore-jks'
|
||||
description = 'Generates a JKS keystore'
|
||||
title = "Java Keystore (JKS)"
|
||||
slug = "java-keystore-jks"
|
||||
description = "Generates a JKS keystore"
|
||||
version = jks.VERSION
|
||||
|
||||
author = 'Marti Raudsepp'
|
||||
author_url = 'https://github.com/intgr'
|
||||
author = "Marti Raudsepp"
|
||||
author_url = "https://github.com/intgr"
|
||||
|
||||
options = [
|
||||
{
|
||||
'name': 'passphrase',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.',
|
||||
'validation': ''
|
||||
"name": "passphrase",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.",
|
||||
"validation": "",
|
||||
},
|
||||
{
|
||||
'name': 'alias',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'Enter the alias you wish to use for the keystore.',
|
||||
}
|
||||
"name": "alias",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "Enter the alias you wish to use for the keystore.",
|
||||
},
|
||||
]
|
||||
|
||||
def export(self, body, chain, key, options, **kwargs):
|
||||
@ -125,16 +125,16 @@ class JavaKeystoreExportPlugin(ExportPlugin):
|
||||
Generates a Java Keystore
|
||||
"""
|
||||
|
||||
if self.get_option('passphrase', options):
|
||||
passphrase = self.get_option('passphrase', options)
|
||||
if self.get_option("passphrase", options):
|
||||
passphrase = self.get_option("passphrase", options)
|
||||
else:
|
||||
passphrase = Fernet.generate_key().decode('utf-8')
|
||||
passphrase = Fernet.generate_key().decode("utf-8")
|
||||
|
||||
if self.get_option('alias', options):
|
||||
alias = self.get_option('alias', options)
|
||||
if self.get_option("alias", options):
|
||||
alias = self.get_option("alias", options)
|
||||
else:
|
||||
alias = common_name(parse_certificate(body))
|
||||
|
||||
raw = create_keystore(body, chain, key, alias, passphrase)
|
||||
|
||||
return 'jks', passphrase, raw
|
||||
return "jks", passphrase, raw
|
||||
|
@ -1,96 +1,105 @@
|
||||
import pytest
|
||||
from jks import KeyStore, TrustedCertEntry, PrivateKeyEntry
|
||||
|
||||
from lemur.tests.vectors import INTERNAL_CERTIFICATE_A_STR, SAN_CERT_STR, INTERMEDIATE_CERT_STR, ROOTCA_CERT_STR, \
|
||||
SAN_CERT_KEY
|
||||
from lemur.tests.vectors import (
|
||||
INTERNAL_CERTIFICATE_A_STR,
|
||||
SAN_CERT_STR,
|
||||
INTERMEDIATE_CERT_STR,
|
||||
ROOTCA_CERT_STR,
|
||||
SAN_CERT_KEY,
|
||||
)
|
||||
|
||||
|
||||
def test_export_truststore(app):
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
p = plugins.get('java-truststore-jks')
|
||||
p = plugins.get("java-truststore-jks")
|
||||
options = [
|
||||
{'name': 'passphrase', 'value': 'hunter2'},
|
||||
{'name': 'alias', 'value': 'AzureDiamond'},
|
||||
{"name": "passphrase", "value": "hunter2"},
|
||||
{"name": "alias", "value": "AzureDiamond"},
|
||||
]
|
||||
chain = INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR
|
||||
chain = INTERMEDIATE_CERT_STR + "\n" + ROOTCA_CERT_STR
|
||||
ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options)
|
||||
|
||||
assert ext == 'jks'
|
||||
assert password == 'hunter2'
|
||||
assert ext == "jks"
|
||||
assert password == "hunter2"
|
||||
assert isinstance(raw, bytes)
|
||||
|
||||
ks = KeyStore.loads(raw, 'hunter2')
|
||||
assert ks.store_type == 'jks'
|
||||
ks = KeyStore.loads(raw, "hunter2")
|
||||
assert ks.store_type == "jks"
|
||||
# JKS lower-cases alias strings
|
||||
assert ks.entries.keys() == {'azurediamond_cert', 'azurediamond_cert_1', 'azurediamond_cert_2'}
|
||||
assert isinstance(ks.entries['azurediamond_cert'], TrustedCertEntry)
|
||||
assert ks.entries.keys() == {
|
||||
"azurediamond_cert",
|
||||
"azurediamond_cert_1",
|
||||
"azurediamond_cert_2",
|
||||
}
|
||||
assert isinstance(ks.entries["azurediamond_cert"], TrustedCertEntry)
|
||||
|
||||
|
||||
def test_export_truststore_defaults(app):
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
p = plugins.get('java-truststore-jks')
|
||||
p = plugins.get("java-truststore-jks")
|
||||
options = []
|
||||
ext, password, raw = p.export(INTERNAL_CERTIFICATE_A_STR, '', '', options)
|
||||
ext, password, raw = p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
|
||||
|
||||
assert ext == 'jks'
|
||||
assert ext == "jks"
|
||||
assert isinstance(password, str)
|
||||
assert isinstance(raw, bytes)
|
||||
|
||||
ks = KeyStore.loads(raw, password)
|
||||
assert ks.store_type == 'jks'
|
||||
assert ks.store_type == "jks"
|
||||
# JKS lower-cases alias strings
|
||||
assert ks.entries.keys() == {'acommonname_cert'}
|
||||
assert isinstance(ks.entries['acommonname_cert'], TrustedCertEntry)
|
||||
assert ks.entries.keys() == {"acommonname_cert"}
|
||||
assert isinstance(ks.entries["acommonname_cert"], TrustedCertEntry)
|
||||
|
||||
|
||||
def test_export_keystore(app):
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
p = plugins.get('java-keystore-jks')
|
||||
p = plugins.get("java-keystore-jks")
|
||||
options = [
|
||||
{'name': 'passphrase', 'value': 'hunter2'},
|
||||
{'name': 'alias', 'value': 'AzureDiamond'},
|
||||
{"name": "passphrase", "value": "hunter2"},
|
||||
{"name": "alias", "value": "AzureDiamond"},
|
||||
]
|
||||
|
||||
chain = INTERMEDIATE_CERT_STR + '\n' + ROOTCA_CERT_STR
|
||||
chain = INTERMEDIATE_CERT_STR + "\n" + ROOTCA_CERT_STR
|
||||
with pytest.raises(Exception):
|
||||
p.export(INTERNAL_CERTIFICATE_A_STR, chain, '', options)
|
||||
p.export(INTERNAL_CERTIFICATE_A_STR, chain, "", options)
|
||||
|
||||
ext, password, raw = p.export(SAN_CERT_STR, chain, SAN_CERT_KEY, options)
|
||||
|
||||
assert ext == 'jks'
|
||||
assert password == 'hunter2'
|
||||
assert ext == "jks"
|
||||
assert password == "hunter2"
|
||||
assert isinstance(raw, bytes)
|
||||
|
||||
ks = KeyStore.loads(raw, password)
|
||||
assert ks.store_type == 'jks'
|
||||
assert ks.store_type == "jks"
|
||||
# JKS lower-cases alias strings
|
||||
assert ks.entries.keys() == {'azurediamond'}
|
||||
entry = ks.entries['azurediamond']
|
||||
assert ks.entries.keys() == {"azurediamond"}
|
||||
entry = ks.entries["azurediamond"]
|
||||
assert isinstance(entry, PrivateKeyEntry)
|
||||
assert len(entry.cert_chain) == 3 # Cert and chain were provided
|
||||
assert len(entry.cert_chain) == 3 # Cert and chain were provided
|
||||
|
||||
|
||||
def test_export_keystore_defaults(app):
|
||||
from lemur.plugins.base import plugins
|
||||
|
||||
p = plugins.get('java-keystore-jks')
|
||||
p = plugins.get("java-keystore-jks")
|
||||
options = []
|
||||
|
||||
with pytest.raises(Exception):
|
||||
p.export(INTERNAL_CERTIFICATE_A_STR, '', '', options)
|
||||
p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)
|
||||
|
||||
ext, password, raw = p.export(SAN_CERT_STR, '', SAN_CERT_KEY, options)
|
||||
ext, password, raw = p.export(SAN_CERT_STR, "", SAN_CERT_KEY, options)
|
||||
|
||||
assert ext == 'jks'
|
||||
assert ext == "jks"
|
||||
assert isinstance(password, str)
|
||||
assert isinstance(raw, bytes)
|
||||
|
||||
ks = KeyStore.loads(raw, password)
|
||||
assert ks.store_type == 'jks'
|
||||
assert ks.entries.keys() == {'san.example.org'}
|
||||
entry = ks.entries['san.example.org']
|
||||
assert ks.store_type == "jks"
|
||||
assert ks.entries.keys() == {"san.example.org"}
|
||||
entry = ks.entries["san.example.org"]
|
||||
assert isinstance(entry, PrivateKeyEntry)
|
||||
assert len(entry.cert_chain) == 1 # Only cert itself, no chain was provided
|
||||
assert len(entry.cert_chain) == 1 # Only cert itself, no chain was provided
|
||||
|
@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"

@ -21,7 +21,7 @@ from lemur.common.defaults import common_name
from lemur.common.utils import parse_certificate
from lemur.plugins.bases import DestinationPlugin

DEFAULT_API_VERSION = 'v1'
DEFAULT_API_VERSION = "v1"

def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data):
@ -34,7 +34,7 @@ def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data):

if 200 <= create_resp.status_code <= 299:
return None
elif create_resp.json().get('reason', '') != 'AlreadyExists':
elif create_resp.json().get("reason", "") != "AlreadyExists":
return create_resp.content

url = _resolve_uri(k8s_base_uri, namespace, kind, name)
@ -50,22 +50,27 @@ def ensure_resource(k8s_api, k8s_base_uri, namespace, kind, name, data):

def _resolve_ns(k8s_base_uri, namespace, api_ver=DEFAULT_API_VERSION):
api_group = 'api'
if '/' in api_ver:
api_group = 'apis'
return '{base}/{api_group}/{api_ver}/namespaces'.format(base=k8s_base_uri, api_group=api_group, api_ver=api_ver) + (
'/' + namespace if namespace else '')
api_group = "api"
if "/" in api_ver:
api_group = "apis"
return "{base}/{api_group}/{api_ver}/namespaces".format(
base=k8s_base_uri, api_group=api_group, api_ver=api_ver
) + ("/" + namespace if namespace else "")

def _resolve_uri(k8s_base_uri, namespace, kind, name=None, api_ver=DEFAULT_API_VERSION):
if not namespace:
namespace = 'default'
namespace = "default"

return "/".join(itertools.chain.from_iterable([
(_resolve_ns(k8s_base_uri, namespace, api_ver=api_ver),),
((kind + 's').lower(),),
(name,) if name else (),
]))
return "/".join(
itertools.chain.from_iterable(
[
(_resolve_ns(k8s_base_uri, namespace, api_ver=api_ver),),
((kind + "s").lower(),),
(name,) if name else (),
]
)
)

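Hedged examples of the URLs the two helpers above produce; the host, namespace, and secret name are made-up values:

print(_resolve_uri("https://kubernetes.default", "prod", "Secret", name="tls-example"))
# https://kubernetes.default/api/v1/namespaces/prod/secrets/tls-example

print(_resolve_ns("https://kubernetes.default", "prod", api_ver="apps/v1"))
# https://kubernetes.default/apis/apps/v1/namespaces/prod
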
# Performs Base64 encoding of string to string using the base64.b64encode() function
|
||||
@ -76,117 +81,113 @@ def base64encode(string):
|
||||
|
||||
def build_secret(secret_format, secret_name, body, private_key, cert_chain):
|
||||
secret = {
|
||||
'apiVersion': 'v1',
|
||||
'kind': 'Secret',
|
||||
'type': 'Opaque',
|
||||
'metadata': {
|
||||
'name': secret_name,
|
||||
}
|
||||
"apiVersion": "v1",
|
||||
"kind": "Secret",
|
||||
"type": "Opaque",
|
||||
"metadata": {"name": secret_name},
|
||||
}
|
||||
if secret_format == 'Full':
|
||||
secret['data'] = {
|
||||
'combined.pem': base64encode('%s\n%s' % (body, private_key)),
|
||||
'ca.crt': base64encode(cert_chain),
|
||||
'service.key': base64encode(private_key),
|
||||
'service.crt': base64encode(body),
|
||||
if secret_format == "Full":
|
||||
secret["data"] = {
|
||||
"combined.pem": base64encode("%s\n%s" % (body, private_key)),
|
||||
"ca.crt": base64encode(cert_chain),
|
||||
"service.key": base64encode(private_key),
|
||||
"service.crt": base64encode(body),
|
||||
}
|
||||
if secret_format == 'TLS':
|
||||
secret['type'] = 'kubernetes.io/tls'
|
||||
secret['data'] = {
|
||||
'tls.crt': base64encode(cert_chain),
|
||||
'tls.key': base64encode(private_key)
|
||||
}
|
||||
if secret_format == 'Certificate':
|
||||
secret['data'] = {
|
||||
'tls.crt': base64encode(cert_chain),
|
||||
if secret_format == "TLS":
|
||||
secret["type"] = "kubernetes.io/tls"
|
||||
secret["data"] = {
|
||||
"tls.crt": base64encode(cert_chain),
|
||||
"tls.key": base64encode(private_key),
|
||||
}
|
||||
if secret_format == "Certificate":
|
||||
secret["data"] = {"tls.crt": base64encode(cert_chain)}
|
||||
return secret
|
||||
|
||||
|
||||
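A small sketch of what build_secret() returns for the "TLS" format; the PEM strings are placeholders, not real key material:

secret = build_secret(
    secret_format="TLS",
    secret_name="example-com",
    body="-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----",
    private_key="-----BEGIN RSA PRIVATE KEY-----\n...\n-----END RSA PRIVATE KEY-----",
    cert_chain="-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----",
)
assert secret["kind"] == "Secret"
assert secret["type"] == "kubernetes.io/tls"
assert set(secret["data"]) == {"tls.crt", "tls.key"}  # values are base64-encoded strings
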
class KubernetesDestinationPlugin(DestinationPlugin):
|
||||
title = 'Kubernetes'
|
||||
slug = 'kubernetes-destination'
|
||||
description = 'Allow the uploading of certificates to Kubernetes as secret'
|
||||
title = "Kubernetes"
|
||||
slug = "kubernetes-destination"
|
||||
description = "Allow the uploading of certificates to Kubernetes as secret"
|
||||
|
||||
author = 'Mikhail Khodorovskiy'
|
||||
author_url = 'https://github.com/mik373/lemur'
|
||||
author = "Mikhail Khodorovskiy"
|
||||
author_url = "https://github.com/mik373/lemur"
|
||||
|
||||
options = [
|
||||
{
|
||||
'name': 'secretNameFormat',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
"name": "secretNameFormat",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
# Validation is difficult. This regex is used by kubectl to validate secret names:
|
||||
# [a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*
|
||||
# Allowing the insertion of "{common_name}" (or any other such placeholder}
|
||||
# at any point in the string proved very challenging and had a tendency to
|
||||
# cause my browser to hang. The specified expression will allow any valid string
|
||||
# but will also accept many invalid strings.
|
||||
'validation': '(?:[a-z0-9.-]|\\{common_name\\})+',
|
||||
'helpMessage': 'Must be a valid secret name, possibly including "{common_name}"',
|
||||
'default': '{common_name}'
|
||||
"validation": "(?:[a-z0-9.-]|\\{common_name\\})+",
|
||||
"helpMessage": 'Must be a valid secret name, possibly including "{common_name}"',
|
||||
"default": "{common_name}",
|
||||
},
|
||||
{
|
||||
'name': 'kubernetesURL',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'validation': 'https?://[a-zA-Z0-9.-]+(?::[0-9]+)?',
|
||||
'helpMessage': 'Must be a valid Kubernetes server URL!',
|
||||
'default': 'https://kubernetes.default'
|
||||
"name": "kubernetesURL",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"validation": "https?://[a-zA-Z0-9.-]+(?::[0-9]+)?",
|
||||
"helpMessage": "Must be a valid Kubernetes server URL!",
|
||||
"default": "https://kubernetes.default",
|
||||
},
|
||||
{
|
||||
'name': 'kubernetesAuthToken',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'validation': '[0-9a-zA-Z-_.]+',
|
||||
'helpMessage': 'Must be a valid Kubernetes server Token!',
|
||||
"name": "kubernetesAuthToken",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"validation": "[0-9a-zA-Z-_.]+",
|
||||
"helpMessage": "Must be a valid Kubernetes server Token!",
|
||||
},
|
||||
{
|
||||
'name': 'kubernetesAuthTokenFile',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'validation': '(/[^/]+)+',
|
||||
'helpMessage': 'Must be a valid file path!',
|
||||
'default': '/var/run/secrets/kubernetes.io/serviceaccount/token'
|
||||
"name": "kubernetesAuthTokenFile",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"validation": "(/[^/]+)+",
|
||||
"helpMessage": "Must be a valid file path!",
|
||||
"default": "/var/run/secrets/kubernetes.io/serviceaccount/token",
|
||||
},
|
||||
{
|
||||
'name': 'kubernetesServerCertificate',
|
||||
'type': 'textarea',
|
||||
'required': False,
|
||||
'validation': '-----BEGIN CERTIFICATE-----[a-zA-Z0-9/+\\s\\r\\n]+-----END CERTIFICATE-----',
|
||||
'helpMessage': 'Must be a valid Kubernetes server Certificate!',
|
||||
"name": "kubernetesServerCertificate",
|
||||
"type": "textarea",
|
||||
"required": False,
|
||||
"validation": "-----BEGIN CERTIFICATE-----[a-zA-Z0-9/+\\s\\r\\n]+-----END CERTIFICATE-----",
|
||||
"helpMessage": "Must be a valid Kubernetes server Certificate!",
|
||||
},
|
||||
{
|
||||
'name': 'kubernetesServerCertificateFile',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'validation': '(/[^/]+)+',
|
||||
'helpMessage': 'Must be a valid file path!',
|
||||
'default': '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt'
|
||||
"name": "kubernetesServerCertificateFile",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"validation": "(/[^/]+)+",
|
||||
"helpMessage": "Must be a valid file path!",
|
||||
"default": "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt",
|
||||
},
|
||||
{
|
||||
'name': 'kubernetesNamespace',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'validation': '[a-z0-9]([-a-z0-9]*[a-z0-9])?',
|
||||
'helpMessage': 'Must be a valid Kubernetes Namespace!',
|
||||
"name": "kubernetesNamespace",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"validation": "[a-z0-9]([-a-z0-9]*[a-z0-9])?",
|
||||
"helpMessage": "Must be a valid Kubernetes Namespace!",
|
||||
},
|
||||
{
|
||||
'name': 'kubernetesNamespaceFile',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'validation': '(/[^/]+)+',
|
||||
'helpMessage': 'Must be a valid file path!',
|
||||
'default': '/var/run/secrets/kubernetes.io/serviceaccount/namespace'
|
||||
"name": "kubernetesNamespaceFile",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"validation": "(/[^/]+)+",
|
||||
"helpMessage": "Must be a valid file path!",
|
||||
"default": "/var/run/secrets/kubernetes.io/serviceaccount/namespace",
|
||||
},
|
||||
{
|
||||
'name': 'secretFormat',
|
||||
'type': 'select',
|
||||
'required': True,
|
||||
'available': ['Full', 'TLS', 'Certificate'],
|
||||
'helpMessage': 'The type of Secret to create.',
|
||||
'default': 'Full'
|
||||
}
|
||||
"name": "secretFormat",
|
||||
"type": "select",
|
||||
"required": True,
|
||||
"available": ["Full", "TLS", "Certificate"],
|
||||
"helpMessage": "The type of Secret to create.",
|
||||
"default": "Full",
|
||||
},
|
||||
]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
@ -195,27 +196,28 @@ class KubernetesDestinationPlugin(DestinationPlugin):
|
||||
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
|
||||
|
||||
try:
|
||||
k8_base_uri = self.get_option('kubernetesURL', options)
|
||||
secret_format = self.get_option('secretFormat', options)
|
||||
k8s_api = K8sSession(
|
||||
self.k8s_bearer(options),
|
||||
self.k8s_cert(options)
|
||||
)
|
||||
k8_base_uri = self.get_option("kubernetesURL", options)
|
||||
secret_format = self.get_option("secretFormat", options)
|
||||
k8s_api = K8sSession(self.k8s_bearer(options), self.k8s_cert(options))
|
||||
cn = common_name(parse_certificate(body))
|
||||
secret_name_format = self.get_option('secretNameFormat', options)
|
||||
secret_name_format = self.get_option("secretNameFormat", options)
|
||||
secret_name = secret_name_format.format(common_name=cn)
|
||||
secret = build_secret(secret_format, secret_name, body, private_key, cert_chain)
|
||||
secret = build_secret(
|
||||
secret_format, secret_name, body, private_key, cert_chain
|
||||
)
|
||||
err = ensure_resource(
|
||||
k8s_api,
|
||||
k8s_base_uri=k8_base_uri,
|
||||
namespace=self.k8s_namespace(options),
|
||||
kind="secret",
|
||||
name=secret_name,
|
||||
data=secret
|
||||
data=secret,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
current_app.logger.exception("Exception in upload: {}".format(e), exc_info=True)
|
||||
current_app.logger.exception(
|
||||
"Exception in upload: {}".format(e), exc_info=True
|
||||
)
|
||||
raise
|
||||
|
||||
if err is not None:
|
||||
@ -223,24 +225,28 @@ class KubernetesDestinationPlugin(DestinationPlugin):
|
||||
raise Exception("Error uploading secret: " + err)
|
||||
|
||||
def k8s_bearer(self, options):
|
||||
bearer = self.get_option('kubernetesAuthToken', options)
|
||||
bearer = self.get_option("kubernetesAuthToken", options)
|
||||
if not bearer:
|
||||
bearer_file = self.get_option('kubernetesAuthTokenFile', options)
|
||||
bearer_file = self.get_option("kubernetesAuthTokenFile", options)
|
||||
with open(bearer_file, "r") as file:
|
||||
bearer = file.readline()
|
||||
if bearer:
|
||||
current_app.logger.debug("Using token read from %s", bearer_file)
|
||||
else:
|
||||
raise Exception("Unable to locate token in options or from %s", bearer_file)
|
||||
raise Exception(
|
||||
"Unable to locate token in options or from %s", bearer_file
|
||||
)
|
||||
else:
|
||||
current_app.logger.debug("Using token from options")
|
||||
return bearer
|
||||
|
||||
def k8s_cert(self, options):
|
||||
cert_file = self.get_option('kubernetesServerCertificateFile', options)
|
||||
cert = self.get_option('kubernetesServerCertificate', options)
|
||||
cert_file = self.get_option("kubernetesServerCertificateFile", options)
|
||||
cert = self.get_option("kubernetesServerCertificate", options)
|
||||
if cert:
|
||||
cert_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'k8.cert')
|
||||
cert_file = os.path.join(
|
||||
os.path.abspath(os.path.dirname(__file__)), "k8.cert"
|
||||
)
|
||||
with open(cert_file, "w") as text_file:
|
||||
text_file.write(cert)
|
||||
current_app.logger.debug("Using certificate from options")
|
||||
@ -249,36 +255,69 @@ class KubernetesDestinationPlugin(DestinationPlugin):
|
||||
return cert_file
|
||||
|
||||
def k8s_namespace(self, options):
|
||||
namespace = self.get_option('kubernetesNamespace', options)
|
||||
namespace = self.get_option("kubernetesNamespace", options)
|
||||
if not namespace:
|
||||
namespace_file = self.get_option('kubernetesNamespaceFile', options)
|
||||
namespace_file = self.get_option("kubernetesNamespaceFile", options)
|
||||
with open(namespace_file, "r") as file:
|
||||
namespace = file.readline()
|
||||
if namespace:
|
||||
current_app.logger.debug("Using namespace %s from %s", namespace, namespace_file)
|
||||
current_app.logger.debug(
|
||||
"Using namespace %s from %s", namespace, namespace_file
|
||||
)
|
||||
else:
|
||||
raise Exception("Unable to locate namespace in options or from %s", namespace_file)
|
||||
raise Exception(
|
||||
"Unable to locate namespace in options or from %s", namespace_file
|
||||
)
|
||||
else:
|
||||
current_app.logger.debug("Using namespace %s from options", namespace)
|
||||
return namespace
|
||||
|
||||
|
||||
class K8sSession(requests.Session):
|
||||
|
||||
def __init__(self, bearer, cert_file):
|
||||
super(K8sSession, self).__init__()
|
||||
|
||||
self.headers.update({
|
||||
'Authorization': 'Bearer %s' % bearer
|
||||
})
|
||||
self.headers.update({"Authorization": "Bearer %s" % bearer})
|
||||
|
||||
self.verify = cert_file
|
||||
|
||||
def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None,
|
||||
timeout=30, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None,
|
||||
json=None):
|
||||
def request(
|
||||
self,
|
||||
method,
|
||||
url,
|
||||
params=None,
|
||||
data=None,
|
||||
headers=None,
|
||||
cookies=None,
|
||||
files=None,
|
||||
auth=None,
|
||||
timeout=30,
|
||||
allow_redirects=True,
|
||||
proxies=None,
|
||||
hooks=None,
|
||||
stream=None,
|
||||
verify=None,
|
||||
cert=None,
|
||||
json=None,
|
||||
):
|
||||
"""
|
||||
This method overrides the default timeout to be 10s.
|
||||
"""
|
||||
return super(K8sSession, self).request(method, url, params, data, headers, cookies, files, auth, timeout,
|
||||
allow_redirects, proxies, hooks, stream, verify, cert, json)
|
||||
return super(K8sSession, self).request(
|
||||
method,
|
||||
url,
|
||||
params,
|
||||
data,
|
||||
headers,
|
||||
cookies,
|
||||
files,
|
||||
auth,
|
||||
timeout,
|
||||
allow_redirects,
|
||||
proxies,
|
||||
hooks,
|
||||
stream,
|
||||
verify,
|
||||
cert,
|
||||
json,
|
||||
)
|
||||
|
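A brief usage sketch of the session class above; the token and CA path are placeholder values:

session = K8sSession(
    "example-token", "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"
)
assert session.headers["Authorization"] == "Bearer example-token"
assert session.verify == "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt"
# requests made through this session pick up the timeout default from the
# overridden request() signature above
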
@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"

@ -50,59 +50,66 @@ def create_pkcs12(cert, chain, p12_tmp, key, alias, passphrase):
|
||||
assert isinstance(key, str)
|
||||
|
||||
with mktempfile() as key_tmp:
|
||||
with open(key_tmp, 'w') as f:
|
||||
with open(key_tmp, "w") as f:
|
||||
f.write(key)
|
||||
|
||||
# Create PKCS12 keystore from private key and public certificate
|
||||
with mktempfile() as cert_tmp:
|
||||
with open(cert_tmp, 'w') as f:
|
||||
with open(cert_tmp, "w") as f:
|
||||
if chain:
|
||||
f.writelines([cert.strip() + "\n", chain.strip() + "\n"])
|
||||
else:
|
||||
f.writelines([cert.strip() + "\n"])
|
||||
|
||||
run_process([
|
||||
"openssl",
|
||||
"pkcs12",
|
||||
"-export",
|
||||
"-name", alias,
|
||||
"-in", cert_tmp,
|
||||
"-inkey", key_tmp,
|
||||
"-out", p12_tmp,
|
||||
"-password", "pass:{}".format(passphrase)
|
||||
])
|
||||
run_process(
|
||||
[
|
||||
"openssl",
|
||||
"pkcs12",
|
||||
"-export",
|
||||
"-name",
|
||||
alias,
|
||||
"-in",
|
||||
cert_tmp,
|
||||
"-inkey",
|
||||
key_tmp,
|
||||
"-out",
|
||||
p12_tmp,
|
||||
"-password",
|
||||
"pass:{}".format(passphrase),
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class OpenSSLExportPlugin(ExportPlugin):
|
||||
title = 'OpenSSL'
|
||||
slug = 'openssl-export'
|
||||
description = 'Is a loose interface to openssl and support various formats'
|
||||
title = "OpenSSL"
|
||||
slug = "openssl-export"
|
||||
description = "Is a loose interface to openssl and support various formats"
|
||||
version = openssl.VERSION
|
||||
|
||||
author = 'Kevin Glisson'
|
||||
author_url = 'https://github.com/netflix/lemur'
|
||||
author = "Kevin Glisson"
|
||||
author_url = "https://github.com/netflix/lemur"
|
||||
|
||||
options = [
|
||||
{
|
||||
'name': 'type',
|
||||
'type': 'select',
|
||||
'required': True,
|
||||
'available': ['PKCS12 (.p12)'],
|
||||
'helpMessage': 'Choose the format you wish to export',
|
||||
"name": "type",
|
||||
"type": "select",
|
||||
"required": True,
|
||||
"available": ["PKCS12 (.p12)"],
|
||||
"helpMessage": "Choose the format you wish to export",
|
||||
},
|
||||
{
|
||||
'name': 'passphrase',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'If no passphrase is given one will be generated for you, we highly recommend this.',
|
||||
'validation': ''
|
||||
"name": "passphrase",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "If no passphrase is given one will be generated for you, we highly recommend this.",
|
||||
"validation": "",
|
||||
},
|
||||
{
|
||||
'name': 'alias',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'Enter the alias you wish to use for the keystore.',
|
||||
}
|
||||
"name": "alias",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "Enter the alias you wish to use for the keystore.",
|
||||
},
|
||||
]
|
||||
|
||||
def export(self, body, chain, key, options, **kwargs):
|
||||
@ -115,20 +122,20 @@ class OpenSSLExportPlugin(ExportPlugin):
|
||||
:param options:
|
||||
:param kwargs:
|
||||
"""
|
||||
if self.get_option('passphrase', options):
|
||||
passphrase = self.get_option('passphrase', options)
|
||||
if self.get_option("passphrase", options):
|
||||
passphrase = self.get_option("passphrase", options)
|
||||
else:
|
||||
passphrase = get_psuedo_random_string()
|
||||
|
||||
if self.get_option('alias', options):
|
||||
alias = self.get_option('alias', options)
|
||||
if self.get_option("alias", options):
|
||||
alias = self.get_option("alias", options)
|
||||
else:
|
||||
alias = common_name(parse_certificate(body))
|
||||
|
||||
type = self.get_option('type', options)
|
||||
type = self.get_option("type", options)
|
||||
|
||||
with mktemppath() as output_tmp:
|
||||
if type == 'PKCS12 (.p12)':
|
||||
if type == "PKCS12 (.p12)":
|
||||
if not key:
|
||||
raise Exception("Private Key required by {0}".format(type))
|
||||
|
||||
@ -137,7 +144,7 @@ class OpenSSLExportPlugin(ExportPlugin):
|
||||
else:
|
||||
raise Exception("Unable to export, unsupported type: {0}".format(type))
|
||||
|
||||
with open(output_tmp, 'rb') as f:
|
||||
with open(output_tmp, "rb") as f:
|
||||
raw = f.read()
|
||||
|
||||
return extension, passphrase, raw
|
||||
|
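For reference, roughly the openssl invocation that create_pkcs12() above assembles via run_process(); the paths, alias, and passphrase here are placeholders:

import subprocess

subprocess.check_call(
    [
        "openssl", "pkcs12", "-export",
        "-name", "example.com",            # keystore alias
        "-in", "/tmp/cert_with_chain.pem", # certificate (plus chain, if any)
        "-inkey", "/tmp/key.pem",          # private key
        "-out", "/tmp/bundle.p12",
        "-password", "pass:hunter2",
    ]
)
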
@ -4,8 +4,12 @@ from lemur.tests.vectors import INTERNAL_PRIVATE_KEY_A_STR, INTERNAL_CERTIFICATE

def test_export_certificate_to_pkcs12(app):
from lemur.plugins.base import plugins
p = plugins.get('openssl-export')
options = [{'name': 'passphrase', 'value': 'test1234'}, {'name': 'type', 'value': 'PKCS12 (.p12)'}]

p = plugins.get("openssl-export")
options = [
{"name": "passphrase", "value": "test1234"},
{"name": "type", "value": "PKCS12 (.p12)"},
]
with pytest.raises(Exception):
p.export(INTERNAL_CERTIFICATE_A_STR, "", "", options)

@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"

@ -27,107 +27,105 @@ from lemur.plugins.bases import DestinationPlugin
|
||||
|
||||
|
||||
class SFTPDestinationPlugin(DestinationPlugin):
|
||||
title = 'SFTP'
|
||||
slug = 'sftp-destination'
|
||||
description = 'Allow the uploading of certificates to SFTP'
|
||||
title = "SFTP"
|
||||
slug = "sftp-destination"
|
||||
description = "Allow the uploading of certificates to SFTP"
|
||||
version = lemur_sftp.VERSION
|
||||
|
||||
author = 'Dmitry Zykov'
|
||||
author_url = 'https://github.com/DmitryZykov'
|
||||
author = "Dmitry Zykov"
|
||||
author_url = "https://github.com/DmitryZykov"
|
||||
|
||||
options = [
|
||||
{
|
||||
'name': 'host',
|
||||
'type': 'str',
|
||||
'required': True,
|
||||
'helpMessage': 'The SFTP host.'
|
||||
"name": "host",
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"helpMessage": "The SFTP host.",
|
||||
},
|
||||
{
|
||||
'name': 'port',
|
||||
'type': 'int',
|
||||
'required': True,
|
||||
'helpMessage': 'The SFTP port, default is 22.',
|
||||
'validation': '^(6553[0-5]|655[0-2][0-9]\d|65[0-4](\d){2}|6[0-4](\d){3}|[1-5](\d){4}|[1-9](\d){0,3})',
|
||||
'default': '22'
|
||||
"name": "port",
|
||||
"type": "int",
|
||||
"required": True,
|
||||
"helpMessage": "The SFTP port, default is 22.",
|
||||
"validation": "^(6553[0-5]|655[0-2][0-9]\d|65[0-4](\d){2}|6[0-4](\d){3}|[1-5](\d){4}|[1-9](\d){0,3})",
|
||||
"default": "22",
|
||||
},
|
||||
{
|
||||
'name': 'user',
|
||||
'type': 'str',
|
||||
'required': True,
|
||||
'helpMessage': 'The SFTP user. Default is root.',
|
||||
'default': 'root'
|
||||
"name": "user",
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"helpMessage": "The SFTP user. Default is root.",
|
||||
"default": "root",
|
||||
},
|
||||
{
|
||||
'name': 'password',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'The SFTP password (optional when the private key is used).',
|
||||
'default': None
|
||||
"name": "password",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "The SFTP password (optional when the private key is used).",
|
||||
"default": None,
|
||||
},
|
||||
{
|
||||
'name': 'privateKeyPath',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'The path to the RSA private key on the Lemur server (optional).',
|
||||
'default': None
|
||||
"name": "privateKeyPath",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "The path to the RSA private key on the Lemur server (optional).",
|
||||
"default": None,
|
||||
},
|
||||
{
|
||||
'name': 'privateKeyPass',
|
||||
'type': 'str',
|
||||
'required': False,
|
||||
'helpMessage': 'The password for the encrypted RSA private key (optional).',
|
||||
'default': None
|
||||
"name": "privateKeyPass",
|
||||
"type": "str",
|
||||
"required": False,
|
||||
"helpMessage": "The password for the encrypted RSA private key (optional).",
|
||||
"default": None,
|
||||
},
|
||||
{
|
||||
'name': 'destinationPath',
|
||||
'type': 'str',
|
||||
'required': True,
|
||||
'helpMessage': 'The SFTP path where certificates will be uploaded.',
|
||||
'default': '/etc/nginx/certs'
|
||||
"name": "destinationPath",
|
||||
"type": "str",
|
||||
"required": True,
|
||||
"helpMessage": "The SFTP path where certificates will be uploaded.",
|
||||
"default": "/etc/nginx/certs",
|
||||
},
|
||||
{
|
||||
'name': 'exportFormat',
|
||||
'required': True,
|
||||
'value': 'NGINX',
|
||||
'helpMessage': 'The export format for certificates.',
|
||||
'type': 'select',
|
||||
'available': [
|
||||
'NGINX',
|
||||
'Apache'
|
||||
]
|
||||
}
|
||||
"name": "exportFormat",
|
||||
"required": True,
|
||||
"value": "NGINX",
|
||||
"helpMessage": "The export format for certificates.",
|
||||
"type": "select",
|
||||
"available": ["NGINX", "Apache"],
|
||||
},
|
||||
]
|
||||
|
||||
def upload(self, name, body, private_key, cert_chain, options, **kwargs):
|
||||
|
||||
current_app.logger.debug('SFTP destination plugin is started')
|
||||
current_app.logger.debug("SFTP destination plugin is started")
|
||||
|
||||
cn = common_name(parse_certificate(body))
|
||||
host = self.get_option('host', options)
|
||||
port = self.get_option('port', options)
|
||||
user = self.get_option('user', options)
|
||||
password = self.get_option('password', options)
|
||||
ssh_priv_key = self.get_option('privateKeyPath', options)
|
||||
ssh_priv_key_pass = self.get_option('privateKeyPass', options)
|
||||
dst_path = self.get_option('destinationPath', options)
|
||||
export_format = self.get_option('exportFormat', options)
|
||||
host = self.get_option("host", options)
|
||||
port = self.get_option("port", options)
|
||||
user = self.get_option("user", options)
|
||||
password = self.get_option("password", options)
|
||||
ssh_priv_key = self.get_option("privateKeyPath", options)
|
||||
ssh_priv_key_pass = self.get_option("privateKeyPass", options)
|
||||
dst_path = self.get_option("destinationPath", options)
|
||||
export_format = self.get_option("exportFormat", options)
|
||||
|
||||
# prepare files for upload
|
||||
files = {cn + '.key': private_key,
|
||||
cn + '.pem': body}
|
||||
files = {cn + ".key": private_key, cn + ".pem": body}
|
||||
|
||||
if cert_chain:
|
||||
if export_format == 'NGINX':
|
||||
if export_format == "NGINX":
|
||||
# assemble body + chain in the single file
|
||||
files[cn + '.pem'] += '\n' + cert_chain
|
||||
files[cn + ".pem"] += "\n" + cert_chain
|
||||
|
||||
elif export_format == 'Apache':
|
||||
elif export_format == "Apache":
|
||||
# store chain in the separate file
|
||||
files[cn + '.ca.bundle.pem'] = cert_chain
|
||||
files[cn + ".ca.bundle.pem"] = cert_chain
|
||||
|
||||
# upload files
|
||||
try:
|
||||
current_app.logger.debug('Connecting to {0}@{1}:{2}'.format(user, host, port))
|
||||
current_app.logger.debug(
|
||||
"Connecting to {0}@{1}:{2}".format(user, host, port)
|
||||
)
|
||||
ssh = paramiko.SSHClient()
|
||||
|
||||
# allow connection to the new unknown host
|
||||
@ -135,14 +133,18 @@ class SFTPDestinationPlugin(DestinationPlugin):
|
||||
|
||||
# open the ssh connection
|
||||
if password:
|
||||
current_app.logger.debug('Using password')
|
||||
current_app.logger.debug("Using password")
|
||||
ssh.connect(host, username=user, port=port, password=password)
|
||||
elif ssh_priv_key:
|
||||
current_app.logger.debug('Using RSA private key')
|
||||
pkey = paramiko.RSAKey.from_private_key_file(ssh_priv_key, ssh_priv_key_pass)
|
||||
current_app.logger.debug("Using RSA private key")
|
||||
pkey = paramiko.RSAKey.from_private_key_file(
|
||||
ssh_priv_key, ssh_priv_key_pass
|
||||
)
|
||||
ssh.connect(host, username=user, port=port, pkey=pkey)
|
||||
else:
|
||||
current_app.logger.error("No password or private key provided. Can't proceed")
|
||||
current_app.logger.error(
|
||||
"No password or private key provided. Can't proceed"
|
||||
)
|
||||
raise paramiko.ssh_exception.AuthenticationException
|
||||
|
||||
# open the sftp session inside the ssh connection
|
||||
@ -150,29 +152,33 @@ class SFTPDestinationPlugin(DestinationPlugin):

# make sure that the destination path exist
try:
current_app.logger.debug('Creating {0}'.format(dst_path))
current_app.logger.debug("Creating {0}".format(dst_path))
sftp.mkdir(dst_path)
except IOError:
current_app.logger.debug('{0} already exist, resuming'.format(dst_path))
current_app.logger.debug("{0} already exist, resuming".format(dst_path))
try:
dst_path_cn = dst_path + '/' + cn
current_app.logger.debug('Creating {0}'.format(dst_path_cn))
dst_path_cn = dst_path + "/" + cn
current_app.logger.debug("Creating {0}".format(dst_path_cn))
sftp.mkdir(dst_path_cn)
except IOError:
current_app.logger.debug('{0} already exist, resuming'.format(dst_path_cn))
current_app.logger.debug(
"{0} already exist, resuming".format(dst_path_cn)
)

# upload certificate files to the sftp destination
for filename, data in files.items():
current_app.logger.debug('Uploading {0} to {1}'.format(filename, dst_path_cn))
with sftp.open(dst_path_cn + '/' + filename, 'w') as f:
current_app.logger.debug(
"Uploading {0} to {1}".format(filename, dst_path_cn)
)
with sftp.open(dst_path_cn + "/" + filename, "w") as f:
f.write(data)
# read only for owner, -r--------
sftp.chmod(dst_path_cn + '/' + filename, 0o400)
sftp.chmod(dst_path_cn + "/" + filename, 0o400)

ssh.close()

except Exception as e:
current_app.logger.error('ERROR in {0}: {1}'.format(e.__class__, e))
current_app.logger.error("ERROR in {0}: {1}".format(e.__class__, e))
try:
ssh.close()
except BaseException:
@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"
@ -17,102 +17,101 @@ import requests


def create_certificate_url(name):
return 'https://{hostname}/#/certificates/{name}'.format(
hostname=current_app.config.get('LEMUR_HOSTNAME'),
name=name
return "https://{hostname}/#/certificates/{name}".format(
hostname=current_app.config.get("LEMUR_HOSTNAME"), name=name
)


def create_expiration_attachments(certificates):
attachments = []
for certificate in certificates:
attachments.append({
'title': certificate['name'],
'title_link': create_certificate_url(certificate['name']),
'color': 'danger',
'fallback': '',
'fields': [
{
'title': 'Owner',
'value': certificate['owner'],
'short': True
},
{
'title': 'Expires',
'value': arrow.get(certificate['validityEnd']).format('dddd, MMMM D, YYYY'),
'short': True
},
{
'title': 'Endpoints Detected',
'value': len(certificate['endpoints']),
'short': True
}
],
'text': '',
'mrkdwn_in': ['text']
})
attachments.append(
{
"title": certificate["name"],
"title_link": create_certificate_url(certificate["name"]),
"color": "danger",
"fallback": "",
"fields": [
{"title": "Owner", "value": certificate["owner"], "short": True},
{
"title": "Expires",
"value": arrow.get(certificate["validityEnd"]).format(
"dddd, MMMM D, YYYY"
),
"short": True,
},
{
"title": "Endpoints Detected",
"value": len(certificate["endpoints"]),
"short": True,
},
],
"text": "",
"mrkdwn_in": ["text"],
}
)
return attachments


def create_rotation_attachments(certificate):
return {
'title': certificate['name'],
'title_link': create_certificate_url(certificate['name']),
'fields': [
"title": certificate["name"],
"title_link": create_certificate_url(certificate["name"]),
"fields": [
{
{"title": "Owner", "value": certificate["owner"], "short": True},
{
'title': 'Owner',
'value': certificate['owner'],
'short': True
"title": "Expires",
"value": arrow.get(certificate["validityEnd"]).format(
"dddd, MMMM D, YYYY"
),
"short": True,
},
{
'title': 'Expires',
'value': arrow.get(certificate['validityEnd']).format('dddd, MMMM D, YYYY'),
'short': True
"title": "Replaced By",
"value": len(certificate["replaced"][0]["name"]),
"short": True,
},
{
'title': 'Replaced By',
'value': len(certificate['replaced'][0]['name']),
'short': True
"title": "Endpoints Rotated",
"value": len(certificate["endpoints"]),
"short": True,
},
{
'title': 'Endpoints Rotated',
'value': len(certificate['endpoints']),
'short': True
}
}
]
],
}


class SlackNotificationPlugin(ExpirationNotificationPlugin):
title = 'Slack'
slug = 'slack-notification'
description = 'Sends notifications to Slack'
title = "Slack"
slug = "slack-notification"
description = "Sends notifications to Slack"
version = slack.VERSION

author = 'Harm Weites'
author_url = 'https://github.com/netflix/lemur'
author = "Harm Weites"
author_url = "https://github.com/netflix/lemur"

additional_options = [
{
'name': 'webhook',
'type': 'str',
'required': True,
'validation': '^https:\/\/hooks\.slack\.com\/services\/.+$',
'helpMessage': 'The url Slack told you to use for this integration',
}, {
'name': 'username',
'type': 'str',
'validation': '^.+$',
'helpMessage': 'The great storyteller',
'default': 'Lemur'
}, {
'name': 'recipients',
'type': 'str',
'required': True,
'validation': '^(@|#).+$',
'helpMessage': 'Where to send to, either @username or #channel',
"name": "webhook",
"type": "str",
"required": True,
"validation": "^https:\/\/hooks\.slack\.com\/services\/.+$",
"helpMessage": "The url Slack told you to use for this integration",
},
{
"name": "username",
"type": "str",
"validation": "^.+$",
"helpMessage": "The great storyteller",
"default": "Lemur",
},
{
"name": "recipients",
"type": "str",
"required": True,
"validation": "^(@|#).+$",
"helpMessage": "Where to send to, either @username or #channel",
},
]

@ -122,25 +121,27 @@ class SlackNotificationPlugin(ExpirationNotificationPlugin):
`lemur notify`
"""
attachments = None
if notification_type == 'expiration':
if notification_type == "expiration":
attachments = create_expiration_attachments(message)

elif notification_type == 'rotation':
elif notification_type == "rotation":
attachments = create_rotation_attachments(message)

if not attachments:
raise Exception('Unable to create message attachments')
raise Exception("Unable to create message attachments")

body = {
'text': 'Lemur {0} Notification'.format(notification_type.capitalize()),
'attachments': attachments,
'channel': self.get_option('recipients', options),
'username': self.get_option('username', options)
"text": "Lemur {0} Notification".format(notification_type.capitalize()),
"attachments": attachments,
"channel": self.get_option("recipients", options),
"username": self.get_option("username", options),
}

r = requests.post(self.get_option('webhook', options), json.dumps(body))
r = requests.post(self.get_option("webhook", options), json.dumps(body))

if r.status_code not in [200]:
raise Exception('Failed to send message')
raise Exception("Failed to send message")

current_app.logger.error("Slack response: {0} Message Body: {1}".format(r.status_code, body))
current_app.logger.error(
"Slack response: {0} Message Body: {1}".format(r.status_code, body)
)
@ -1,33 +1,23 @@


def test_formatting(certificate):
from lemur.plugins.lemur_slack.plugin import create_expiration_attachments
from lemur.certificates.schemas import certificate_notification_output_schema

data = [certificate_notification_output_schema.dump(certificate).data]

attachment = {
'title': certificate.name,
'color': 'danger',
'fields': [
{
'short': True,
'value': 'joe@example.com',
'title': 'Owner'
},
{
'short': True,
'value': u'Tuesday, December 31, 2047',
'title': 'Expires'
}, {
'short': True,
'value': 0,
'title': 'Endpoints Detected'
}
"title": certificate.name,
"color": "danger",
"fields": [
{"short": True, "value": "joe@example.com", "title": "Owner"},
{"short": True, "value": u"Tuesday, December 31, 2047", "title": "Expires"},
{"short": True, "value": 0, "title": "Endpoints Detected"},
],
'title_link': 'https://lemur.example.com/#/certificates/{name}'.format(name=certificate.name),
'mrkdwn_in': ['text'],
'text': '',
'fallback': ''
"title_link": "https://lemur.example.com/#/certificates/{name}".format(
name=certificate.name
),
"mrkdwn_in": ["text"],
"text": "",
"fallback": "",
}

assert attachment == create_expiration_attachments(data)[0]
@ -1,4 +1,4 @@
try:
VERSION = __import__('pkg_resources').get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'Unknown'
VERSION = "Unknown"
@ -6,40 +6,44 @@ from datadog import DogStatsd


class StatsdMetricPlugin(MetricPlugin):
title = 'Statsd'
slug = 'statsd-metrics'
description = 'Adds support for sending metrics to Statsd'
title = "Statsd"
slug = "statsd-metrics"
description = "Adds support for sending metrics to Statsd"
version = plug.VERSION

def __init__(self):
host = current_app.config.get('STATSD_HOST')
port = current_app.config.get('STATSD_PORT')
prefix = current_app.config.get('STATSD_PREFIX')
host = current_app.config.get("STATSD_HOST")
port = current_app.config.get("STATSD_PORT")
prefix = current_app.config.get("STATSD_PREFIX")

self.statsd = DogStatsd(host=host, port=port, namespace=prefix)

def submit(self, metric_name, metric_type, metric_value, metric_tags=None, options=None):
valid_types = ['COUNTER', 'GAUGE', 'TIMER']
def submit(
self, metric_name, metric_type, metric_value, metric_tags=None, options=None
):
valid_types = ["COUNTER", "GAUGE", "TIMER"]
tags = []

if metric_type.upper() not in valid_types:
raise Exception(
"Invalid Metric Type for Statsd, '{metric}' choose from: {options}".format(
metric=metric_type, options=','.join(valid_types)
metric=metric_type, options=",".join(valid_types)
)
)

if metric_tags:
if not isinstance(metric_tags, dict):
raise Exception("Invalid Metric Tags for Statsd: Tags must be in dict format")
raise Exception(
"Invalid Metric Tags for Statsd: Tags must be in dict format"
)
else:
tags = map(lambda e: "{0}:{1}".format(*e), metric_tags.items())

if metric_type.upper() == 'COUNTER':
if metric_type.upper() == "COUNTER":
self.statsd.increment(metric_name, metric_value, tags)
elif metric_type.upper() == 'GAUGE':
elif metric_type.upper() == "GAUGE":
self.statsd.gauge(metric_name, metric_value, tags)
elif metric_type.upper() == 'TIMER':
elif metric_type.upper() == "TIMER":
self.statsd.timing(metric_name, metric_value, tags)

return
@ -2,23 +2,16 @@
from __future__ import absolute_import
from setuptools import setup, find_packages

install_requires = [
'lemur',
'datadog'
]
install_requires = ["lemur", "datadog"]

setup(
name='lemur_statsd',
version='1.0.0',
author='Cloudflare Security Engineering',
author_email='',
name="lemur_statsd",
version="1.0.0",
author="Cloudflare Security Engineering",
author_email="",
include_package_data=True,
packages=find_packages(),
zip_safe=False,
install_requires=install_requires,
entry_points={
'lemur.plugins': [
'statsd = lemur_statsd.plugin:StatsdMetricPlugin',
]
}
entry_points={"lemur.plugins": ["statsd = lemur_statsd.plugin:StatsdMetricPlugin"]},
)
@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"
@ -25,59 +25,57 @@ from cryptography.hazmat.backends import default_backend

class VaultSourcePlugin(SourcePlugin):
""" Class for importing certificates from Hashicorp Vault"""
title = 'Vault'
slug = 'vault-source'
description = 'Discovers all certificates in a given path'

author = 'Christopher Jolley'
author_url = 'https://github.com/alwaysjolley/lemur'
title = "Vault"
slug = "vault-source"
description = "Discovers all certificates in a given path"

author = "Christopher Jolley"
author_url = "https://github.com/alwaysjolley/lemur"

options = [
{
'name': 'vaultUrl',
'type': 'str',
'required': True,
'validation': '^https?://[a-zA-Z0-9.:-]+$',
'helpMessage': 'Valid URL to Hashi Vault instance'
"name": "vaultUrl",
"type": "str",
"required": True,
"validation": "^https?://[a-zA-Z0-9.:-]+$",
"helpMessage": "Valid URL to Hashi Vault instance",
},
{
'name': 'vaultKvApiVersion',
'type': 'select',
'value': '2',
'available': [
'1',
'2'
],
'required': True,
'helpMessage': 'Version of the Vault KV API to use'
"name": "vaultKvApiVersion",
"type": "select",
"value": "2",
"available": ["1", "2"],
"required": True,
"helpMessage": "Version of the Vault KV API to use",
},
{
'name': 'vaultAuthTokenFile',
'type': 'str',
'required': True,
'validation': '(/[^/]+)+',
'helpMessage': 'Must be a valid file path!'
"name": "vaultAuthTokenFile",
"type": "str",
"required": True,
"validation": "(/[^/]+)+",
"helpMessage": "Must be a valid file path!",
},
{
'name': 'vaultMount',
'type': 'str',
'required': True,
'validation': r'^\S+$',
'helpMessage': 'Must be a valid Vault secrets mount name!'
"name": "vaultMount",
"type": "str",
"required": True,
"validation": r"^\S+$",
"helpMessage": "Must be a valid Vault secrets mount name!",
},
{
'name': 'vaultPath',
'type': 'str',
'required': True,
'validation': '^([a-zA-Z0-9._-]+/?)+$',
'helpMessage': 'Must be a valid Vault secrets path'
"name": "vaultPath",
"type": "str",
"required": True,
"validation": "^([a-zA-Z0-9._-]+/?)+$",
"helpMessage": "Must be a valid Vault secrets path",
},
{
'name': 'objectName',
'type': 'str',
'required': True,
'validation': '[0-9a-zA-Z.:_-]+',
'helpMessage': 'Object Name to search'
"name": "objectName",
"type": "str",
"required": True,
"validation": "[0-9a-zA-Z.:_-]+",
"helpMessage": "Object Name to search",
},
]

@ -85,38 +83,38 @@ class VaultSourcePlugin(SourcePlugin):
"""Pull certificates from objects in Hashicorp Vault"""
data = []
cert = []
body = ''
url = self.get_option('vaultUrl', options)
token_file = self.get_option('vaultAuthTokenFile', options)
mount = self.get_option('vaultMount', options)
path = self.get_option('vaultPath', options)
obj_name = self.get_option('objectName', options)
api_version = self.get_option('vaultKvApiVersion', options)
cert_filter = '-----BEGIN CERTIFICATE-----'
cert_delimiter = '-----END CERTIFICATE-----'
body = ""
url = self.get_option("vaultUrl", options)
token_file = self.get_option("vaultAuthTokenFile", options)
mount = self.get_option("vaultMount", options)
path = self.get_option("vaultPath", options)
obj_name = self.get_option("objectName", options)
api_version = self.get_option("vaultKvApiVersion", options)
cert_filter = "-----BEGIN CERTIFICATE-----"
cert_delimiter = "-----END CERTIFICATE-----"

with open(token_file, 'r') as tfile:
token = tfile.readline().rstrip('\n')
with open(token_file, "r") as tfile:
token = tfile.readline().rstrip("\n")

client = hvac.Client(url=url, token=token)
client.secrets.kv.default_kv_version = api_version

path = '{0}/{1}'.format(path, obj_name)
path = "{0}/{1}".format(path, obj_name)

secret = get_secret(client, mount, path)
for cname in secret['data']:
if 'crt' in secret['data'][cname]:
cert = secret['data'][cname]['crt'].split(cert_delimiter + '\n')
elif 'pem' in secret['data'][cname]:
cert = secret['data'][cname]['pem'].split(cert_delimiter + '\n')
for cname in secret["data"]:
if "crt" in secret["data"][cname]:
cert = secret["data"][cname]["crt"].split(cert_delimiter + "\n")
elif "pem" in secret["data"][cname]:
cert = secret["data"][cname]["pem"].split(cert_delimiter + "\n")
else:
for key in secret['data'][cname]:
if secret['data'][cname][key].startswith(cert_filter):
cert = secret['data'][cname][key].split(cert_delimiter + '\n')
for key in secret["data"][cname]:
if secret["data"][cname][key].startswith(cert_filter):
cert = secret["data"][cname][key].split(cert_delimiter + "\n")
break
body = cert[0] + cert_delimiter
if 'chain' in secret['data'][cname]:
chain = secret['data'][cname]['chain']
if "chain" in secret["data"][cname]:
chain = secret["data"][cname]["chain"]
elif len(cert) > 1:
if cert[1].startswith(cert_filter):
chain = cert[1] + cert_delimiter
@ -124,8 +122,10 @@ class VaultSourcePlugin(SourcePlugin):
chain = None
else:
chain = None
data.append({'body': body, 'chain': chain, 'name': cname})
return [dict(body=c['body'], chain=c.get('chain'), name=c['name']) for c in data]
data.append({"body": body, "chain": chain, "name": cname})
return [
dict(body=c["body"], chain=c.get("chain"), name=c["name"]) for c in data
]

def get_endpoints(self, options, **kwargs):
""" Not implemented yet """
@ -135,81 +135,74 @@ class VaultSourcePlugin(SourcePlugin):

class VaultDestinationPlugin(DestinationPlugin):
"""Hashicorp Vault Destination plugin for Lemur"""
title = 'Vault'
slug = 'hashi-vault-destination'
description = 'Allow the uploading of certificates to Hashi Vault as secret'

author = 'Christopher Jolley'
author_url = 'https://github.com/alwaysjolley/lemur'
title = "Vault"
slug = "hashi-vault-destination"
description = "Allow the uploading of certificates to Hashi Vault as secret"

author = "Christopher Jolley"
author_url = "https://github.com/alwaysjolley/lemur"

options = [
{
'name': 'vaultUrl',
'type': 'str',
'required': True,
'validation': '^https?://[a-zA-Z0-9.:-]+$',
'helpMessage': 'Valid URL to Hashi Vault instance'
"name": "vaultUrl",
"type": "str",
"required": True,
"validation": "^https?://[a-zA-Z0-9.:-]+$",
"helpMessage": "Valid URL to Hashi Vault instance",
},
{
'name': 'vaultKvApiVersion',
'type': 'select',
'value': '2',
'available': [
'1',
'2'
],
'required': True,
'helpMessage': 'Version of the Vault KV API to use'
"name": "vaultKvApiVersion",
"type": "select",
"value": "2",
"available": ["1", "2"],
"required": True,
"helpMessage": "Version of the Vault KV API to use",
},
{
'name': 'vaultAuthTokenFile',
'type': 'str',
'required': True,
'validation': '(/[^/]+)+',
'helpMessage': 'Must be a valid file path!'
"name": "vaultAuthTokenFile",
"type": "str",
"required": True,
"validation": "(/[^/]+)+",
"helpMessage": "Must be a valid file path!",
},
{
'name': 'vaultMount',
'type': 'str',
'required': True,
'validation': r'^\S+$',
'helpMessage': 'Must be a valid Vault secrets mount name!'
"name": "vaultMount",
"type": "str",
"required": True,
"validation": r"^\S+$",
"helpMessage": "Must be a valid Vault secrets mount name!",
},
{
'name': 'vaultPath',
'type': 'str',
'required': True,
'validation': '^([a-zA-Z0-9_-]+/?)+$',
'helpMessage': 'Must be a valid Vault secrets path'
"name": "vaultPath",
"type": "str",
"required": True,
"validation": "^([a-zA-Z0-9_-]+/?)+$",
"helpMessage": "Must be a valid Vault secrets path",
},
{
'name': 'objectName',
'type': 'str',
'required': False,
'validation': '[0-9a-zA-Z:_-]+',
'helpMessage': 'Name to bundle certs under, if blank use cn'
"name": "objectName",
"type": "str",
"required": False,
"validation": "[0-9a-zA-Z:_-]+",
"helpMessage": "Name to bundle certs under, if blank use cn",
},
{
'name': 'bundleChain',
'type': 'select',
'value': 'cert only',
'available': [
'Nginx',
'Apache',
'PEM',
'no chain'
],
'required': True,
'helpMessage': 'Bundle the chain into the certificate'
"name": "bundleChain",
"type": "select",
"value": "cert only",
"available": ["Nginx", "Apache", "PEM", "no chain"],
"required": True,
"helpMessage": "Bundle the chain into the certificate",
},
{
'name': 'sanFilter',
'type': 'str',
'value': '.*',
'required': False,
'validation': '.*',
'helpMessage': 'Valid regex filter'
}
"name": "sanFilter",
"type": "str",
"value": ".*",
"required": False,
"validation": ".*",
"helpMessage": "Valid regex filter",
},
]

def __init__(self, *args, **kwargs):
@ -225,14 +218,14 @@ class VaultDestinationPlugin(DestinationPlugin):
"""
cname = common_name(parse_certificate(body))

url = self.get_option('vaultUrl', options)
token_file = self.get_option('vaultAuthTokenFile', options)
mount = self.get_option('vaultMount', options)
path = self.get_option('vaultPath', options)
bundle = self.get_option('bundleChain', options)
obj_name = self.get_option('objectName', options)
api_version = self.get_option('vaultKvApiVersion', options)
san_filter = self.get_option('sanFilter', options)
url = self.get_option("vaultUrl", options)
token_file = self.get_option("vaultAuthTokenFile", options)
mount = self.get_option("vaultMount", options)
path = self.get_option("vaultPath", options)
bundle = self.get_option("bundleChain", options)
obj_name = self.get_option("objectName", options)
api_version = self.get_option("vaultKvApiVersion", options)
san_filter = self.get_option("sanFilter", options)

san_list = get_san_list(body)
if san_filter:
@ -240,58 +233,67 @@ class VaultDestinationPlugin(DestinationPlugin):
try:
if not re.match(san_filter, san, flags=re.IGNORECASE):
current_app.logger.exception(
"Exception uploading secret to vault: invalid SAN: {}".format(san),
exc_info=True)
"Exception uploading secret to vault: invalid SAN: {}".format(
san
),
exc_info=True,
)
os._exit(1)
except re.error:
current_app.logger.exception(
"Exception compiling regex filter: invalid filter",
exc_info=True)
exc_info=True,
)

with open(token_file, 'r') as tfile:
token = tfile.readline().rstrip('\n')
with open(token_file, "r") as tfile:
token = tfile.readline().rstrip("\n")

client = hvac.Client(url=url, token=token)
client.secrets.kv.default_kv_version = api_version

if obj_name:
path = '{0}/{1}'.format(path, obj_name)
path = "{0}/{1}".format(path, obj_name)
else:
path = '{0}/{1}'.format(path, cname)
path = "{0}/{1}".format(path, cname)

secret = get_secret(client, mount, path)
secret['data'][cname] = {}
secret["data"][cname] = {}

if bundle == 'Nginx':
secret['data'][cname]['crt'] = '{0}\n{1}'.format(body, cert_chain)
secret['data'][cname]['key'] = private_key
elif bundle == 'Apache':
secret['data'][cname]['crt'] = body
secret['data'][cname]['chain'] = cert_chain
secret['data'][cname]['key'] = private_key
elif bundle == 'PEM':
secret['data'][cname]['pem'] = '{0}\n{1}\n{2}'.format(body, cert_chain, private_key)
if bundle == "Nginx":
secret["data"][cname]["crt"] = "{0}\n{1}".format(body, cert_chain)
secret["data"][cname]["key"] = private_key
elif bundle == "Apache":
secret["data"][cname]["crt"] = body
secret["data"][cname]["chain"] = cert_chain
secret["data"][cname]["key"] = private_key
elif bundle == "PEM":
secret["data"][cname]["pem"] = "{0}\n{1}\n{2}".format(
body, cert_chain, private_key
)
else:
secret['data'][cname]['crt'] = body
secret['data'][cname]['key'] = private_key
secret["data"][cname]["crt"] = body
secret["data"][cname]["key"] = private_key
if isinstance(san_list, list):
secret['data'][cname]['san'] = san_list
secret["data"][cname]["san"] = san_list
try:
client.secrets.kv.create_or_update_secret(
path=path, mount_point=mount, secret=secret['data']
path=path, mount_point=mount, secret=secret["data"]
)
except ConnectionError as err:
current_app.logger.exception(
"Exception uploading secret to vault: {0}".format(err), exc_info=True)
"Exception uploading secret to vault: {0}".format(err), exc_info=True
)


def get_san_list(body):
""" parse certificate for SAN names and return list, return empty list on error """
san_list = []
try:
byte_body = body.encode('utf-8')
byte_body = body.encode("utf-8")
cert = x509.load_pem_x509_certificate(byte_body, default_backend())
ext = cert.extensions.get_extension_for_oid(x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME)
ext = cert.extensions.get_extension_for_oid(
x509.oid.ExtensionOID.SUBJECT_ALTERNATIVE_NAME
)
san_list = ext.value.get_values_for_type(x509.DNSName)
except x509.extensions.ExtensionNotFound:
pass
@ -301,12 +303,15 @@ def get_san_list(body):

def get_secret(client, mount, path):
""" retreive existing data from mount path and return dictionary """
result = {'data': {}}
result = {"data": {}}
try:
if client.secrets.kv.default_kv_version == '1':
if client.secrets.kv.default_kv_version == "1":
result = client.secrets.kv.v1.read_secret(path=path, mount_point=mount)
else:
result = client.secrets.kv.v2.read_secret_version(path=path, mount_point=mount)
result = client.secrets.kv.v2.read_secret_version(
path=path, mount_point=mount
)
result = result['data']
except ConnectionError:
pass
finally:
@ -1,5 +1,4 @@
try:
VERSION = __import__('pkg_resources') \
.get_distribution(__name__).version
VERSION = __import__("pkg_resources").get_distribution(__name__).version
except Exception as e:
VERSION = 'unknown'
VERSION = "unknown"
@ -58,7 +58,7 @@ VERISIGN_ERRORS = {
"0x300a": "Domain/SubjectAltName Mismatched -- make sure that the SANs have the proper domain suffix",
"0x950e": "Invalid Common Name -- make sure the CN has a proper domain suffix",
"0xa00e": "Pending. (Insufficient number of tokens.)",
"0x8134": "Pending. (Domain failed CAA validation.)"
"0x8134": "Pending. (Domain failed CAA validation.)",
}


@ -71,7 +71,7 @@ def log_status_code(r, *args, **kwargs):
:param kwargs:
:return:
"""
metrics.send('symantec_status_code_{}'.format(r.status_code), 'counter', 1)
metrics.send("symantec_status_code_{}".format(r.status_code), "counter", 1)


def get_additional_names(options):
@ -83,8 +83,8 @@ def get_additional_names(options):
"""
names = []
# add SANs if present
if options.get('extensions'):
for san in options['extensions']['sub_alt_names']:
if options.get("extensions"):
for san in options["extensions"]["sub_alt_names"]:
if isinstance(san, x509.DNSName):
names.append(san.value)
return names
@ -99,37 +99,43 @@ def process_options(options):
:return: dict or valid verisign options
"""
data = {
'challenge': get_psuedo_random_string(),
'serverType': 'Apache',
'certProductType': 'Server',
'firstName': current_app.config.get("VERISIGN_FIRST_NAME"),
'lastName': current_app.config.get("VERISIGN_LAST_NAME"),
'signatureAlgorithm': 'sha256WithRSAEncryption',
'email': current_app.config.get("VERISIGN_EMAIL"),
'ctLogOption': current_app.config.get("VERISIGN_CS_LOG_OPTION", "public"),
"challenge": get_psuedo_random_string(),
"serverType": "Apache",
"certProductType": "Server",
"firstName": current_app.config.get("VERISIGN_FIRST_NAME"),
"lastName": current_app.config.get("VERISIGN_LAST_NAME"),
"signatureAlgorithm": "sha256WithRSAEncryption",
"email": current_app.config.get("VERISIGN_EMAIL"),
"ctLogOption": current_app.config.get("VERISIGN_CS_LOG_OPTION", "public"),
}

data['subject_alt_names'] = ",".join(get_additional_names(options))
data["subject_alt_names"] = ",".join(get_additional_names(options))

if options.get('validity_end') > arrow.utcnow().replace(years=2):
raise Exception("Verisign issued certificates cannot exceed two years in validity")
if options.get("validity_end") > arrow.utcnow().replace(years=2):
raise Exception(
"Verisign issued certificates cannot exceed two years in validity"
)

if options.get('validity_end'):
if options.get("validity_end"):
# VeriSign (Symantec) only accepts strictly smaller than 2 year end date
if options.get('validity_end') < arrow.utcnow().replace(years=2).replace(days=-1):
if options.get("validity_end") < arrow.utcnow().replace(years=2).replace(
days=-1
):
period = get_default_issuance(options)
data['specificEndDate'] = options['validity_end'].format("MM/DD/YYYY")
data['validityPeriod'] = period
data["specificEndDate"] = options["validity_end"].format("MM/DD/YYYY")
data["validityPeriod"] = period
else:
# allowing Symantec website setting the end date, given the validity period
data['validityPeriod'] = str(get_default_issuance(options))
options.pop('validity_end', None)
data["validityPeriod"] = str(get_default_issuance(options))
options.pop("validity_end", None)

elif options.get('validity_years'):
if options['validity_years'] in [1, 2]:
data['validityPeriod'] = str(options['validity_years']) + 'Y'
elif options.get("validity_years"):
if options["validity_years"] in [1, 2]:
data["validityPeriod"] = str(options["validity_years"]) + "Y"
else:
raise Exception("Verisign issued certificates cannot exceed two years in validity")
raise Exception(
"Verisign issued certificates cannot exceed two years in validity"
)

return data

@ -143,12 +149,14 @@ def get_default_issuance(options):
"""
now = arrow.utcnow()

if options['validity_end'] < now.replace(years=+1):
validity_period = '1Y'
elif options['validity_end'] < now.replace(years=+2):
validity_period = '2Y'
if options["validity_end"] < now.replace(years=+1):
validity_period = "1Y"
elif options["validity_end"] < now.replace(years=+2):
validity_period = "2Y"
else:
raise Exception("Verisign issued certificates cannot exceed two years in validity")
raise Exception(
"Verisign issued certificates cannot exceed two years in validity"
)

return validity_period

@ -161,27 +169,27 @@ def handle_response(content):
"""
d = xmltodict.parse(content)
global VERISIGN_ERRORS
if d.get('Error'):
status_code = d['Error']['StatusCode']
elif d.get('Response'):
status_code = d['Response']['StatusCode']
if d.get("Error"):
status_code = d["Error"]["StatusCode"]
elif d.get("Response"):
status_code = d["Response"]["StatusCode"]
if status_code in VERISIGN_ERRORS.keys():
raise Exception(VERISIGN_ERRORS[status_code])
return d


class VerisignIssuerPlugin(IssuerPlugin):
title = 'Verisign'
slug = 'verisign-issuer'
description = 'Enables the creation of certificates by the VICE2.0 verisign API.'
title = "Verisign"
slug = "verisign-issuer"
description = "Enables the creation of certificates by the VICE2.0 verisign API."
version = verisign.VERSION

author = 'Kevin Glisson'
author_url = 'https://github.com/netflix/lemur.git'
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur.git"

def __init__(self, *args, **kwargs):
self.session = requests.Session()
self.session.cert = current_app.config.get('VERISIGN_PEM_PATH')
self.session.cert = current_app.config.get("VERISIGN_PEM_PATH")
self.session.hooks = dict(response=log_status_code)
super(VerisignIssuerPlugin, self).__init__(*args, **kwargs)

@ -193,23 +201,31 @@ class VerisignIssuerPlugin(IssuerPlugin):
:param issuer_options:
:return: :raise Exception:
"""
url = current_app.config.get("VERISIGN_URL") + '/rest/services/enroll'
url = current_app.config.get("VERISIGN_URL") + "/rest/services/enroll"

data = process_options(issuer_options)
data['csr'] = csr
data["csr"] = csr

current_app.logger.info("Requesting a new verisign certificate: {0}".format(data))
current_app.logger.info(
"Requesting a new verisign certificate: {0}".format(data)
)

response = self.session.post(url, data=data)
try:
cert = handle_response(response.content)['Response']['Certificate']
cert = handle_response(response.content)["Response"]["Certificate"]
except KeyError:
metrics.send('verisign_create_certificate_error', 'counter', 1,
metric_tags={"common_name": issuer_options.get("common_name", "")})
sentry.captureException(extra={"common_name": issuer_options.get("common_name", "")})
metrics.send(
"verisign_create_certificate_error",
"counter",
1,
metric_tags={"common_name": issuer_options.get("common_name", "")},
)
sentry.captureException(
extra={"common_name": issuer_options.get("common_name", "")}
)
raise Exception(f"Error with Verisign: {response.content}")
# TODO add external id
return cert, current_app.config.get('VERISIGN_INTERMEDIATE'), None
return cert, current_app.config.get("VERISIGN_INTERMEDIATE"), None

@staticmethod
def create_authority(options):
@ -220,8 +236,8 @@ class VerisignIssuerPlugin(IssuerPlugin):
:param options:
:return:
"""
role = {'username': '', 'password': '', 'name': 'verisign'}
return current_app.config.get('VERISIGN_ROOT'), "", [role]
role = {"username": "", "password": "", "name": "verisign"}
return current_app.config.get("VERISIGN_ROOT"), "", [role]

def get_available_units(self):
"""
@ -230,9 +246,11 @@ class VerisignIssuerPlugin(IssuerPlugin):

:return:
"""
url = current_app.config.get("VERISIGN_URL") + '/rest/services/getTokens'
response = self.session.post(url, headers={'content-type': 'application/x-www-form-urlencoded'})
return handle_response(response.content)['Response']['Order']
url = current_app.config.get("VERISIGN_URL") + "/rest/services/getTokens"
response = self.session.post(
url, headers={"content-type": "application/x-www-form-urlencoded"}
)
return handle_response(response.content)["Response"]["Order"]

def clear_pending_certificates(self):
"""
@ -240,52 +258,54 @@ class VerisignIssuerPlugin(IssuerPlugin):

:return:
"""
url = current_app.config.get('VERISIGN_URL') + '/reportingws'
url = current_app.config.get("VERISIGN_URL") + "/reportingws"

end = arrow.now()
start = end.replace(days=-7)

data = {
'reportType': 'detail',
'certProductType': 'Server',
'certStatus': 'Pending',
'startDate': start.format("MM/DD/YYYY"),
'endDate': end.format("MM/DD/YYYY")
"reportType": "detail",
"certProductType": "Server",
"certStatus": "Pending",
"startDate": start.format("MM/DD/YYYY"),
"endDate": end.format("MM/DD/YYYY"),
}
response = self.session.post(url, data=data)

url = current_app.config.get('VERISIGN_URL') + '/rest/services/reject'
for order_id in response.json()['orderNumber']:
response = self.session.get(url, params={'transaction_id': order_id})
url = current_app.config.get("VERISIGN_URL") + "/rest/services/reject"
for order_id in response.json()["orderNumber"]:
response = self.session.get(url, params={"transaction_id": order_id})

if response.status_code == 200:
print("Rejecting certificate. TransactionId: {}".format(order_id))


class VerisignSourcePlugin(SourcePlugin):
title = 'Verisign'
slug = 'verisign-source'
description = 'Allows for the polling of issued certificates from the VICE2.0 verisign API.'
title = "Verisign"
slug = "verisign-source"
description = (
"Allows for the polling of issued certificates from the VICE2.0 verisign API."
)
version = verisign.VERSION

author = 'Kevin Glisson'
author_url = 'https://github.com/netflix/lemur.git'
author = "Kevin Glisson"
author_url = "https://github.com/netflix/lemur.git"

def __init__(self, *args, **kwargs):
self.session = requests.Session()
self.session.cert = current_app.config.get('VERISIGN_PEM_PATH')
self.session.cert = current_app.config.get("VERISIGN_PEM_PATH")
super(VerisignSourcePlugin, self).__init__(*args, **kwargs)

def get_certificates(self):
url = current_app.config.get('VERISIGN_URL') + '/reportingws'
url = current_app.config.get("VERISIGN_URL") + "/reportingws"
end = arrow.now()
start = end.replace(years=-5)
data = {
'reportType': 'detail',
'startDate': start.format("MM/DD/YYYY"),
'endDate': end.format("MM/DD/YYYY"),
'structuredRecord': 'Y',
'certStatus': 'Valid',
"reportType": "detail",
"startDate": start.format("MM/DD/YYYY"),
"endDate": end.format("MM/DD/YYYY"),
"structuredRecord": "Y",
"certStatus": "Valid",
}
current_app.logger.debug(data)
response = self.session.post(url, data=data)
@ -1,4 +1,4 @@

def test_get_certificates(app):
from lemur.plugins.base import plugins
p = plugins.get('verisign-issuer')

p = plugins.get("verisign-issuer")

@ -17,8 +17,8 @@ def get_plugin_option(name, options):
:return:
"""
for o in options:
if o.get('name') == name:
return o.get('value', o.get('default'))
if o.get("name") == name:
return o.get("value", o.get("default"))


def set_plugin_option(name, value, options):
@ -27,5 +27,5 @@ def set_plugin_option(name, value, options):
:param options:
"""
for o in options:
if o.get('name') == name:
o.update({'value': value})
if o.get("name") == name:
o.update({"value": value})
@ -15,12 +15,13 @@ from lemur.schemas import plugins_output_schema, plugin_output_schema
from lemur.common.schema import validate_schema
from lemur.plugins.base import plugins

mod = Blueprint('plugins', __name__)
mod = Blueprint("plugins", __name__)
api = Api(mod)


class PluginsList(AuthenticatedResource):
""" Defines the 'plugins' endpoint """

def __init__(self):
self.reqparse = reqparse.RequestParser()
super(PluginsList, self).__init__()
@ -69,17 +70,18 @@ class PluginsList(AuthenticatedResource):
:reqheader Authorization: OAuth token to authenticate
:statuscode 200: no error
"""
self.reqparse.add_argument('type', type=str, location='args')
self.reqparse.add_argument("type", type=str, location="args")
args = self.reqparse.parse_args()

if args['type']:
return list(plugins.all(plugin_type=args['type']))
if args["type"]:
return list(plugins.all(plugin_type=args["type"]))

return list(plugins.all())


class Plugins(AuthenticatedResource):
""" Defines the 'plugins' endpoint """

def __init__(self):
super(Plugins, self).__init__()

@ -118,5 +120,5 @@ class Plugins(AuthenticatedResource):
return plugins.get(name)


api.add_resource(PluginsList, '/plugins', endpoint='plugins')
api.add_resource(Plugins, '/plugins/<name>', endpoint='pluginName')
api.add_resource(PluginsList, "/plugins", endpoint="plugins")
api.add_resource(Plugins, "/plugins/<name>", endpoint="pluginName")