S3 destination (#371)

kevgliss 2016-06-27 15:11:46 -07:00 committed by GitHub
parent fe9703dd94
commit f846d78778
4 changed files with 155 additions and 2 deletions

View File

@@ -4,15 +4,41 @@
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
Terraform example to set up the destination bucket:

resource "aws_s3_bucket" "certs_log_bucket" {
  bucket = "certs-log-access-bucket"
  acl    = "log-delivery-write"
}

resource "aws_s3_bucket" "certs_lemur" {
  bucket = "certs-lemur"
  acl    = "private"

  logging {
    target_bucket = "${aws_s3_bucket.certs_log_bucket.id}"
    target_prefix = "log/lemur"
  }
}
The IAM role Lemur runs as needs the following actions on the destination bucket:

    "s3:PutObject",
    "s3:PutObjectAcl"

Consumers that read the uploaded certificates need:

    "s3:GetObject"
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
.. moduleauthor:: Mikhail Khodorovskiy <mikhail.khodorovskiy@jivesoftware.com>
.. moduleauthor:: Harm Weites <harm@weites.com>
""" """
from flask import current_app
from boto.exception import BotoServerError
from lemur.plugins.bases import DestinationPlugin, SourcePlugin
from lemur.plugins.lemur_aws.ec2 import get_regions
from lemur.plugins.lemur_aws.elb import get_all_elbs, describe_load_balancer_policies, attach_certificate
from lemur.plugins.lemur_aws import iam, s3
from lemur.plugins import lemur_aws as aws
@@ -34,6 +60,7 @@ class AWSDestinationPlugin(DestinationPlugin):
            'helpMessage': 'Must be a valid AWS account number!',
        }
    ]
    # 'elb': {
    #     'name': {'type': 'name'},
    #     'region': {'type': 'str'},
@@ -43,7 +70,8 @@ class AWSDestinationPlugin(DestinationPlugin):
    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        if private_key:
            try:
                iam.upload_cert(self.get_option('accountNumber', options), name, body, private_key,
                                cert_chain=cert_chain)
            except BotoServerError as e:
                if e.error_code != 'EntityAlreadyExists':
                    raise Exception(e)
@@ -146,3 +174,84 @@ def format_elb_cipher_policy(policy):
                ciphers.append(attr['AttributeName'])
    return dict(name=name, ciphers=ciphers)


class S3DestinationPlugin(DestinationPlugin):
    title = 'AWS-S3'
    slug = 'aws-s3'
    description = 'Allow the uploading of certificates to Amazon S3'
    author = 'Mikhail Khodorovskiy, Harm Weites <harm@weites.com>'
    author_url = 'https://github.com/Netflix/lemur'

    options = [
        {
            'name': 'bucket',
            'type': 'str',
            'required': True,
            'validation': '/^$|\s+/',
            'helpMessage': 'Must be a valid S3 bucket name!',
        },
        {
            'name': 'accountNumber',
            'type': 'int',
            'required': True,
            'validation': '/^[0-9]{12,12}$/',
            'helpMessage': 'A valid AWS account number with permission to access S3',
        },
        {
            'name': 'region',
            'type': 'str',
            'default': 'eu-west-1',
            'required': False,
            'validation': '/^\w+-\w+-\d+$/',
            'helpMessage': 'Region to use',
        },
        {
            'name': 'encrypt',
            'type': 'bool',
            'required': False,
            'helpMessage': 'Enable server-side encryption of the uploaded objects',
            'default': True
        },
        {
            'name': 'key',
            'type': 'str',
            'required': False,
            'validation': '/^$|\s+/',
            'helpMessage': 'Must be a valid S3 object key!',
        },
        {
            'name': 'caKey',
            'type': 'str',
            'required': False,
            'validation': '/^$|\s+/',
            'helpMessage': 'Must be a valid S3 object key!',
        },
        {
            'name': 'certKey',
            'type': 'str',
            'required': False,
            'validation': '/^$|\s+/',
            'helpMessage': 'Must be a valid S3 object key!',
        }
    ]

    def __init__(self, *args, **kwargs):
        super(S3DestinationPlugin, self).__init__(*args, **kwargs)

    def upload(self, name, body, private_key, cert_chain, options, **kwargs):
        account_number = self.get_option('accountNumber', options)
        encrypt = self.get_option('encrypt', options)
        bucket = self.get_option('bucket', options)
        key = self.get_option('key', options)
        ca_key = self.get_option('caKey', options)
        cert_key = self.get_option('certKey', options)
        if key and ca_key and cert_key:
            s3.write_to_s3(account_number, bucket, key, private_key, encrypt=encrypt)
            s3.write_to_s3(account_number, bucket, ca_key, cert_chain, encrypt=encrypt)
            s3.write_to_s3(account_number, bucket, cert_key, body, encrypt=encrypt)
        else:
            # No explicit object keys configured: upload one combined PEM
            # (private key, certificate, then chain) under the certificate name.
            pem_body = private_key + '\n' + body + '\n' + cert_chain + '\n'
            s3.write_to_s3(account_number, bucket, name, pem_body, encrypt=encrypt)
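As a rough wiring sketch (not part of the plugin itself), the snippet below shows how an options payload might drive the two upload modes. It assumes the usual Lemur convention that options arrive as a list of name/value dicts read by get_option, that the plugin lives in lemur.plugins.lemur_aws.plugin and can be constructed without arguments, and that every bucket, account and key value is a placeholder:

# Hypothetical usage sketch; running it for real requires the Lemur instance role
# to be able to assume into the target account.
from lemur.plugins.lemur_aws.plugin import S3DestinationPlugin

options = [
    {'name': 'bucket', 'value': 'certs-lemur'},
    {'name': 'accountNumber', 'value': '111111111111'},
    {'name': 'encrypt', 'value': True},
    # With all three object keys set, the parts are written as separate objects ...
    {'name': 'key', 'value': 'lemur/example.com/example.com.key'},
    {'name': 'caKey', 'value': 'lemur/example.com/chain.pem'},
    {'name': 'certKey', 'value': 'lemur/example.com/cert.pem'},
    # ... omit them and upload() falls back to one combined PEM stored under the certificate name.
]

S3DestinationPlugin().upload(
    'example.com',  # certificate name, used as the object key in combined mode
    '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----',
    '-----BEGIN RSA PRIVATE KEY-----\n...\n-----END RSA PRIVATE KEY-----',
    '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----',
    options)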

View File

@@ -0,0 +1,26 @@
"""
.. module: lemur.plugins.lemur_aws.s3
:platform: Unix
:synopsis: Contains helper functions for interacting with the AWS S3 API.
:copyright: (c) 2015 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Kevin Glisson <kglisson@netflix.com>
"""
from boto.s3.key import Key
from lemur.plugins.lemur_aws.sts import assume_service

def write_to_s3(account_number, bucket_name, key, data, encrypt=True):
    """
    Use STS to write to an S3 bucket.

    :param account_number: AWS account that owns the destination bucket
    :param bucket_name: destination bucket
    :param key: object key to write
    :param data: string body to store
    :param encrypt: when True, request server-side encryption for the object
    """
    conn = assume_service(account_number, 's3')
    b = conn.get_bucket(bucket_name, validate=False)  # validate=False removes the need for ListObjects permission
    k = Key(bucket=b, name=key)
    k.set_contents_from_string(data, encrypt_key=encrypt)
    k.set_canned_acl("bucket-owner-read")
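For quick reference, a call sketch for this helper; the account number, bucket and key are placeholder values, and the credentials Lemur runs with must be allowed to assume the configured role in that account:

# Placeholder values throughout; needs a role in 111111111111 that the Lemur instance can assume.
write_to_s3(
    '111111111111',
    'certs-lemur',
    'lemur/example.com/cert.pem',
    '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n',
    encrypt=True)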

View File

@@ -39,6 +39,13 @@ def assume_service(account_number, service, region='us-east-1'):
            aws_secret_access_key=role.credentials.secret_key,
            security_token=role.credentials.session_token)
    elif service in 's3':
        return boto.s3.connect_to_region(
            region,
            aws_access_key_id=role.credentials.access_key,
            aws_secret_access_key=role.credentials.secret_key,
            security_token=role.credentials.session_token)


def sts_client(service, service_type='client'):
    def decorator(f):

View File

@@ -0,0 +1,11 @@
from moto import mock_s3
import boto


@mock_s3()
def test_write_to_s3():
    conn = boto.connect_s3()
    conn.create_bucket('test')

    from lemur.plugins.lemur_aws.s3 import write_to_s3
    write_to_s3('11111111111111', 'test', 'key', 'body')

    assert conn.get_bucket('test').get_key('key').get_contents_as_string() == 'body'