Mitch Garnaat

Merge pull request #3 from garnaat/refactor

WIP commit on a significant refactoring of the code.
1 +language: python
2 +python:
3 + - "2.7"
4 + - "3.3"
5 + - "3.4"
6 +install:
7 + - pip install -r requirements.txt
8 + - pip install coverage python-coveralls
9 +script: nosetests tests/unit --cover-erase --with-coverage --cover-package kappa
10 +after_success: coveralls
1 kappa
2 =====
3
4 +[![Build Status](https://travis-ci.org/garnaat/kappa.svg?branch=develop)](https://travis-ci.org/garnaat/kappa)
5 +
6 +[![Code Health](https://landscape.io/github/garnaat/kappa/develop/landscape.svg)](https://landscape.io/github/garnaat/kappa/develop)
7 +
4 **Kappa** is a command line tool that (hopefully) makes it easier to
5 deploy, update, and test functions for AWS Lambda.
6
......
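A quick illustration (not part of this PR) of what the refactor enables: the rewritten cli.py below simply builds a kappa.context.Context and calls its methods, so the same flow can be scripted directly. A minimal sketch, assuming a config file like the samples further down in this diff; 'config.yml' is a placeholder name.

# Sketch only -- mirrors what the refactored cli.py does for 'deploy' and 'tail'.
from kappa.context import Context

with open('config.yml') as fp:    # placeholder path
    ctx = Context(fp, debug=False)

ctx.deploy()                      # create/update the CloudFormation stack and upload the code
for event in ctx.tail():          # raw CloudWatch Logs events for the function
    print(event['message'])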
...@@ -14,33 +14,8 @@
14 import logging
15
16 import click
17 -import yaml
18
19 -from kappa import Kappa
18 +from kappa.context import Context
20 -
21 -FmtString = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
22 -
23 -
24 -def set_debug_logger(logger_names=['kappa'], stream=None):
25 - """
26 - Convenience function to quickly configure full debug output
27 - to go to the console.
28 - """
29 - for logger_name in logger_names:
30 - log = logging.getLogger(logger_name)
31 - log.setLevel(logging.DEBUG)
32 -
33 - ch = logging.StreamHandler(stream)
34 - ch.setLevel(logging.DEBUG)
35 -
36 - # create formatter
37 - formatter = logging.Formatter(FmtString)
38 -
39 - # add formatter to ch
40 - ch.setFormatter(formatter)
41 -
42 - # add ch to logger
43 - log.addHandler(ch)
44
45
46 @click.command()
...@@ -62,27 +37,26 @@ def set_debug_logger(logger_names=['kappa'], stream=None):
62 type=click.Choice(['deploy', 'test', 'tail', 'add-event-source', 'delete'])
63 )
64 def main(config=None, debug=False, command=None):
65 - if debug:
40 + ctx = Context(config, debug)
66 - set_debug_logger()
67 - config = yaml.load(config)
68 - kappa = Kappa(config)
69 if command == 'deploy':
70 click.echo('Deploying ...')
71 - kappa.deploy()
43 + ctx.deploy()
72 click.echo('...done')
73 elif command == 'test':
74 click.echo('Sending test data ...')
75 - kappa.test()
47 + ctx.test()
76 click.echo('...done')
77 elif command == 'tail':
78 - kappa.tail()
50 + events = ctx.tail()
51 + for event in events:
52 + print(event['message'])
79 elif command == 'delete':
80 click.echo('Deleting ...')
81 - kappa.delete()
55 + ctx.delete()
82 click.echo('...done')
83 elif command == 'add-event-source':
84 click.echo('Adding event source ...')
85 - kappa.add_event_source()
59 + ctx.add_event_sources()
86 click.echo('...done')
87
88
......
1 +# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
13 +
14 +import botocore.session
15 +
16 +
17 +class __AWS(object):
18 +
19 + def __init__(self, profile=None, region=None):
20 + self._client_cache = {}
21 + self._session = botocore.session.get_session()
22 + self._session.profile = profile
23 + self._region = region
24 +
25 + def create_client(self, client_name):
26 + if client_name not in self._client_cache:
27 + self._client_cache[client_name] = self._session.create_client(
28 + client_name, self._region)
29 + return self._client_cache[client_name]
30 +
31 +
32 +__Singleton_AWS = None
33 +
34 +
35 +def get_aws(context):
36 + global __Singleton_AWS
37 + if __Singleton_AWS is None:
38 + __Singleton_AWS = __AWS(context.profile, context.region)
39 + return __Singleton_AWS
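For orientation (not in the diff itself): every other module obtains its botocore clients through get_aws(), which holds one shared session and caches one client per service name. A minimal sketch, using a hypothetical stand-in for the context object:

# Sketch only; _FakeContext is a stand-in for kappa.context.Context.
import kappa.aws

class _FakeContext(object):
    profile = None        # default credential profile
    region = 'us-east-1'

aws = kappa.aws.get_aws(_FakeContext())
lambda_client = aws.create_client('lambda')
assert lambda_client is aws.create_client('lambda')   # second call returns the cached client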
1 +# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
13 +
14 +import logging
15 +import yaml
16 +
17 +import kappa.function
18 +import kappa.event_source
19 +import kappa.stack
20 +
21 +LOG = logging.getLogger(__name__)
22 +
23 +DebugFmtString = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
24 +InfoFmtString = '\t%(message)s'
25 +
26 +
27 +class Context(object):
28 +
29 + def __init__(self, config_file, debug=False):
30 + if debug:
31 + self.set_logger('kappa', logging.DEBUG)
32 + else:
33 + self.set_logger('kappa', logging.INFO)
34 + self.config = yaml.load(config_file)
35 + self._stack = kappa.stack.Stack(
36 + self, self.config['cloudformation'])
37 + self.function = kappa.function.Function(
38 + self, self.config['lambda'])
39 + self.event_sources = []
40 + self._create_event_sources()
41 +
42 + @property
43 + def profile(self):
44 + return self.config.get('profile', None)
45 +
46 + @property
47 + def region(self):
48 + return self.config.get('region', None)
49 +
50 + @property
51 + def cfn_config(self):
52 + return self.config.get('cloudformation', None)
53 +
54 + @property
55 + def lambda_config(self):
56 + return self.config.get('lambda', None)
57 +
58 + @property
59 + def exec_role_arn(self):
60 + return self._stack.invoke_role_arn
61 +
62 + @property
63 + def invoke_role_arn(self):
64 + return self._stack.invoke_role_arn
65 +
66 + def debug(self):
67 + self.set_logger('kappa', logging.DEBUG)
68 +
69 + def set_logger(self, logger_name, level=logging.INFO):
70 + """
71 + Convenience function to quickly configure full debug output
72 + to go to the console.
73 + """
74 + log = logging.getLogger(logger_name)
75 + log.setLevel(level)
76 +
77 + ch = logging.StreamHandler(None)
78 + ch.setLevel(level)
79 +
80 + # create formatter
81 + if level == logging.INFO:
82 + formatter = logging.Formatter(InfoFmtString)
83 + else:
84 + formatter = logging.Formatter(DebugFmtString)
85 +
86 + # add formatter to ch
87 + ch.setFormatter(formatter)
88 +
89 + # add ch to logger
90 + log.addHandler(ch)
91 +
92 + def _create_event_sources(self):
93 + for event_source_cfg in self.config['lambda']['event_sources']:
94 + _, _, svc, _ = event_source_cfg['arn'].split(':', 3)
95 + if svc == 'kinesis':
96 + self.event_sources.append(kappa.event_source.KinesisEventSource(
97 + self, event_source_cfg))
98 + elif svc == 's3':
99 + self.event_sources.append(kappa.event_source.S3EventSource(
100 + self, event_source_cfg))
101 + else:
102 + msg = 'Unsupported event source: %s' % event_source_cfg['arn']
103 + raise ValueError(msg)
104 +
105 + def add_event_sources(self):
106 + for event_source in self.event_sources:
107 + event_source.add(self.function)
108 +
109 + def deploy(self):
110 + if self._stack.exists():
111 + self._stack.update()
112 + else:
113 + self._stack.create()
114 + self.function.upload()
115 +
116 + def test(self):
117 + self.function.test()
118 +
119 + def tail(self):
120 + return self.function.tail()
121 +
122 + def delete(self):
123 + self._stack.delete()
124 + self.function.delete()
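For reference (not part of the PR): judging from the keys read above and the sample configs later in this diff, the YAML file handed to Context maps to roughly this structure; every value below is a placeholder.

# Illustrative shape of yaml.load(config_file) as consumed by Context.
config = {
    'profile': 'personal',             # optional; omitted means default credentials
    'region': 'us-east-1',             # optional
    'cloudformation': {
        'stack_name': 'TestKinesis',
        'template': 'roles.cf',
        'exec_role': 'ExecRole',       # logical id of the execution role in the stack
        'invoke_role': 'InvokeRole',   # logical id of the invocation role in the stack
    },
    'lambda': {
        'name': 'KinesisSample',
        'zipfile_name': 'KinesisSample.zip',
        'path': 'ProcessKinesisRecords.js',
        'runtime': 'nodejs',
        'handler': 'ProcessKinesisRecords.handler',
        'mode': 'event',
        'description': 'Testing kappa deployment',
        'memory_size': 128,
        'timeout': 3,
        'test_data': 'input.json',
        'event_sources': [
            {'arn': 'arn:aws:kinesis:us-east-1:084307701560:stream/lambdastream'},
        ],
    },
}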
1 +# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
13 +
14 +import logging
15 +
16 +import kappa.aws
17 +
18 +LOG = logging.getLogger(__name__)
19 +
20 +
21 +class EventSource(object):
22 +
23 + def __init__(self, context, config):
24 + self._context = context
25 + self._config = config
26 +
27 + @property
28 + def arn(self):
29 + return self._config['arn']
30 +
31 + @property
32 + def batch_size(self):
33 + return self._config.get('batch_size', 100)
34 +
35 +
36 +class KinesisEventSource(EventSource):
37 +
38 + def __init__(self, context, config):
39 + super(KinesisEventSource, self).__init__(context, config)
40 + aws = kappa.aws.get_aws(context)
41 + self._lambda = aws.create_client('lambda')
42 +
43 + def _get_uuid(self, function):
44 + uuid = None
45 + response = self._lambda.list_event_sources(
46 + FunctionName=function.name,
47 + EventSourceArn=self.arn)
48 + LOG.debug(response)
49 + if len(response['EventSources']) > 0:
50 + uuid = response['EventSources'][0]['UUID']
51 + return uuid
52 +
53 + def add(self, function):
54 + try:
55 + response = self._lambda.add_event_source(
56 + FunctionName=function.name,
57 + Role=self._context.invoke_role_arn,
58 + EventSource=self.arn,
59 + BatchSize=self.batch_size)
60 + LOG.debug(response)
61 + except Exception:
62 + LOG.exception('Unable to add Kinesis event source')
63 +
64 + def remove(self, function):
65 + response = None
66 + uuid = self._get_uuid(function)
67 + if uuid:
68 + response = self._lambda.remove_event_source(
69 + UUID=uuid)
70 + LOG.debug(response)
71 + return response
72 +
73 +
74 +class S3EventSource(EventSource):
75 +
76 + def __init__(self, context, config):
77 + super(S3EventSource, self).__init__(context, config)
78 + aws = kappa.aws.get_aws(context)
79 + self._s3 = aws.create_client('s3')
80 +
81 + def _make_notification_id(self, function_name):
82 + return 'Kappa-%s-notification' % function_name
83 +
84 + def _get_bucket_name(self):
85 + return self.arn.split(':')[-1]
86 +
87 + def add(self, function):
88 + notification_spec = {
89 + 'CloudFunctionConfiguration': {
90 + 'Id': self._make_notification_id(function.name),
91 + 'Events': [e for e in self._config['events']],
92 + 'CloudFunction': function.arn,
93 + 'InvocationRole': self._context.invoke_role_arn}}
94 + try:
95 + response = self._s3.put_bucket_notification(
96 + Bucket=self._get_bucket_name(),
97 + NotificationConfiguration=notification_spec)
98 + LOG.debug(response)
99 + except Exception:
100 + LOG.exception('Unable to add S3 event source')
101 +
102 + def remove(self, function):
103 + response = self._s3.get_bucket_notification(
104 + Bucket=self._get_bucket_name())
105 + LOG.debug(response)
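Context normally constructs these objects in _create_event_sources; a standalone sketch of the same wiring, with a placeholder config entry (ctx is a kappa.context.Context built as in the earlier sketch):

# Sketch only; 'entry' mirrors one item of the lambda.event_sources list.
import kappa.event_source

entry = {'arn': 'arn:aws:kinesis:us-east-1:084307701560:stream/lambdastream',
         'batch_size': 100}
source = kappa.event_source.KinesisEventSource(ctx, entry)
source.add(ctx.function)       # maps the stream to the Lambda function
source.remove(ctx.function)    # looks up the mapping UUID and deletes it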
1 +# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
13 +
14 +import logging
15 +import os
16 +import zipfile
17 +
18 +import kappa.aws
19 +import kappa.log
20 +
21 +LOG = logging.getLogger(__name__)
22 +
23 +
24 +class Function(object):
25 +
26 + def __init__(self, context, config):
27 + self._context = context
28 + self._config = config
29 + aws = kappa.aws.get_aws(context)
30 + self._lambda_svc = aws.create_client('lambda')
31 + self._arn = None
32 + self._log = None
33 +
34 + @property
35 + def name(self):
36 + return self._config['name']
37 +
38 + @property
39 + def runtime(self):
40 + return self._config['runtime']
41 +
42 + @property
43 + def handler(self):
44 + return self._config['handler']
45 +
46 + @property
47 + def mode(self):
48 + return self._config['mode']
49 +
50 + @property
51 + def description(self):
52 + return self._config['description']
53 +
54 + @property
55 + def timeout(self):
56 + return self._config['timeout']
57 +
58 + @property
59 + def memory_size(self):
60 + return self._config['memory_size']
61 +
62 + @property
63 + def zipfile_name(self):
64 + return self._config['zipfile_name']
65 +
66 + @property
67 + def path(self):
68 + return self._config['path']
69 +
70 + @property
71 + def test_data(self):
72 + return self._config['test_data']
73 +
74 + @property
75 + def arn(self):
76 + if self._arn is None:
77 + try:
78 + response = self._lambda_svc.get_function_configuration(
79 + FunctionName=self.name)
80 + LOG.debug(response)
81 + self._arn = response['FunctionARN']
82 + except Exception:
83 + LOG.debug('Unable to find ARN for function: %s' % self.name)
84 + return self._arn
85 +
86 + @property
87 + def log(self):
88 + if self._log is None:
89 + log_group_name = '/aws/lambda/%s' % self.name
90 + self._log = kappa.log.Log(self._context, log_group_name)
91 + return self._log
92 +
93 + def tail(self):
94 + LOG.debug('tailing function: %s', self.name)
95 + return self.log.tail()
96 +
97 + def _zip_lambda_dir(self, zipfile_name, lambda_dir):
98 + LOG.debug('_zip_lambda_dir: lambda_dir=%s', lambda_dir)
99 + LOG.debug('zipfile_name=%s', zipfile_name)
100 + relroot = os.path.abspath(lambda_dir)
101 + with zipfile.ZipFile(zipfile_name, 'w') as zf:
102 + for root, dirs, files in os.walk(lambda_dir):
103 + zf.write(root, os.path.relpath(root, relroot))
104 + for file in files:
105 + filename = os.path.join(root, file)
106 + if os.path.isfile(filename):
107 + arcname = os.path.join(
108 + os.path.relpath(root, relroot), file)
109 + zf.write(filename, arcname)
110 +
111 + def _zip_lambda_file(self, zipfile_name, lambda_file):
112 + LOG.debug('_zip_lambda_file: lambda_file=%s', lambda_file)
113 + LOG.debug('zipfile_name=%s', zipfile_name)
114 + with zipfile.ZipFile(zipfile_name, 'w') as zf:
115 + zf.write(lambda_file)
116 +
117 + def zip_lambda_function(self, zipfile_name, lambda_fn):
118 + if os.path.isdir(lambda_fn):
119 + self._zip_lambda_dir(zipfile_name, lambda_fn)
120 + else:
121 + self._zip_lambda_file(zipfile_name, lambda_fn)
122 +
123 + def upload(self):
124 + LOG.debug('uploading %s', self.zipfile_name)
125 + self.zip_lambda_function(self.zipfile_name, self.path)
126 + with open(self.zipfile_name, 'rb') as fp:
127 + exec_role = self._context.exec_role_arn
128 + try:
129 + response = self._lambda_svc.upload_function(
130 + FunctionName=self.name,
131 + FunctionZip=fp,
132 + Runtime=self.runtime,
133 + Role=exec_role,
134 + Handler=self.handler,
135 + Mode=self.mode,
136 + Description=self.description,
137 + Timeout=self.timeout,
138 + MemorySize=self.memory_size)
139 + LOG.debug(response)
140 + except Exception:
141 + LOG.exception('Unable to upload zip file')
142 +
143 + def delete(self):
144 + LOG.debug('deleting function %s', self.name)
145 + response = self._lambda_svc.delete_function(FunctionName=self.name)
146 + LOG.debug(response)
147 + return response
148 +
149 + def invoke_asynch(self, data_file):
150 + LOG.debug('_invoke_async %s', data_file)
151 + with open(data_file) as fp:
152 + response = self._lambda_svc.invoke_async(
153 + FunctionName=self.name,
154 + InvokeArgs=fp)
155 + LOG.debug(response)
156 +
157 + def test(self):
158 + self.invoke_asynch(self.test_data)
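One point worth noting (illustration, not from the PR): upload() rebuilds the zip on every call, and zip_lambda_function accepts either a single source file or a directory tree for 'path'. A short sketch, with placeholder file names and ctx built as in the earlier sketch:

# Sketch only.
fn = ctx.function                                    # kappa.function.Function
fn.zip_lambda_function('bundle.zip', 'handler.js')   # zips a single file
fn.zip_lambda_function('bundle.zip', 'src/')         # or walks a whole directory
fn.upload()    # zips fn.path into fn.zipfile_name and uploads it to Lambda
fn.test()      # invokes the function asynchronously with fn.test_data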
1 +# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
13 +
14 +import logging
15 +
16 +import kappa.aws
17 +
18 +LOG = logging.getLogger(__name__)
19 +
20 +
21 +class Log(object):
22 +
23 + def __init__(self, context, log_group_name):
24 + self._context = context
25 + self.log_group_name = log_group_name
26 + aws = kappa.aws.get_aws(self._context)
27 + self._log_svc = aws.create_client('logs')
28 +
29 + def streams(self):
30 + LOG.debug('getting streams for log group: %s', self.log_group_name)
31 + response = self._log_svc.describe_log_streams(
32 + logGroupName=self.log_group_name)
33 + LOG.debug(response)
34 + return response['logStreams']
35 +
36 + def tail(self):
37 + LOG.debug('tailing log group: %s', self.log_group_name)
38 + latest_stream = None
39 + streams = self.streams()
40 + for stream in streams:
41 + if not latest_stream:
42 + latest_stream = stream
43 + elif stream['lastEventTimestamp'] > latest_stream['lastEventTimestamp']:
44 + latest_stream = stream
45 + response = self._log_svc.get_log_events(
46 + logGroupName=self.log_group_name,
47 + logStreamName=latest_stream['logStreamName'])
48 + LOG.debug(response)
49 + return response['events']
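tail() picks the stream with the newest lastEventTimestamp and returns its raw CloudWatch Logs events, the same dicts the CLI prints via event['message']. A short sketch (placeholder function name; ctx built as in the earlier sketch; the log group name follows the '/aws/lambda/<function name>' convention used by Function.log):

# Sketch only.
from kappa.log import Log

log = Log(ctx, '/aws/lambda/KinesisSample')
for event in log.tail():
    print(event['timestamp'], event['message'])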
1 +# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
13 +
14 +import logging
15 +import time
16 +
17 +import kappa.aws
18 +
19 +LOG = logging.getLogger(__name__)
20 +
21 +
22 +class Stack(object):
23 +
24 + completed_states = ('CREATE_COMPLETE', 'UPDATE_COMPLETE')
25 + failed_states = ('ROLLBACK_COMPLETE',)
26 +
27 + def __init__(self, context, config):
28 + self._context = context
29 + self._config = config
30 + aws = kappa.aws.get_aws(self._context)
31 + self._cfn = aws.create_client('cloudformation')
32 + self._iam = aws.create_client('iam')
33 +
34 + @property
35 + def name(self):
36 + return self._config['stack_name']
37 +
38 + @property
39 + def template_path(self):
40 + return self._config['template']
41 +
42 + @property
43 + def exec_role(self):
44 + return self._config['exec_role']
45 +
46 + @property
47 + def exec_role_arn(self):
48 + return self._get_role_arn(self.exec_role)
49 +
50 + @property
51 + def invoke_role(self):
52 + return self._config['invoke_role']
53 +
54 + @property
55 + def invoke_role_arn(self):
56 + return self._get_role_arn(self.invoke_role)
57 +
58 + def _get_role_arn(self, role_name):
59 + role_arn = None
60 + try:
61 + resources = self._cfn.list_stack_resources(
62 + StackName=self.name)
63 + LOG.debug(resources)
64 + except Exception:
65 + LOG.exception('Unable to find role ARN: %s', role_name)
66 + for resource in resources['StackResourceSummaries']:
67 + if resource['LogicalResourceId'] == role_name:
68 + role = self._iam.get_role(
69 + RoleName=resource['PhysicalResourceId'])
70 + LOG.debug(role)
71 + role_arn = role['Role']['Arn']
72 + LOG.debug('role_arn: %s', role_arn)
73 + return role_arn
74 +
75 + def exists(self):
76 + """
77 + Does Cloudformation Stack already exist?
78 + """
79 + try:
80 + response = self._cfn.describe_stacks(StackName=self.name)
81 + LOG.debug('Stack %s exists', self.name)
82 + except Exception:
83 + LOG.debug('Stack %s does not exist', self.name)
84 + response = None
85 + return response
86 +
87 + def wait(self):
88 + done = False
89 + while not done:
90 + time.sleep(1)
91 + response = self._cfn.describe_stacks(StackName=self.name)
92 + LOG.debug(response)
93 + status = response['Stacks'][0]['StackStatus']
94 + LOG.debug('Stack status is: %s', status)
95 + if status in self.completed_states:
96 + done = True
97 + if status in self.failed_states:
98 + msg = 'Could not create stack %s: %s' % (self.name, status)
99 + raise ValueError(msg)
100 +
101 + def create(self):
102 + LOG.debug('create_stack: stack_name=%s', self.name)
103 + template_body = open(self.template_path).read()
104 + try:
105 + response = self._cfn.create_stack(
106 + StackName=self.name, TemplateBody=template_body,
107 + Capabilities=['CAPABILITY_IAM'])
108 + LOG.debug(response)
109 + except Exception:
110 + LOG.exception('Unable to create stack')
111 + self.wait()
112 +
113 + def update(self):
114 + LOG.debug('update_stack: stack_name=%s', self.name)
115 + template_body = open(self.template_path).read()
116 + try:
117 + response = self._cfn.update_stack(
118 + StackName=self.name, TemplateBody=template_body,
119 + Capabilities=['CAPABILITY_IAM'])
120 + LOG.debug(response)
121 + except Exception as e:
122 + if 'ValidationError' in str(e):
123 + LOG.info('No Updates Required')
124 + else:
125 + LOG.exception('Unable to update stack')
126 + self.wait()
127 +
128 + def delete(self):
129 + LOG.debug('delete_stack: stack_name=%s', self.name)
130 + try:
131 + response = self._cfn.delete_stack(StackName=self.name)
132 + LOG.debug(response)
133 + except Exception:
134 + LOG.exception('Unable to delete stack: %s', self.name)
1 -botocore==0.75.0
1 +botocore==0.80.0
2 click==3.3
3 PyYAML>=3.11
4 +mock>=1.0.1
4 nose==1.3.1
5 tox==1.7.1
......
...@@ -16,6 +16,8 @@ lambda:
16 memory_size: 128
17 timeout: 3
18 mode: event
19 - event_source: arn:aws:kinesis:us-east-1:084307701560:stream/lambdastream
19 + event_sources:
20 + -
21 + arn: arn:aws:kinesis:us-east-1:084307701560:stream/lambdastream
20 test_data: input.json
21
...\ No newline at end of file
......
...@@ -17,6 +17,8 @@ lambda:
17 timeout: 3
18 mode: event
19 test_data: input.json
20 - event_source: arn:aws:s3:::sourcebucket
21 - s3_events:
22 - - s3:ObjectCreated:*
...\ No newline at end of file
20 + event_sources:
21 + -
22 + arn: arn:aws:s3:::test-1245812163
23 + events:
24 + - s3:ObjectCreated:*
......
...@@ -5,7 +5,7 @@ from setuptools import setup, find_packages
5 import os
6
7 requires = [
8 - 'botocore==0.75.0',
8 + 'botocore==0.80.0',
9 'click==3.3',
10 'PyYAML>=3.11'
11 ]
......
1 +# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
1 +# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
File mode changed
1 +import mock
2 +
3 +import tests.unit.responses as responses
4 +
5 +
6 +class MockAWS(object):
7 +
8 + def __init__(self, profile=None, region=None):
9 + pass
10 +
11 + def create_client(self, client_name):
12 + client = None
13 + if client_name == 'logs':
14 + client = mock.Mock()
15 + choices = responses.logs_describe_log_streams
16 + client.describe_log_streams = mock.Mock(
17 + side_effect=choices)
18 + choices = responses.logs_get_log_events
19 + client.get_log_events = mock.Mock(
20 + side_effect=choices)
21 + if client_name == 'cloudformation':
22 + client = mock.Mock()
23 + choices = responses.cfn_list_stack_resources
24 + client.list_stack_resources = mock.Mock(
25 + side_effect=choices)
26 + choices = responses.cfn_describe_stacks
27 + client.describe_stacks = mock.Mock(
28 + side_effect=choices)
29 + choices = responses.cfn_create_stack
30 + client.create_stack = mock.Mock(
31 + side_effect=choices)
32 + choices = responses.cfn_delete_stack
33 + client.delete_stack = mock.Mock(
34 + side_effect=choices)
35 + if client_name == 'iam':
36 + client = mock.Mock()
37 + choices = responses.iam_get_role
38 + client.get_role = mock.Mock(
39 + side_effect=choices)
40 + return client
41 +
42 +
43 +def get_aws(context):
44 + return MockAWS()
1 +import datetime
2 +from dateutil.tz import tzutc
3 +
4 +cfn_list_stack_resources = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'dd35f0ef-9699-11e4-ba38-c355c9515dbc'}, u'StackResourceSummaries': [{u'ResourceStatus': 'CREATE_COMPLETE', u'ResourceType': 'AWS::IAM::Role', u'ResourceStatusReason': None, u'LastUpdatedTimestamp': datetime.datetime(2015, 1, 6, 17, 37, 54, 861000, tzinfo=tzutc()), u'PhysicalResourceId': 'TestKinesis-InvokeRole-IF6VUXY9MBJN', u'LogicalResourceId': 'InvokeRole'}, {u'ResourceStatus': 'CREATE_COMPLETE', u'ResourceType': 'AWS::IAM::Role', u'ResourceStatusReason': None, u'LastUpdatedTimestamp': datetime.datetime(2015, 1, 6, 17, 37, 55, 18000, tzinfo=tzutc()), u'PhysicalResourceId': 'TestKinesis-ExecRole-567SAV6TZOET', u'LogicalResourceId': 'ExecRole'}, {u'ResourceStatus': 'CREATE_COMPLETE', u'ResourceType': 'AWS::IAM::Policy', u'ResourceStatusReason': None, u'LastUpdatedTimestamp': datetime.datetime(2015, 1, 6, 17, 37, 58, 120000, tzinfo=tzutc()), u'PhysicalResourceId': 'TestK-Invo-OMW5SDLQM8FM', u'LogicalResourceId': 'InvokeRolePolicies'}, {u'ResourceStatus': 'CREATE_COMPLETE', u'ResourceType': 'AWS::IAM::Policy', u'ResourceStatusReason': None, u'LastUpdatedTimestamp': datetime.datetime(2015, 1, 6, 17, 37, 58, 454000, tzinfo=tzutc()), u'PhysicalResourceId': 'TestK-Exec-APWRVKTBPPPT', u'LogicalResourceId': 'ExecRolePolicies'}]}]
5 +
6 +iam_get_role = [{u'Role': {u'AssumeRolePolicyDocument': {u'Version': u'2012-10-17', u'Statement': [{u'Action': u'sts:AssumeRole', u'Principal': {u'Service': u's3.amazonaws.com'}, u'Effect': u'Allow', u'Condition': {u'ArnLike': {u'sts:ExternalId': u'arn:aws:s3:::*'}}, u'Sid': u''}, {u'Action': u'sts:AssumeRole', u'Principal': {u'Service': u'lambda.amazonaws.com'}, u'Effect': u'Allow', u'Sid': u''}]}, u'RoleId': 'AROAIEVJHUJG2I4MG5PSC', u'CreateDate': datetime.datetime(2015, 1, 6, 17, 37, 44, tzinfo=tzutc()), u'RoleName': 'TestKinesis-InvokeRole-IF6VUXY9MBJN', u'Path': '/', u'Arn': 'arn:aws:iam::0123456789012:role/TestKinesis-InvokeRole-FOO'}, 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'dd6e8d42-9699-11e4-afe6-d3625e8b365b'}}]
7 +
8 +logs_describe_log_streams = [{u'logStreams': [{u'firstEventTimestamp': 1417042749449, u'lastEventTimestamp': 1417042749547, u'creationTime': 1417042748263, u'uploadSequenceToken': u'49540114640150833041490484409222729829873988799393975922', u'logStreamName': u'1cc48e4e613246b7974094323259d600', u'lastIngestionTime': 1417042750483, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:1cc48e4e613246b7974094323259d600', u'storedBytes': 712}, {u'firstEventTimestamp': 1417272406988, u'lastEventTimestamp': 1417272407088, u'creationTime': 1417272405690, u'uploadSequenceToken': u'49540113907504451034164105858363493278561872472363261986', u'logStreamName': u'2782a5ff88824c85a9639480d1ed7bbe', u'lastIngestionTime': 1417272408043, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:2782a5ff88824c85a9639480d1ed7bbe', u'storedBytes': 712}, {u'firstEventTimestamp': 1420569035842, u'lastEventTimestamp': 1420569035941, u'creationTime': 1420569034614, u'uploadSequenceToken': u'49540113907883563702539166025438885323514410026454245426', u'logStreamName': u'2d62991a479b4ebf9486176122b72a55', u'lastIngestionTime': 1420569036909, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:2d62991a479b4ebf9486176122b72a55', u'storedBytes': 709}, {u'firstEventTimestamp': 1418244027421, u'lastEventTimestamp': 1418244027541, u'creationTime': 1418244026907, u'uploadSequenceToken': u'49540113964795065449189116778452984186276757901477438642', u'logStreamName': u'4f44ffa128d6405591ca83b2b0f9dd2d', u'lastIngestionTime': 1418244028484, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:4f44ffa128d6405591ca83b2b0f9dd2d', u'storedBytes': 1010}, {u'firstEventTimestamp': 1418242565524, u'lastEventTimestamp': 1418242565641, u'creationTime': 1418242564196, u'uploadSequenceToken': u'49540113095132904942090446312687285178819573422397343074', u'logStreamName': u'69c5ac87e7e6415985116e8cb44e538e', u'lastIngestionTime': 1418242566558, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:69c5ac87e7e6415985116e8cb44e538e', u'storedBytes': 713}, {u'firstEventTimestamp': 1417213193378, u'lastEventTimestamp': 1417213193478, u'creationTime': 1417213192095, u'uploadSequenceToken': u'49540113336360065754596187770479764234792559857643841394', u'logStreamName': u'f68e3d87b8a14cdba338f6926f7cf50a', u'lastIngestionTime': 1417213194421, u'arn': u'arn:aws:logs:us-east-1:0123456789012:log-group:/aws/lambda/KinesisSample:log-stream:f68e3d87b8a14cdba338f6926f7cf50a', u'storedBytes': 711}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '2a6d4941-969b-11e4-947f-19d1c72ede7e'}}]
9 +
10 +logs_get_log_events = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '2a7deb71-969b-11e4-914b-8f1f3d7b023d'}, u'nextForwardToken': u'f/31679748107442531967654742688057700554200447759088287749', u'events': [{u'ingestionTime': 1420569036909, u'timestamp': 1420569035842, u'message': u'2015-01-06T18:30:35.841Z\tko2sss03iq7l2pdk\tLoading event\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035899, u'message': u'START RequestId: 23007242-95d2-11e4-a10e-7b2ab60a7770\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035940, u'message': u'2015-01-06T18:30:35.940Z\t23007242-95d2-11e4-a10e-7b2ab60a7770\t{\n "Records": [\n {\n "kinesis": {\n "partitionKey": "partitionKey-3",\n "kinesisSchemaVersion": "1.0",\n "data": "SGVsbG8sIHRoaXMgaXMgYSB0ZXN0IDEyMy4=",\n "sequenceNumber": "49545115243490985018280067714973144582180062593244200961"\n },\n "eventSource": "aws:kinesis",\n "eventID": "shardId-000000000000:49545115243490985018280067714973144582180062593244200961",\n "invokeIdentityArn": "arn:aws:iam::0123456789012:role/testLEBRole",\n "eventVersion": "1.0",\n "eventName": "aws:kinesis:record",\n "eventSourceARN": "arn:aws:kinesis:us-east-1:35667example:stream/examplestream",\n "awsRegion": "us-east-1"\n }\n ]\n}\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035940, u'message': u'2015-01-06T18:30:35.940Z\t23007242-95d2-11e4-a10e-7b2ab60a7770\tDecoded payload: Hello, this is a test 123.\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035941, u'message': u'END RequestId: 23007242-95d2-11e4-a10e-7b2ab60a7770\n'}, {u'ingestionTime': 1420569036909, u'timestamp': 1420569035941, u'message': u'REPORT RequestId: 23007242-95d2-11e4-a10e-7b2ab60a7770\tDuration: 98.51 ms\tBilled Duration: 100 ms \tMemory Size: 128 MB\tMax Memory Used: 26 MB\t\n'}], u'nextBackwardToken': u'b/31679748105234758193000210997045664445208259969996226560'}]
11 +
12 +cfn_describe_stacks = [
13 + {u'Stacks': [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', u'Description': None, u'Tags': [], u'StackStatusReason': 'User Initiated', u'CreationTime': datetime.datetime(2015, 1, 7, 21, 59, 43, 208000, tzinfo=tzutc()), u'Capabilities': ['CAPABILITY_IAM'], u'StackName': 'TestKinesis', u'NotificationARNs': [], u'StackStatus': 'CREATE_IN_PROGRESS', u'DisableRollback': False}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '7d66debd-96b8-11e4-a647-4f4741ffff69'}},
14 + {u'Stacks': [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', u'Description': None, u'Tags': [], u'StackStatusReason': 'User Initiated', u'CreationTime': datetime.datetime(2015, 1, 7, 21, 59, 43, 208000, tzinfo=tzutc()), u'Capabilities': ['CAPABILITY_IAM'], u'StackName': 'TestKinesis', u'NotificationARNs': [], u'StackStatus': 'CREATE_IN_PROGRESS', u'DisableRollback': False}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '7e36fff7-96b8-11e4-af44-6350f4f8c2ae'}},
15 + {u'Stacks': [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', u'Description': None, u'Tags': [], u'StackStatusReason': 'User Initiated', u'CreationTime': datetime.datetime(2015, 1, 7, 21, 59, 43, 208000, tzinfo=tzutc()), u'Capabilities': ['CAPABILITY_IAM'], u'StackName': 'TestKinesis', u'NotificationARNs': [], u'StackStatus': 'CREATE_IN_PROGRESS', u'DisableRollback': False}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '7ef03e10-96b8-11e4-bc86-7f67e11abcfa'}},
16 + {u'Stacks': [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', u'Description': None, u'Tags': [], u'StackStatusReason': None, u'CreationTime': datetime.datetime(2015, 1, 7, 21, 59, 43, 208000, tzinfo=tzutc()), u'Capabilities': ['CAPABILITY_IAM'], u'StackName': 'TestKinesis', u'NotificationARNs': [], u'StackStatus': 'CREATE_COMPLETE', u'DisableRollback': False}], 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '8c2bff8e-96b8-11e4-be70-c5ad82c32f2d'}}]
17 +
18 +cfn_create_stack = [{u'StackId': 'arn:aws:cloudformation:us-east-1:084307701560:stack/TestKinesis/7c4ae730-96b8-11e4-94cc-5001dc3ed8d2', 'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': '7c2f2260-96b8-11e4-be70-c5ad82c32f2d'}}]
19 +
20 +cfn_delete_stack = [{'ResponseMetadata': {'HTTPStatusCode': 200, 'RequestId': 'f19af5b8-96bc-11e4-860e-11ba752b58a9'}}]
1 +# Copyright (c) 2014 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
13 +
14 +import unittest
15 +
16 +import mock
17 +
18 +from kappa.log import Log
19 +from tests.unit.mock_aws import get_aws
20 +
21 +
22 +class TestLog(unittest.TestCase):
23 +
24 + def setUp(self):
25 + self.aws_patch = mock.patch('kappa.aws.get_aws', get_aws)
26 + self.mock_aws = self.aws_patch.start()
27 +
28 + def tearDown(self):
29 + self.aws_patch.stop()
30 +
31 + def test_streams(self):
32 + mock_context = mock.Mock()
33 + log = Log(mock_context, 'foo/bar')
34 + streams = log.streams()
35 + self.assertEqual(len(streams), 6)
36 +
37 + def test_tail(self):
38 + mock_context = mock.Mock()
39 + log = Log(mock_context, 'foo/bar')
40 + events = log.tail()
41 + self.assertEqual(len(events), 6)
42 + self.assertEqual(events[0]['ingestionTime'], 1420569036909)
43 + self.assertIn('RequestId: 23007242-95d2-11e4-a10e-7b2ab60a7770',
44 + events[-1]['message'])
1 +# Copyright (c) 2015 Mitch Garnaat http://garnaat.org/
2 +#
3 +# Licensed under the Apache License, Version 2.0 (the "License"). You
4 +# may not use this file except in compliance with the License. A copy of
5 +# the License is located at
6 +#
7 +# http://aws.amazon.com/apache2.0/
8 +#
9 +# or in the "license" file accompanying this file. This file is
10 +# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11 +# ANY KIND, either express or implied. See the License for the specific
12 +# language governing permissions and limitations under the License.
13 +
14 +import unittest
15 +import os
16 +
17 +import mock
18 +
19 +from kappa.stack import Stack
20 +from tests.unit.mock_aws import get_aws
21 +
22 +Config = {
23 + 'template': 'roles.cf',
24 + 'stack_name': 'FooBar',
25 + 'exec_role': 'ExecRole',
26 + 'invoke_role': 'InvokeRole'}
27 +
28 +
29 +def path(filename):
30 + return os.path.join(os.path.dirname(__file__), 'data', filename)
31 +
32 +
33 +class TestStack(unittest.TestCase):
34 +
35 + def setUp(self):
36 + self.aws_patch = mock.patch('kappa.aws.get_aws', get_aws)
37 + self.mock_aws = self.aws_patch.start()
38 + Config['template'] = path(Config['template'])
39 +
40 + def tearDown(self):
41 + self.aws_patch.stop()
42 +
43 + def test_properties(self):
44 + mock_context = mock.Mock()
45 + stack = Stack(mock_context, Config)
46 + self.assertEqual(stack.name, Config['stack_name'])
47 + self.assertEqual(stack.template_path, Config['template'])
48 + self.assertEqual(stack.exec_role, Config['exec_role'])
49 + self.assertEqual(stack.invoke_role, Config['invoke_role'])
50 + self.assertEqual(
51 + stack.invoke_role_arn,
52 + 'arn:aws:iam::0123456789012:role/TestKinesis-InvokeRole-FOO')
53 +
54 + def test_exists(self):
55 + mock_context = mock.Mock()
56 + stack = Stack(mock_context, Config)
57 + self.assertTrue(stack.exists())
58 +
59 + def test_create(self):
60 + mock_context = mock.Mock()
61 + stack = Stack(mock_context, Config)
62 + stack.create()
63 +
64 + def test_delete(self):
65 + mock_context = mock.Mock()
66 + stack = Stack(mock_context, Config)
67 + stack.delete()