

Refactor the function to use async
mwhouser committed Nov 17, 2015
1 parent bb6fa26 commit d750cab
Showing 2 changed files with 114 additions and 71 deletions.
20 changes: 19 additions & 1 deletion README.md
@@ -48,10 +48,28 @@ Create an IAM role with the following policy:
 }
 ```
 
+### Building the Lambda Package
+
+1. Clone this repo
+
+```
+git clone git@github.com:eleven41/aws-lambda-encrypt-s3-objects.git
+cd aws-lambda-encrypt-s3-objects
+```
+
+2. Install requirements
+
+```
+npm install async
+npm install aws-sdk
+```
+
+3. Zip up the folder using your favourite zipping utility
+
 ### Lambda Function
 
 1. Create a new Lambda function.
-2. Upload the file index.js as the code for your Lambda function.
+2. Upload the ZIP package for the Lambda function using `index.handler` as the handler.
 3. Add an event source to your Lambda function:
   * Event Source Type: S3
   * Bucket: your source bucket
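
Step 3 of the new "Building the Lambda Package" section above leaves the choice of zipping tool open. A minimal sketch of one way to do it from a shell, assuming the common `zip` CLI and an arbitrary archive name; the detail that matters is that `index.js` and `node_modules` sit at the top level of the archive so the `index.handler` handler can be resolved:

```
# Run from inside the cloned repository so index.js and node_modules end up
# at the root of the archive (the archive name here is made up).
cd aws-lambda-encrypt-s3-objects
zip -r aws-lambda-encrypt-s3-objects.zip index.js node_modules
```
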
165 changes: 95 additions & 70 deletions index.js
@@ -1,10 +1,11 @@
-// Load up all dependencies
-var AWS = require('aws-sdk');
+console.log("Version 0.3.0");
 
-console.log("Version 0.2.0");
+// Load up all dependencies
+var AWS = require('aws-sdk');
+var async = require('async');
 
 // get reference to S3 client
-var s3 = new AWS.S3();
+var s3 = new AWS.S3({ apiVersion: '2006-03-01' });
 
 // This is the entry-point to the Lambda function.
 exports.handler = function (event, context) {
@@ -14,71 +15,86 @@ exports.handler = function (event, context) {
         return;
     }
 
-    // Process all records in the event.
-    for (var i = 0; i < event.Records.length; ++i) {
-
-        var record = event.Records[i];
-        if (record.s3 == null) {
-            context.fail('Error', "Event record is missing s3 structure.");
-            return;
+    // Process all records in the event asynchronously.
+    async.each(event.Records, processRecord, function (err) {
+        if (err) {
+            context.fail('Error', "One or more objects could not be encrypted.");
+        } else {
+            context.succeed();
         }
-
-        // The bucket and key are part of the event data
-        var bucket = record.s3.bucket.name;
-        var key = decodeURIComponent(record.s3.object.key.replace(/\+/g, " "));
-
-        console.log('Processing ' + bucket + '/' + key);
-
-        // Get the head data to determine if the object is already encrypted.
-        console.log('Getting object head');
-        s3.headObject({
-            Bucket: bucket,
-            Key: key
-        }, function (err, data) {
-            if (err) {
-                console.log('Error getting object head:');
-                console.log(err, err.stack); // an error occurred
-                context.fail('Error', "Error getting object head: " + err);
-            } else if (data.ServerSideEncryption != 'AES256') {
-
-                readConfig(bucket, function (err, config) {
-                    if (err) {
-                        console.log('Error reading configuration from tags:');
-                        console.log(err, err.stack); // an error occurred
-                        context.fail('Error', "Error reading configuration from tags: " + err);
-                    } else {
+    });
+};
 
-                        var storageClass = 'STANDARD';
-                        if (config.setReducedRedundancy)
-                            storageClass = 'REDUCED_REDUNDANCY';
-
-                        // Copy the object adding the encryption
-                        console.log('Updating object');
-                        s3.copyObject({
-                            Bucket: bucket,
-                            Key: key,
-
-                            CopySource: encodeURIComponent(bucket + '/' + key),
-                            MetadataDirective: 'COPY',
-                            ServerSideEncryption: 'AES256',
-                            StorageClass: storageClass
-                        }, function (err, data) {
-                            if (err) {
-                                console.log('Error updating object:');
-                                console.log(err, err.stack); // an error occurred
-                                context.fail('Error', "Error updating object: " + err);
-                            } else {
-                                context.succeed(bucket + '/' + key + ' updated.');
-                            }
-                        });
-                    }
-                });
-            } else {
-                context.succeed(bucket + '/' + key + " is already encrypted using 'AES256'.");
-            }
-        });
+// processRecord
+//
+// Iterator function for async.each (called by the handler above).
+//
+// 1. Gets the head of the object to determine it's current encryption state.
+// 2. Gets the encryption configuration from the bucket's tags.
+// 3. Copies the object with the desired encryption.
+function processRecord(record, callback) {
+    if (record.s3 == null) {
+        callback("Event record is missing s3 structure.");
+        return;
     }
-};
 
+    // The bucket and key are part of the event data
+    var bucket = record.s3.bucket.name;
+    var key = decodeURIComponent(record.s3.object.key.replace(/\+/g, " "));
+
+    console.log('Processing ' + bucket + '/' + key);
+
+    // Get the head data to determine if the object is already encrypted.
+    console.log('Getting object head');
+    s3.headObject({
+        Bucket: bucket,
+        Key: key
+    }, function (err, data) {
+        if (err) {
+            console.log('Error getting object head:');
+            console.log(err, err.stack); // an error occurred
+            callback("Error getting object head: " + bucket + '/' + key);
+        } else if (data.ServerSideEncryption != 'AES256') {
+
+            readConfig(bucket, function (err, config) {
+                if (err) {
+                    console.log('Error reading configuration from tags:');
+                    console.log(err, err.stack); // an error occurred
+                    callback("Error reading configuration from tags: " + err);
+                } else {
+
+                    var storageClass = 'STANDARD';
+                    if (config.setReducedRedundancy)
+                        storageClass = 'REDUCED_REDUNDANCY';
+
+                    // Copy the object adding the encryption
+                    console.log('Updating object');
+                    s3.copyObject({
+                        Bucket: bucket,
+                        Key: key,
+
+                        CopySource: encodeURIComponent(bucket + '/' + key),
+                        MetadataDirective: 'COPY',
+                        ServerSideEncryption: 'AES256',
+                        StorageClass: storageClass
+                    }, function (err, data) {
+                        if (err) {
+                            console.log('Error updating object:');
+                            console.log(err, err.stack); // an error occurred
+                            callback("Error updating object: " + err);
+                        } else {
+                            console.log(bucket + '/' + key + ' updated.');
+                            callback();
+                        }
+                    });
+                }
+            });
+        } else {
+            console.log(bucket + '/' + key + " is already encrypted using 'AES256'.");
+            callback();
+        }
+    });
+}
+
 // readConfig
 //
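
The handler now hands every record to `processRecord` through `async.each`, which starts the iterator for all items in parallel and calls the final callback once: with the first error any iterator reports, or with no error after every item has finished. As a result, `context.succeed`/`context.fail` fire only after all records are handled, rather than per record as in the old loop. A standalone sketch of that control flow, using made-up records and a made-up worker rather than the project's own code:

```
// Illustration only: the records and the worker are hypothetical.
var async = require('async');

var records = ['first.txt', 'second.txt', 'third.txt'];

function processOne(name, callback) {
    // Simulate an asynchronous per-record task.
    setTimeout(function () {
        console.log('processed ' + name);
        callback(); // calling back with an error would trigger the final callback immediately
    }, 10);
}

async.each(records, processOne, function (err) {
    if (err) {
        console.log('at least one record failed:', err);
    } else {
        console.log('all records processed');
    }
});
```
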
@@ -87,18 +103,27 @@
 // Once found, it calls the callback function passing
 // the configuration.
 function readConfig(bucketName, callback) {
 
+    var defaultConfig = {
+        setReducedRedundancy: false
+    };
+
     console.log("Getting tags for bucket '" + bucketName + "'");
     s3.getBucketTagging({
         Bucket: bucketName
     }, function (err, data) {
         if (err) {
-            callback(err, null);
+            if (err.code == 'NoSuchTagSet') {
+                // No tags on the bucket, so just send the defaults
+                callback(null, defaultConfig);
+            } else {
+                // Some other error
+                callback(err, null);
+            }
         } else {
 
             // Set defaults
-            var config = {
-                setReducedRedundancy: false
-            };
+            var config = defaultConfig;
 
             var tags = data.TagSet;
+
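
The reworked error branch means a bucket with no tag set at all (S3 returns a `NoSuchTagSet` error) is no longer treated as a failure; `readConfig` simply hands back `defaultConfig`. A usage sketch under that behaviour; the bucket name is hypothetical, and only the `setReducedRedundancy` field visible in the diff is assumed:

```
// Illustration only; 'my-source-bucket' is a made-up bucket name.
readConfig('my-source-bucket', function (err, config) {
    if (err) {
        // Only errors other than NoSuchTagSet (e.g. permission problems) land here.
        console.log('Could not read configuration:', err);
        return;
    }

    // An untagged bucket yields the defaults: { setReducedRedundancy: false }
    var storageClass = config.setReducedRedundancy ? 'REDUCED_REDUNDANCY' : 'STANDARD';
    console.log('Objects will be copied with storage class ' + storageClass);
});
```
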

