npm install aws-s3-zipper --save
Takes an Amazon S3 bucket folder and zips it to a:
- Stream
- Local File
- Local File Fragments (zip multiple files broken up by max number of files or size)
- S3 File (i.e. uploads the zip back to S3)
- S3 File Fragments (uploads multiple zip files broken up by max number of files or size)
It also allows you to do differential zips. You can save the key of the last file you zipped and then zip files that have been uploaded after the last zip.
If a zip file has the potential of getting too big, you can provide limits to break up the compression into multiple zip files. You can limit based on file count or total size (pre-zip).
You can filter out files you don't want zipped based on any criteria you need.
var S3Zipper = require('aws-s3-zipper');

var config = {
    accessKeyId: "XXXX",
    secretAccessKey: "XXXX",
    region: "us-west-2",
    bucket: 'XXX'
};

var zipper = new S3Zipper(config);

zipper.filterOutFiles = function (file) {
    if (file.Key.indexOf('.tmp') >= 0) // filter out temp files
        return null;
    else
        return file;
};
zipper.zipToFile({
    s3FolderName: 'myBucketFolderName'
    , startKey: 'keyOfLastFileIZipped' // could keep null
    , zipFileName: './myLocalFile.zip'
    , recursive: true
}, function (err, result) {
    if (err)
        console.error(err);
    else {
        var lastFile = result.zippedFiles[result.zippedFiles.length - 1];
        if (lastFile)
            console.log('last key ', lastFile.Key); // next time start from here
    }
});
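Putting differential zipping into practice: the last zipped key can be persisted and fed back in as startKey on the next run. This is a minimal sketch; the state file ./lastKey.json is an arbitrary choice for illustration, not part of the library.

// Sketch: persist the last zipped key so the next run only zips newer files.
// The state file './lastKey.json' is an arbitrary example.
var fs = require('fs');

var lastKey = null;
try {
    lastKey = JSON.parse(fs.readFileSync('./lastKey.json', 'utf8')).lastKey;
} catch (e) { /* first run: no state file yet */ }

zipper.zipToFile({
    s3FolderName: 'myBucketFolderName'
    , startKey: lastKey // null on the first run zips everything
    , zipFileName: './myLocalFile.zip'
    , recursive: true
}, function (err, result) {
    if (err)
        return console.error(err);
    var lastFile = result.zippedFiles[result.zippedFiles.length - 1];
    if (lastFile)
        fs.writeFileSync('./lastKey.json', JSON.stringify({ lastKey: lastFile.Key }));
});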
app.all('/', function (request, response) {
    response.set('content-type', 'application/zip'); // optional
    zipper.streamZipDataTo({
        pipe: response
        , folderName: 'myBucketFolderName'
        , startKey: 'keyOfLastFileIZipped' // could keep null
        , recursive: true
    }, function (err, result) {
        if (err)
            console.error(err);
        else {
            console.log(result);
        }
    });
});
zipper.zipToFileFragments({
    s3FolderName: 'myBucketFolderName'
    , startKey: null
    , zipFileName: './myLocalFile.zip'
    , maxFileCount: 5
    , maxFileSize: 1024 * 1024
}, function (err, results) {
    if (err)
        console.error(err);
    else if (results.length > 0) {
        var result = results[results.length - 1];
        var lastFile = result.zippedFiles[result.zippedFiles.length - 1];
        if (lastFile)
            console.log('last key ', lastFile.Key); // next time start from here
    }
});
// if no path is given for the S3 zip file, it will be placed in the same folder
zipper.zipToS3File({
    s3FolderName: 'myBucketFolderName'
    , startKey: 'keyOfLastFileIZipped' // optional
    , s3ZipFileName: 'myS3File.zip'
    , tmpDir: "/tmp" // optional, defaults to node_modules/aws-s3-zipper
}, function (err, result) {
    if (err)
        console.error(err);
    else {
        var lastFile = result.zippedFiles[result.zippedFiles.length - 1];
        if (lastFile)
            console.log('last key ', lastFile.Key); // next time start from here
    }
});
zipper.zipToS3FileFragments({
    s3FolderName: 'myBucketFolderName'
    , startKey: 'keyOfLastFileIZipped' // optional
    , s3ZipFileName: 'myS3File.zip'
    , maxFileCount: 5
    , maxFileSize: 1024 * 1024
    , tmpDir: "/tmp" // optional, defaults to node_modules/aws-s3-zipper
}, function (err, results) {
    if (err)
        console.error(err);
    else if (results.length > 0) {
        var result = results[results.length - 1];
        var lastFile = result.zippedFiles[result.zippedFiles.length - 1];
        if (lastFile)
            console.log('last key ', lastFile.Key); // next time start from here
    }
});
Either from the constructor or from the init(config) function you can pass along the AWS config object:

{
    accessKeyId: [your access key id],
    secretAccessKey: [your secret access key],
    region: [the region of your S3 bucket],
    bucket: [your bucket name],
    endpoint: [optional, for use with S3-compatible services]
}
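As a quick illustration, the same config object can also be handed to init(config) on an existing instance. The sketch below assumes init can be called again later to swap the config; the credentials and bucket names are placeholders.

// Sketch only: config passed via the constructor, then swapped via init(config).
// Credentials and bucket names are placeholders.
var S3Zipper = require('aws-s3-zipper');

var zipper = new S3Zipper({
    accessKeyId: "XXXX",
    secretAccessKey: "XXXX",
    region: "us-west-2",
    bucket: 'myFirstBucket'
});

// later, re-point the same instance at another bucket
zipper.init({
    accessKeyId: "XXXX",
    secretAccessKey: "XXXX",
    region: "us-west-2",
    bucket: 'mySecondBucket'
});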
Override this function when you want to filter out certain files. The file param passed to you follows the AWS file format, which (as of when this document was written) looks like this:

{
    Key: [file key], // this is what you use to keep track of where you left off
    ETag: [file tag],
    LastModified: [last modified date],
    Owner: {},
    Size: [size in bytes],
    StorageClass: [type of storage]
}
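The hook can filter on any of these fields. Below is a minimal sketch that skips anything larger than 10 MB and anything that is not a .jpg; the size threshold and extension are arbitrary examples, not part of the library.

// Sketch: filter on any field of the AWS file object.
// The 10 MB limit and the .jpg check are arbitrary examples.
zipper.filterOutFiles = function (file) {
    if (file.Size > 10 * 1024 * 1024) // skip large files
        return null;
    if (file.Key.indexOf('.jpg') < 0) // keep only .jpg files
        return null;
    return file; // include everything else
};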
Get a list of files in the bucket folder.

params
- object
  - folderName: the name of the folder in the bucket
  - startKey: optional. return files listed after this file key
  - recursive: bool optional. to zip nested folders or not
- callback(err, result): the function you want called when the list returns
  - err: error object if it exists
  - result:
    - files: array of files found
    - totalFilesScanned: total number of files scanned, including files filtered out by the filterOutFiles function
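The method name is not shown in this excerpt; the sketch below assumes the listing call is exposed as getFiles(params, callback) and relies only on the params and callback shape described above.

// Sketch: list files without zipping. The name getFiles is an assumption;
// the params and callback shape follow the description above.
zipper.getFiles({
    folderName: 'myBucketFolderName'
    , startKey: null // optional: resume after this key
    , recursive: true
}, function (err, result) {
    if (err)
        console.error(err);
    else {
        console.log('found', result.files.length, 'files');
        console.log('scanned', result.totalFilesScanned, 'files in total');
    }
});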
Use this when you want a stream to pipe raw zip data to, for example to stream the zip file directly to an HTTP response.

params
- object
  - pipe: the pipe to which you want the stream to feed
  - folderName: the name of the bucket folder you want to stream
  - startKey: optional. start zipping after this file key
  - recursive: bool optional. to zip nested folders or not
- callback(err, result): call this function when done
  - err: the error object if any
  - result: the resulting archiver zip object with attached property 'manifest', which is an array of files it zipped
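Besides an HTTP response (see the Express example above), any writable stream should work as the pipe target; that is an assumption based on the pipe parameter, not something the library states. A sketch that writes straight to a local file:

// Sketch: pipe the zip stream to a local file instead of an HTTP response.
// Assumes any writable stream (here fs.createWriteStream) is a valid pipe target.
var fs = require('fs');

var out = fs.createWriteStream('./myBucketFolder.zip');

zipper.streamZipDataTo({
    pipe: out
    , folderName: 'myBucketFolderName'
    , startKey: null
    , recursive: true
}, function (err, result) {
    if (err)
        console.error(err);
    else
        console.log('zipped', result.manifest.length, 'files');
});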
Zip files in an S3 folder and place the zip file back on S3.

params
- object
  - s3FolderName: the name of the bucket folder you want to zip
  - startKey: optional. start zipping after this file key
  - s3ZipFileName: the name of the new S3 zip file including its path. if no path is given it will default to the same S3 folder
  - recursive: bool optional. to zip nested folders or not
- callback(err, result): call this function when done
  - err: the error object if any
  - result: the resulting archiver zip object with attached property 'manifest', which is an array of files it zipped
Zip files in an S3 folder into multiple zip fragments and upload them back to S3.

params
- object
  - s3FolderName: the name of the bucket folder you want to zip
  - startKey: optional. start zipping after this file key
  - s3ZipFileName: the pattern for the names of the S3 zip files to be uploaded. fragments will have an underscore and index appended to the file name, for example ["allImages_1.zip", "allImages_2.zip", "allImages_3.zip"]
  - maxFileCount: optional. maximum number of files to zip in a single fragment
  - maxFileSize: optional. maximum bytes to fit into a single zip fragment. note: if a file is found larger than the limit, a separate fragment will be created just for it
  - recursive: bool optional. to zip nested folders or not
- callback(err, results): call this function when done
  - err: the error object if any
  - results: the array of results
Zip files to a local zip file.

params
- object
  - s3FolderName: the name of the bucket folder you want to zip
  - startKey: optional. start zipping after this file key
  - zipFileName: the name of the new local zip file including its path
  - recursive: bool optional. to zip nested folders or not
- callback(err, result): call this function when done
  - err: the error object if any
  - result: the resulting archiver zip object with attached property 'manifest', which is an array of files it zipped
Zip files from an S3 folder into multiple local zip file fragments.

params
- object
  - s3FolderName: the name of the bucket folder you want to zip
  - startKey: optional. start zipping after this file key
  - zipFileName: the pattern for the names of the local zip files to be created. fragments will have an underscore and index appended to the file name, for example ["allImages_1.zip", "allImages_2.zip", "allImages_3.zip"]
  - maxFileCount: optional. maximum number of files to zip in a single fragment
  - maxFileSize: optional. maximum bytes to fit into a single zip fragment. note: if a file is found larger than the limit, a separate fragment will be created just for it
  - recursive: bool optional. to zip nested folders or not
- callback(err, results): call this function when done
  - err: the error object if any
  - results: the array of results