lambdafolder.py
import boto3
import os
import json
from datetime import datetime, timedelta

# Global variables for the source and destination bucket names (placeholders).
SRC_BUCKET_NAME = 'bucket-name'
DEV_DST_BUCKET_NAME = 'bucket-name'
INT_DST_BUCKET_NAME = 'bucket-name'
TST_DST_BUCKET_NAME = 'bucket-name'
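
# Note (inferred from the code below, not stated in the original): the destination
# buckets live in other AWS accounts, so each upload first assumes a role in the
# target account via STS. Account IDs, role names and bucket names are placeholders.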

def download_s3_file(myfile):
    s3_source = boto3.client('s3')
    # Split the object key into its folder and file segments.
    items = myfile.split('/')
    fparent = items[0]
    ffile = items[1]
    folder_name = fparent
    # Download the object into /tmp; only the file itself is saved there,
    # not the folder prefix.
    s3_source.download_file(
        SRC_BUCKET_NAME, folder_name + '/' + ffile, '/tmp/' + ffile)
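
# A minimal alternative sketch (not part of the original): splitting the key with
# os.path handles prefixes that are more than one level deep, while keeping the
# same '/tmp/<file>' download target. The function name is illustrative only.
def download_s3_file_nested(key):
    s3_source = boto3.client('s3')
    folder_name, ffile = os.path.split(key)  # e.g. 'a/b/c.csv' -> ('a/b', 'c.csv')
    s3_source.download_file(SRC_BUCKET_NAME, key, '/tmp/' + ffile)
    return folder_name, ffile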

def upload_s3_file(myfile):
    sts = boto3.client('sts')
    sns = boto3.client('sns')  # needed below for the one-time SNS notification
    # DEV
    sts_result_dev = sts.assume_role(
        RoleArn='arn:aws:iam::' + 'acctid' + ':role/Role-Name', RoleSessionName='session')
    s3_dest_dev = boto3.client('s3', aws_access_key_id=sts_result_dev['Credentials']['AccessKeyId'],
                               aws_secret_access_key=sts_result_dev['Credentials']['SecretAccessKey'],
                               aws_session_token=sts_result_dev['Credentials']['SessionToken'])
    items = myfile.split('/')
    fparent = items[0]
    ffile = items[1]
    folder_name = fparent
    # Create the folder placeholder object, then upload the file under it.
    s3_dest_dev.put_object(Bucket=DEV_DST_BUCKET_NAME, Key=(folder_name + '/'))
    s3_dest_dev.upload_file(
        '/tmp/' + ffile, DEV_DST_BUCKET_NAME, folder_name + '/' + ffile)
    # Trigger the SNS message only once, after the expected number of objects
    # has arrived in the destination folder.
    objs = s3_dest_dev.list_objects_v2(Bucket=DEV_DST_BUCKET_NAME, Prefix=folder_name)
    message = 'List of Objects \n'
    fileCount = objs['KeyCount']
    print(fileCount)
    # 11 presumably accounts for the folder placeholder object plus 10 expected files.
    if fileCount == 11:
        for obj in objs['Contents']:
            message = message + obj['Key'] + '\n'
        response = sns.publish(
            TopicArn='arn:aws:sns:us-east-1:a/c no.:my_sns',
            Message=message
        )
    # INT
    sts_result_int = sts.assume_role(
        RoleArn='arn:aws:iam::' + 'acctid' + ':role/Role-Name', RoleSessionName='session')
    s3_dest_int = boto3.client('s3', aws_access_key_id=sts_result_int['Credentials']['AccessKeyId'],
                               aws_secret_access_key=sts_result_int['Credentials']['SecretAccessKey'],
                               aws_session_token=sts_result_int['Credentials']['SessionToken'])
    items = myfile.split('/')
    fparent = items[0]
    ffile = items[1]
    folder_name = fparent
    s3_dest_int.put_object(Bucket=INT_DST_BUCKET_NAME, Key=(folder_name + '/'))
    s3_dest_int.upload_file(
        '/tmp/' + ffile, INT_DST_BUCKET_NAME, folder_name + '/' + ffile)
    # TST
    sts_result_tst = sts.assume_role(
        RoleArn='arn:aws:iam::' + 'acctid' + ':role/Role-Name', RoleSessionName='session')
    s3_dest_tst = boto3.client('s3', aws_access_key_id=sts_result_tst['Credentials']['AccessKeyId'],
                               aws_secret_access_key=sts_result_tst['Credentials']['SecretAccessKey'],
                               aws_session_token=sts_result_tst['Credentials']['SessionToken'])
    items = myfile.split('/')
    fparent = items[0]
    ffile = items[1]
    folder_name = fparent
    s3_dest_tst.put_object(Bucket=TST_DST_BUCKET_NAME, Key=(folder_name + '/'))
    s3_dest_tst.upload_file(
        '/tmp/' + ffile, TST_DST_BUCKET_NAME, folder_name + '/' + ffile)
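
# A minimal refactoring sketch (not part of the original): the DEV/INT/TST blocks
# above differ only in role ARN and destination bucket, so they could be driven by
# a small helper like this. The function name, role ARNs and buckets passed to it
# are placeholders.
def copy_to_account(myfile, role_arn, dst_bucket):
    folder_name, ffile = myfile.split('/')[0], myfile.split('/')[1]
    creds = boto3.client('sts').assume_role(
        RoleArn=role_arn, RoleSessionName='session')['Credentials']
    s3_dest = boto3.client('s3',
                           aws_access_key_id=creds['AccessKeyId'],
                           aws_secret_access_key=creds['SecretAccessKey'],
                           aws_session_token=creds['SessionToken'])
    s3_dest.put_object(Bucket=dst_bucket, Key=folder_name + '/')
    s3_dest.upload_file('/tmp/' + ffile, dst_bucket, folder_name + '/' + ffile)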

def lambda_handler(event, context):
    # Handler for the case where the S3 bucket triggers this Lambda directly.
    try:
        sns = boto3.client('sns')
        print(event)
        myfile = event['Records'][0]['s3']['object']['key']
        print(myfile)
        # This publishes an SNS notification on every event, so 10 events produce
        # 10 separate notifications -- not good practice; see the one-time logic
        # in upload_s3_file instead.
        response = sns.publish(
            TopicArn='arn:aws:sns:us-east-1:a/c number:sns_name',
            Message=myfile,
        )
        download_s3_file(myfile)
        upload_s3_file(myfile)
    except Exception as e:
        print(e)

# Remember to increase the Lambda execution timeout (to, say, 10 minutes), since the
# function performs multiple file transfers between several accounts.
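# A minimal CLI sketch for raising the timeout; the function name is a placeholder:
#   aws lambda update-function-configuration \
#       --function-name lambdafolder --timeout 600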

# Alternative: trigger the Lambda function from an SNS topic instead of directly from S3.
# Note: because both handlers share the name lambda_handler, this second definition
# supersedes the one above when the module is loaded.
def lambda_handler(event, context):
    try:
        # The S3 event arrives wrapped inside the SNS message body as a JSON string.
        key = json.loads(event['Records'][0]['Sns']['Message'])
        myfile = key['Records'][0]['s3']['object']['key']
        print(myfile)
        download_s3_file(myfile)
        upload_s3_file(myfile)
    except Exception as e:
        print(e)
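
# A minimal local-invocation sketch (not part of the original): the event shape
# below is an assumption of what an SNS-wrapped S3 notification looks like, with
# placeholder bucket and key values, useful for a quick smoke test of the parsing
# logic in the SNS-triggered handler above.
if __name__ == '__main__':
    sample_event = {
        'Records': [{
            'Sns': {
                'Message': json.dumps({
                    'Records': [{
                        's3': {'bucket': {'name': 'bucket-name'},
                               'object': {'key': 'folder-name/file-name.csv'}}
                    }]
                })
            }
        }]
    }
    lambda_handler(sample_event, None)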