check_file_lambda.py
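"""
Lambda function that inventories every VPC (region, CIDR block, VPC ID) across
all EC2 regions, stores the inventory as an object in S3, and reports whether
it changed since the previous run.
"""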
import boto3
import json

ec2 = boto3.client('ec2')

# S3 bucket and object key used to persist the most recent VPC inventory.
vpc_file = "vpc_list.txt"
bucket_name = "diya-bucket"
region = "us-east-2"

# Accumulates one dict per VPC; filled by lambda_handler().
vpc_rows = []

# def check_bucket_or_create(s3_client):
#     s3 = boto3.resource('s3')
#     exist = s3.Bucket('diya-test-bucket') in s3.buckets.all()
#     if exist:
#         print("Bucket Exist")
#     else:
#         print("Bucket does not exist")
#         s3_client.create_bucket(Bucket='diya-test-bucket')

def bucket_exists():
    """Return True if the target bucket already exists in this account."""
    s3 = boto3.resource('s3')
    return s3.Bucket(bucket_name) in s3.buckets.all()


def create_bucket():
    """Create the target bucket in the configured region."""
    # Note: for us-east-1 the CreateBucketConfiguration argument must be
    # omitted; it is required for every other region, including us-east-2.
    s3_client = boto3.client('s3', region_name=region)
    location = {'LocationConstraint': region}
    s3_client.create_bucket(Bucket=bucket_name, CreateBucketConfiguration=location)


def key_create(s3, vpc_rows):
    """Write the serialized VPC list to the configured bucket and key."""
    s3.put_object(
        Bucket=bucket_name,
        Key=vpc_file,
        Body=vpc_rows,
    )

def s3_upload(vpc_rows):
    """Upload the VPC list, creating the bucket on first run, and report changes."""
    s3 = boto3.client('s3')
    # The list of dicts is stored as its string representation.
    vpc_rows = str(vpc_rows)
    if bucket_exists():
        print("BUCKET EXISTS")
    else:
        # First run: create the bucket, store the inventory, and stop here.
        print("BUCKET DOES NOT EXIST")
        create_bucket()
        key_create(s3, vpc_rows)
        return
    # Compare the previously stored inventory with the current one.
    s3r = boto3.resource('s3')
    obj = s3r.Object(bucket_name, vpc_file)
    body2 = obj.get()['Body'].read().decode(encoding="utf-8", errors="ignore")
    if body2 == vpc_rows:
        print("VPC REMAINS UNCHANGED")
    else:
        print("VPC CHANGED")
        print(body2)
        print(vpc_rows)
        key_create(s3, vpc_rows)

def lambda_handler(event, context):
    # Retrieve all regions/endpoints that work with EC2.
    response = ec2.describe_regions()
    regions = response['Regions']
    for rgn in regions:
        region_name = rgn['RegionName']
        # Describe the VPCs in each region and record their key attributes.
        ec2_regional = boto3.client('ec2', region_name=region_name)
        response = ec2_regional.describe_vpcs()
        vpcs = response['Vpcs']
        for vpc in vpcs:
            vpc_row = {
                'region': region_name,
                'CidrBlock': vpc['CidrBlock'],
                'VpcId': vpc['VpcId'],
            }
            vpc_rows.append(vpc_row)
    # Persist the inventory and compare it with the previous run.
    s3_upload(vpc_rows)
    return {
        'statusCode': 200,
        'body': json.dumps('Execution successful')
    }
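

# Minimal local-invocation sketch (not part of the deployed Lambda). It assumes
# boto3 is installed and AWS credentials with EC2/S3 permissions are available
# in the environment; the empty event and None context mimic a scheduled trigger.
if __name__ == "__main__":
    result = lambda_handler({}, None)
    print(result)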