AWS Lambda Function for Snapshots

You can back up the data on your EBS volumes to Amazon S3 by taking point-in-time snapshots. A Lambda function can automate this for daily or weekly backups, and it is a better option than keeping a dedicated machine around just to run Bash or PowerShell scripts. The function below snapshots every EBS volume attached to your instances and also deletes snapshots that are 30 days old or older.

import boto3
import datetime
import logging

logger = logging.getLogger()
logger.setLevel(logging.INFO)

# Retention period of 30 days - change accordingly
retention_days = 30


def _getRegions():
    """Return the list of EC2 regions available to this account."""
    ec2Client = boto3.client('ec2')
    return ec2Client.describe_regions().get('Regions', [])


def lambda_handler(event, context):
    regions = _getRegions()
    for region in regions:
        region_name = region.get('RegionName', '')
        ec = boto3.client('ec2', region_name=region_name)

        # Snapshot every EBS volume attached to every instance in this region
        reservations = ec.describe_instances()['Reservations']
        for reservation in reservations:
            for instance in reservation['Instances']:
                # Optionally, read the retention period from a per-instance 'Retention' tag:
                # try:
                #     retention_days = [int(t.get('Value')) for t in instance['Tags'] if t['Key'] == 'Retention'][0]
                # except (KeyError, IndexError, ValueError):
                #     retention_days = 14
                try:
                    instance_name = [str(t.get('Value')) for t in instance.get('Tags', []) if t['Key'] == 'Name'][0]
                except IndexError:
                    logger.error('Name tag not defined for ' + instance['InstanceId'])
                    instance_name = str(instance['InstanceId'])

                for dev in instance['BlockDeviceMappings']:
                    if dev.get('Ebs', None) is None:
                        continue  # skip non-EBS (instance store) devices
                    vol_id = dev['Ebs']['VolumeId']
                    logger.info('Found EBS volume %s on instance %s', vol_id, instance['InstanceId'])
                    snap = ec.create_snapshot(VolumeId=vol_id)
                    # Tag the snapshot so it is easy to identify later
                    value = instance_name + '-' + str(dev['DeviceName']) + '-' + datetime.datetime.utcnow().strftime("%Y:%m:%d:%H:%M")
                    ec.create_tags(Resources=[snap['SnapshotId']], Tags=[{'Key': 'Name', 'Value': value}])

        # Delete snapshots in this region that are older than the retention period
        delete_date = datetime.datetime.utcnow() - datetime.timedelta(days=retention_days)
        delete_date = delete_date.strftime('%Y-%m-%dT%H:%M:%S.000Z')
        logger.info('Deleting snapshots older than %d days (before %s) in region %s', retention_days, delete_date, region_name)
        # OwnerIds must be your AWS account ID so only your own snapshots are considered
        snapshots = ec.describe_snapshots(OwnerIds=['XXXXXXXXXXX'])['Snapshots']
        for snapshot in snapshots:
            start_date = snapshot['StartTime'].strftime('%Y-%m-%dT%H:%M:%S.000Z')
            if start_date < delete_date:
                try:
                    logger.info('Deleting snapshot: snapshotid-' + snapshot['SnapshotId'] + ' createddate-' + str(snapshot['StartTime']))
                    ec.delete_snapshot(SnapshotId=snapshot['SnapshotId'])
                except Exception:
                    logger.error('Error deleting snapshot: snapshotid-' + snapshot['SnapshotId'] + ' createddate-' + str(snapshot['StartTime']))
    return 'success'
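
To run the function on a daily or weekly schedule, you can trigger it with an Amazon EventBridge (CloudWatch Events) rule instead of invoking it by hand. The sketch below is one way to wire that up with boto3; the function name, rule name, and ARN are placeholder assumptions and should be replaced with the values from your own account.

import boto3

# Placeholder names and ARNs - replace with your own
FUNCTION_NAME = 'ebs-snapshot-backup'
FUNCTION_ARN = 'arn:aws:lambda:us-east-1:XXXXXXXXXXX:function:ebs-snapshot-backup'
RULE_NAME = 'daily-ebs-snapshot'

events = boto3.client('events')
lambda_client = boto3.client('lambda')

# Create (or update) a rule that fires once a day; use 'rate(7 days)' for weekly backups
rule = events.put_rule(
    Name=RULE_NAME,
    ScheduleExpression='rate(1 day)',
    State='ENABLED',
)

# Allow EventBridge to invoke the Lambda function
lambda_client.add_permission(
    FunctionName=FUNCTION_NAME,
    StatementId='AllowEventBridgeInvoke',
    Action='lambda:InvokeFunction',
    Principal='events.amazonaws.com',
    SourceArn=rule['RuleArn'],
)

# Point the rule at the function
events.put_targets(
    Rule=RULE_NAME,
    Targets=[{'Id': 'ebs-snapshot-target', 'Arn': FUNCTION_ARN}],
)

The Lambda execution role also needs IAM permissions for the EC2 calls the function makes: ec2:DescribeRegions, ec2:DescribeInstances, ec2:CreateSnapshot, ec2:CreateTags, ec2:DescribeSnapshots, and ec2:DeleteSnapshot.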
