# Source snapshot: Nov 27, 2017, 01:26 PM (paste metadata, kept as a comment)
1#!/usr/bin/env python3
2
3import argparse
4import os
5import sys
6from datetime import datetime, timedelta
7
8import boto3
9import pytz
10from botocore.client import ClientError, Config
11from dateutil.parser import parse
12
# "backup_bucket" must be a universally unique name, so choose something
# specific to your setup.
# The bucket will be created in your account if it does not already exist
backup_bucket = os.environ['MYBUCKETNAME']
# S3-compatible credentials and endpoint (e.g. DigitalOcean Spaces).
# A missing variable raises KeyError at import time, failing fast
# before any network call is attempted.
access_key = os.environ['MYACCESSKEY']
secret_key = os.environ['MYSECRETKEY']
endpoint_url = os.environ['MYENDPOINTURL']
region_name = os.environ['MYREGIONNAME']
21
22
class Space():
    """Wrapper around one bucket of an S3-compatible object store.

    Credentials, endpoint and region come from the module-level
    environment-derived constants.
    """

    def __init__(self, bucket):
        self.session = boto3.session.Session()
        self.client = self.session.client('s3',
                                          region_name=region_name,
                                          endpoint_url=endpoint_url,
                                          aws_access_key_id=access_key,
                                          aws_secret_access_key=secret_key,
                                          config=Config(signature_version='s3')
                                          )
        self.bucket = bucket
        self.paginator = self.client.get_paginator('list_objects')

    def create_bucket(self):
        """Create the bucket if it does not already exist.

        Exits the program when the name is owned by another account (403)
        or on any other unexpected error.
        """
        try:
            self.client.head_bucket(Bucket=self.bucket)
        except ClientError as e:
            # Look the code up once instead of re-indexing per branch.
            error_code = e.response['Error']['Code']
            if error_code == '404':
                # Bucket does not exist yet; claim it.
                self.client.create_bucket(Bucket=self.bucket)
            elif error_code == '403':
                print("The bucket name \"{}\" is already being used by "
                      "someone. Please try using a different bucket "
                      "name.".format(self.bucket))
                sys.exit(1)
            else:
                print("Unexpected error: {}".format(e))
                sys.exit(1)

    def upload_files(self, files):
        """Upload each local file; the object key is the file's basename."""
        for filename in files:
            self.client.upload_file(Filename=filename, Bucket=self.bucket,
                                    Key=os.path.basename(filename))
            print("Uploaded {} to \"{}\"".format(filename, self.bucket))

    def remove_file(self, filename):
        """Delete the object whose key is the basename of *filename*."""
        self.client.delete_object(Bucket=self.bucket,
                                  Key=os.path.basename(filename))

    def prune_backups(self, days_to_keep):
        """Delete every object older than *days_to_keep* days.

        *days_to_keep* may be a string (it arrives from argparse) or int.
        """
        oldest_day = datetime.now(pytz.utc) - timedelta(days=int(days_to_keep))
        page_iterator = self.paginator.paginate(Bucket=self.bucket)
        # Pages of an empty bucket carry no 'Contents' key; .get() with a
        # default handles that instead of relying on a KeyError escaping
        # the comprehension (which also aborted mid-pagination).
        objects_to_prune = [obj['Key'] for page in page_iterator
                            for obj in page.get('Contents', [])
                            if obj['LastModified'] < oldest_day]
        for key in objects_to_prune:  # 'key', not 'object' (builtin shadow)
            print("Removing \"{}\" from {}".format(key, self.bucket))
            self.remove_file(key)

    def download_file(self, filename):
        """Download an object into the working directory under the same name."""
        self.client.download_file(Bucket=self.bucket,
                                  Key=filename, Filename=filename)

    def get_day(self, day_to_get):
        """Download every object stamped with the given day.

        *day_to_get* may be any format dateutil can parse; keys are
        matched against the '-MM-DD-YYYY_' stamp embedded in them.
        """
        try:
            # Attempt to parse the date format the user provided
            input_date = parse(day_to_get)
        except ValueError:
            print("Cannot parse the provided date: {}".format(day_to_get))
            sys.exit(1)
        day_string = input_date.strftime("-%m-%d-%Y_")
        print_date = input_date.strftime("%A, %b. %d %Y")
        print("Looking for objects from {}".format(print_date))
        page_iterator = self.paginator.paginate(Bucket=self.bucket)
        bucket_is_empty = True
        objects_to_grab = []
        for page in page_iterator:
            # A page without 'Contents' contributes nothing; only an
            # entirely contents-free listing means the bucket is empty.
            for obj in page.get('Contents', []):
                bucket_is_empty = False
                if day_string in obj['Key']:
                    objects_to_grab.append(obj['Key'])
        if bucket_is_empty:
            print("No objects currently in bucket")
            sys.exit()
        if objects_to_grab:
            for key in objects_to_grab:
                print("Downloading \"{}\" from {}".format(key, self.bucket))
                self.download_file(key)
        else:
            print("No objects found from: {}".format(print_date))
            sys.exit()
107
108
def is_valid_file(filename):
    """argparse type-checker: return *filename* only if it names a file.

    Raises argparse.ArgumentTypeError otherwise, so argparse reports a
    clean usage error instead of a traceback.
    """
    if not os.path.isfile(filename):
        raise argparse.ArgumentTypeError("File \"{}\" does not exist."
                                         .format(filename))
    return filename
115
116
def parse_arguments():
    """Build the CLI and parse sys.argv.

    Returns:
        argparse.Namespace whose ``func`` attribute holds the handler
        for the selected sub-command.
    """
    parser = argparse.ArgumentParser(
        description='''Client to perform backup-related tasks with
        object storage.''')
    # Python 3 makes subparsers optional by default, so invoking the
    # script with no sub-command left args.func unset and main() crashed
    # with AttributeError. Require a sub-command explicitly instead.
    subparsers = parser.add_subparsers(dest='command')
    subparsers.required = True

    # parse arguments for the "upload" command
    parser_upload = subparsers.add_parser('upload')
    parser_upload.add_argument('files', type=is_valid_file, nargs='+')
    parser_upload.set_defaults(func=upload)

    # parse arguments for the "prune" command
    parser_prune = subparsers.add_parser('prune')
    parser_prune.add_argument('--days-to-keep', default=30)
    parser_prune.set_defaults(func=prune)

    # parse arguments for the "download" command
    parser_download = subparsers.add_parser('download')
    parser_download.add_argument('filename')
    parser_download.set_defaults(func=download)

    # parse arguments for the "get_day" command
    parser_get_day = subparsers.add_parser('get_day')
    parser_get_day.add_argument('day')
    parser_get_day.set_defaults(func=get_day)

    return parser.parse_args()
144
145
def upload(space, args):
    """Sub-command handler: upload the files given on the command line."""
    file_list = args.files
    space.upload_files(file_list)
148
149
def prune(space, args):
    """Sub-command handler: delete backups older than --days-to-keep."""
    retention = args.days_to_keep
    space.prune_backups(retention)
152
153
def download(space, args):
    """Sub-command handler: fetch a single object by key."""
    target = args.filename
    space.download_file(target)
156
157
def get_day(space, args):
    """Sub-command handler: download all objects stamped with the given day."""
    requested_day = args.day
    space.get_day(requested_day)
160
161
def main():
    """Entry point: parse the CLI, ensure the bucket exists, dispatch."""
    arguments = parse_arguments()
    storage = Space(bucket=backup_bucket)
    storage.create_bucket()
    arguments.func(storage, arguments)
167
168
# Run only when executed as a script, so the module can be imported
# (e.g. by tests) without side effects.
if __name__ == '__main__':
    main()