# Pasted snippet — originally shared Jan 22, 2020, 10:04 AM
import os

import boto3

# SECURITY (CWE-798): AWS credentials were hardcoded here and committed to
# source. Treat the original key pair as leaked — rotate/revoke it in the IAM
# console. Credentials are now read from the standard AWS environment
# variables; when unset, pass None so boto3 falls back to its default
# credential chain (shared config, instance profile, etc.).
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
def get_metadata(bucket_name, folder_name):
    """Sum and print the total size (bytes) of all objects under a prefix.

    Args:
        bucket_name: Name of the S3 bucket to scan. (BUG FIX: the original
            ignored this parameter and hardcoded 'vakildesk'.)
        folder_name: Key prefix, without trailing slash, to restrict the scan.

    Returns:
        Total content length in bytes (also printed, preserving the original
        output line).
    """
    s3_client = boto3.client(
        's3',
        aws_access_key_id=ACCESS_KEY,
        aws_secret_access_key=SECRET_KEY,
    )
    paginator = s3_client.get_paginator('list_objects_v2')
    # Filter server-side with Prefix instead of listing the entire bucket and
    # testing startswith() on every key client-side.
    page_iterator = paginator.paginate(Bucket=bucket_name,
                                       Prefix=folder_name + '/')
    total_content_length = 0
    for page in page_iterator:
        # Pages with no matching objects carry no 'Contents' key.
        for obj in page.get('Contents', []):
            # ListObjectsV2 already returns each object's size; the original's
            # per-object head_object() call was one wasted round trip per key.
            total_content_length += obj['Size']
    print("content_length:", str(total_content_length) + ' ' + 'bytes')
    return total_content_length
21
22
23
24
25
import os

import boto3

# SECURITY (CWE-798): this duplicated block also hardcoded the AWS key pair.
# Treat the original keys as leaked and rotate them. Values now come from the
# environment (None lets boto3 fall back to its default credential chain).
ACCESS_KEY = os.environ.get('AWS_ACCESS_KEY_ID')
SECRET_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
# Ad-hoc script: print HEAD metadata for every object under the '8572076163/'
# prefix of the 'vakildesk' bucket.
s3 = boto3.resource(
    's3',
    aws_access_key_id=ACCESS_KEY,
    aws_secret_access_key=SECRET_KEY,
)
bucket = s3.Bucket('vakildesk')
for obj in bucket.objects.filter(Prefix='8572076163/'):
    print(obj)
    try:
        # BUG FIX: a ServiceResource has no head_object() and an ObjectSummary
        # is not subscriptable (file['Key'] raised TypeError) — go through the
        # underlying client via s3.meta.client and read the .key attribute.
        metadata = s3.meta.client.head_object(Bucket='vakildesk', Key=obj.key)
        print('METADATA ', metadata)
    except Exception as e:
        # Best-effort listing: report the failure and continue with the next
        # object rather than aborting the whole scan.
        print("Exception!! ", e)
42
43
44
45
# Utility: move an S3 "folder" (key prefix) to a new location within a bucket.
def move_folder(folder_name, bucket_name, second_foldername=None):
    """Move an S3 "folder" (key prefix) within *bucket_name*.

    Every object under ``folder_name`` is copied to the destination prefix,
    then the originals are deleted.

    Args:
        folder_name: Source prefix to move (no trailing slash).
        bucket_name: Bucket containing the folder.
        second_foldername: Optional destination parent prefix. When given, the
            folder is moved under it (keys become
            ``second_foldername/folder_name/...``, as in the original code).
            When omitted, the folder's last path component is moved to the
            bucket root. (BUG FIX: the original built keys with the literal
            string ``'None/...'`` and crashed on ``None + '/'`` in this case.)
    """
    # BUG FIX: the original referenced undefined names ACCESS_KEY_ID /
    # ACCESS_SECRET_KEY, raising NameError on every call; the module defines
    # ACCESS_KEY / SECRET_KEY. One session is enough — the low-level client is
    # available as s3.meta.client, so no separate boto3.client() is needed.
    s3 = boto3.resource(
        's3',
        aws_access_key_id=ACCESS_KEY,
        aws_secret_access_key=SECRET_KEY,
    )
    client = s3.meta.client

    # Destination prefix: nested under second_foldername when provided,
    # otherwise the folder's own last path component at the bucket root.
    if second_foldername:
        directory_name = second_foldername + '/' + folder_name
    else:
        directory_name = folder_name.split('/')[-1]

    bucket = s3.Bucket(bucket_name)
    all_files = [obj.key for obj in bucket.objects.filter(Prefix=folder_name)]

    # Create the destination "folder" placeholder object.
    client.put_object(Bucket=bucket_name, Key=directory_name + '/')

    if len(all_files) > 1:
        # The first listed key is the source folder placeholder itself — skip
        # it; the destination placeholder was just created above.
        for element in all_files[1:]:
            # Re-root the key: swap the folder_name prefix for the destination
            # prefix. For the second_foldername case this yields exactly the
            # original 'second_foldername/element' keys.
            dest_key = directory_name + element[len(folder_name):]
            if element.split('/')[-1] != '':
                client.copy(
                    {'Bucket': bucket_name, 'Key': element},
                    bucket_name,
                    dest_key,
                )
            else:
                # A key ending in '/' marks a sub-folder placeholder: create
                # the matching placeholder at the destination instead of a copy.
                client.put_object(Bucket=bucket_name, Key=dest_key)
    else:
        print("no any file insider folder")

    # Delete the originals only after all copies have been issued.
    for obj in bucket.objects.filter(Prefix=folder_name):
        s3.Object(bucket.name, obj.key).delete()
    print('done.')