# Requires the AWS Tools for PowerShell module (provides Initialize-AWSDefaultConfiguration and Write-S3Object)
Import-Module AWSPowerShell

# Constants
$sourceDrive = "C:\"
$sourceFolder = "source\path"
$sourcePath = $sourceDrive + $sourceFolder
$s3Bucket = "bucket-name"
$s3Folder = "bucket-path"

# Constants - Amazon S3 credentials
$accessKeyID = "aws-access-key"
$secretAccessKey = "aws-secret-access-key"

Initialize-AWSDefaultConfiguration -AccessKey $accessKeyID -SecretKey $secretAccessKey -Region us-west-2
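
# Alternative (a minimal sketch, not used by the script above): store the keys in a named
# credential profile instead of hardcoding them on every run. "upload-profile" is a hypothetical name.
#   Set-AWSCredential -AccessKey $accessKeyID -SecretKey $secretAccessKey -StoreAs "upload-profile"
#   Initialize-AWSDefaultConfiguration -ProfileName "upload-profile" -Region us-west-2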

# FUNCTION - Iterate through subfolders and upload their files to S3
function RecurseFolders([string]$path) {
    $fso = New-Object -ComObject Scripting.FileSystemObject
    $folder = $fso.GetFolder($path)
    foreach ($subFolder in $folder.SubFolders) {
        $thisFolder = $subFolder.Path

        # Transform the local directory path into a key prefix compatible with S3 buckets and folders
        # 1. Trim the drive letter and colon (e.g. "C:") off the start of the path
        $s3Path = $thisFolder.Substring(2)
        # 2. Replace back-slashes with forward-slashes; the back-slash is escaped as "\\"
        #    because -replace treats its pattern as a regular expression
        $s3Path = $s3Path -replace "\\", "/"
        $s3Path = "/" + $s3Folder + $s3Path
        # (A worked example of this transformation follows the function definition.)

        # Upload this folder's files to S3
        Write-S3Object -BucketName $s3Bucket -Folder $thisFolder -KeyPrefix $s3Path

        # Recurse so that deeper subfolders are uploaded as well
        RecurseFolders $thisFolder
    }
}
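
# Worked example of the prefix transformation (hypothetical folder, assuming the constants above):
#   $thisFolder = "C:\source\path\photos"
#   Substring(2)            -> "\source\path\photos"
#   -replace "\\", "/"      -> "/source/path/photos"
#   "/" + $s3Folder + ...   -> "/bucket-path/source/path/photos"
# so that folder's files are written under that key prefix in the bucket.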

# Upload the root directory's files to S3 (back-slashes are converted so this prefix
# matches the ones built inside RecurseFolders)
$s3Path = "/" + $s3Folder + "/" + ($sourceFolder -replace "\\", "/")
Write-S3Object -BucketName $s3Bucket -Folder $sourcePath -KeyPrefix $s3Path

# Upload subdirectories to S3
RecurseFolders $sourcePath
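
# Optional check (a minimal sketch, assuming the same bucket and folder as above): list the keys
# that landed under the prefix to confirm the upload. Get-S3Object ships with the same AWS module.
Get-S3Object -BucketName $s3Bucket -KeyPrefix ("/" + $s3Folder) |
    Select-Object -ExpandProperty Key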