· 5 years ago · Oct 04, 2020, 09:14 PM
#!/bin/bash
# expand_aliases is required for the jq alias below to take effect in a
# non-interactive script (aliases are off by default in scripts).
shopt -s expand_aliases
# Use a locally-installed jq binary that is not on PATH.
alias jq='/home/bakerboy448/bin/jq-linux64'
####################################################################################################################################################
## CONFIG ##########################################################################################################################################
####################################################################################################################################################

# Notifications: apprise URL; leave empty to disable notifications entirely.
APPRISE_URL=""

# Library: LOCALS, GDRIVE and REMOTES are parallel arrays — index i of each
# describes one library: the local staging directory, the mounted cloud path
# (used to verify uploads before deleting), and the rclone remote destination.
LOCALS=( /home/bakerboy448/local/media )
GDRIVE=( /home/bakerboy448/cloud/media )
REMOTES=( crypt:/media )

# Rclone
RCLONE_CONFIG_FILE="/home/bakerboy448/.config/rclone/rclone.conf"
# Folder holding Google service-account *.json key files; the script creates a
# companion "<key>.metadata.json" usage-tracking file next to each key.
RCLONE_SA_FILES_FOLDER="/home/bakerboy448/.config/Gdrive/SA"
RCLONE_BANDWIDTH_LIMIT="1G"
RCLONE_TPS_LIMIT=10
RCLONE_CHUNK_SIZE="128M"
RCLONE_USER_AGENT="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36"

# Behaviour
UPLOAD_FILES_OLDER_THAN_SECONDS=600        # only upload files at rest at least this long (rclone --min-age)
SLEEP_TIME_BETWEEN_RUNS_SECONDS=600        # pause between full upload passes
SLEEP_TIME_BEFORE_FILE_DELETION_SECONDS=300 # give the cloud mount time to show new uploads before deleting local copies
WARN_IF_SA_REACHES_PERCENT=90              # notify when a service account crosses this usage percentage

# SA usage limits, in GB — presumably matching Google's per-account daily
# upload quota (TODO confirm the current quota values still apply).
SA_USAGE_MAX_GB=750
SA_PREVENT_USAGE_IF_REACHED_MORE_GB=700

# Daily Google Drive Quota reset time and timezone (visible in Drive API Quota usage)
USAGE_RESET_TIME="00:10:00"
USAGE_RESET_TZ="America/Los_Angeles"

####################################################################################################################################################
####################################################################################################################################################
# Clean up the scratch directory and abort on interruption/termination.
# ${TEMPDIR:-} guards against a signal arriving before mktemp below has run.
trap '[[ -n "${TEMPDIR:-}" ]] && rm -rf -- "${TEMPDIR}"; echo "$(date) Uploader interrupted" >&2; exit 2' INT TERM

# Per-run scratch space for rclone logs and SA metadata snapshots.
TEMPDIR=$(mktemp -d -t uploader-"$(date +'%Y-%m-%d_%H.%M.%S')"-XXXXXXXXXX) || exit 1

# jq is required to parse rclone's JSON logs and the SA metadata files.
if ! type jq > /dev/null 2>&1; then
    echo "The command \"jq\" is not installed. Exiting..." >&2
    exit 1
fi
49
RcloneCopy() {
    # Run "rclone copy" with all given arguments, preferring the dockerised
    # rclone (with the needed paths bind-mounted) over a native binary.
    # Exits the whole script if neither docker nor rclone is available.
    if type docker > /dev/null 2>&1; then
        docker run --rm \
            -v "${TEMPDIR}":"${TEMPDIR}" \
            -v "${LOCALS[i]}":"${LOCALS[i]}" \
            -v "${RCLONE_SA_FILES_FOLDER}":"${RCLONE_SA_FILES_FOLDER}":ro \
            -v "${RCLONE_CONFIG_FILE}":"${RCLONE_CONFIG_FILE}":ro \
            hotio/rclone copy "$@"
    elif type rclone > /dev/null 2>&1; then
        rclone copy "$@"
    else
        # Error to stderr so it is never captured by a caller's $(...).
        echo "The command \"rclone\" is not installed. Exiting..." >&2
        exit 1
    fi
}
65
RcloneSize() {
    # Run "rclone size" with all given arguments (docker preferred, native
    # binary as fallback). Only config and SA key folders are mounted,
    # read-only — size queries need no data paths.
    # Exits the whole script if neither docker nor rclone is available.
    if type docker > /dev/null 2>&1; then
        docker run --rm \
            -v "${RCLONE_SA_FILES_FOLDER}":"${RCLONE_SA_FILES_FOLDER}":ro \
            -v "${RCLONE_CONFIG_FILE}":"${RCLONE_CONFIG_FILE}":ro \
            hotio/rclone size "$@"
    elif type rclone > /dev/null 2>&1; then
        rclone size "$@"
    else
        # Error to stderr: callers capture this function's stdout as JSON,
        # so the message must not end up in the captured output.
        echo "The command \"rclone\" is not installed. Exiting..." >&2
        exit 1
    fi
}
79
Apprise() {
    # Send a notification through apprise (dockerised image preferred,
    # native binary as fallback), passing all arguments through untouched.
    # Exits the whole script if neither docker nor apprise is available.
    if type docker > /dev/null 2>&1; then
        docker run --rm \
            hotio/apprise "$@"
    elif type apprise > /dev/null 2>&1; then
        apprise "$@"
    else
        # Error to stderr so it is never captured by a caller's $(...).
        echo "The command \"apprise\" is not installed. Exiting..." >&2
        exit 1
    fi
}
91
DockerPull() {
    # Refresh the docker images used by the wrapper functions.
    # No-op when docker is absent (native binaries are used instead).
    type docker > /dev/null 2>&1 || return 0
    echo "Pulling docker images..."
    # The apprise image is only needed when notifications are configured.
    if [[ -n ${APPRISE_URL} ]]; then
        docker pull hotio/apprise 1> /dev/null
    fi
    docker pull hotio/rclone 1> /dev/null
}
99
Log() {
    # Print a timestamped message to stdout; markdown asterisks are stripped
    # and backtick fences become a dashed separator for console readability.
    # $1 - message (may contain markdown), $2 - optional apprise notification
    # type (e.g. success/warning/failure); when given and APPRISE_URL is set,
    # the ORIGINAL markdown message is also sent as a notification.
    local clean=${1//\*/}
    printf "%s - %s\n" "$(date +'%Y/%m/%d %H:%M:%S')" "${clean//\`/----------------}"
    if [[ -n $2 ]] && [[ -n ${APPRISE_URL} ]]; then
        Apprise -n "${2}" -b "${1}" "${APPRISE_URL}"
    fi
}
107
PrettyPrintSeconds() {
    # Format a duration given in seconds ($1) as zero-padded HH:MM:SS.
    local h m s
    (( h = ${1} / 3600 ))
    (( m = (${1} % 3600) / 60 ))
    (( s = ${1} % 60 ))
    printf "%02d:%02d:%02d\n" "$h" "$m" "$s"
}
114
PrettyPrintSize() {
    # Render a byte count ($1) as a human-readable string with two decimals.
    # Divisors are binary (1024-based) though labelled B/KB/MB/GB.
    if [[ $1 -lt 1024 ]]; then
        printf "%0.2fB" "$1"
    elif [[ $1 -lt 1048576 ]]; then
        printf "%0.2fKB" "$(awk -v b="$1" 'BEGIN {print b/1024}')"
    elif [[ $1 -lt 1073741824 ]]; then
        printf "%0.2fMB" "$(awk -v b="$1" 'BEGIN {print b/1024/1024}')"
    else
        printf "%0.2fGB" "$(awk -v b="$1" 'BEGIN {print b/1024/1024/1024}')"
    fi
}
126
Sleep() {
    # Pause between upload passes (duration configured at the top of the file).
    Log "Sleeping for [${SLEEP_TIME_BETWEEN_RUNS_SECONDS}] seconds..."
    sleep "${SLEEP_TIME_BETWEEN_RUNS_SECONDS}"
}
131
GetRemoteUsage() {
    # Query a remote's object count and total size via "rclone size".
    # $1 - remote path, $2 - service-account file.
    # Prints "count/bytes" on one line; callers split on the slash.
    local json_response
    json_response=$(RcloneSize "${1}" --drive-service-account-file "${2}" --json --config "${RCLONE_CONFIG_FILE}" --user-agent "${RCLONE_USER_AGENT}")
    # One jq invocation extracts both fields via string interpolation.
    jq -r '"\(.count)/\(.bytes)"' <<< "${json_response}"
}
138
SendResultsNotification() {
    # Compose and send (via Log) a markdown summary of a finished upload run.
    # $1 exit code, $2 uploaded bytes, $3 transferred file count,
    # $4 elapsed seconds, $5 new total SA usage in bytes, $6 local path,
    # $7 remote path, $8 service-account file path.
    local _exit_code=${1}
    local _rclone_stats_bytes=${2}
    local _rclone_stats_transfers=${3}
    local _rclone_stats_elapsedtime=${4}
    local _raw_new_sa_usage=${5}
    local _local=${6}
    local _remote=${7}
    local _sa=${8}
    local sa_percentage
    sa_percentage=$(awk "BEGIN {printf \"%.0f\n\", (${_raw_new_sa_usage}/$((SA_USAGE_MAX_GB*1024*1024*1024)))*100}")
    [[ ${sa_percentage} -ge ${WARN_IF_SA_REACHES_PERCENT} ]] && Log "The usage for [${_sa##*/}] has reached [${sa_percentage}%]." warning
    local local_filecount remote_usage remote_filecount remote_bytes
    local_filecount=$(find "${_local}" -type f -print | wc -l)
    # Query the remote ONCE and split the "count/bytes" pair locally; each
    # GetRemoteUsage call is an expensive rclone size round-trip.
    remote_usage=$(GetRemoteUsage "${_remote}" "${_sa}")
    remote_filecount=${remote_usage%%/*}
    remote_bytes=${remote_usage##*/}
    local local_used_size remote_used_size local_free_size remote_free_size
    local_used_size=$(PrettyPrintSize "$(du -B1 -s "${_local}" | awk '{print $1}')")
    remote_used_size=$(PrettyPrintSize "${remote_bytes}")
    local_free_size=$(PrettyPrintSize "$(df -B1 --output=avail "${_local}" | tail -n1)")
    remote_free_size="-"
    # Column widths for the usage table: widest of local/remote values,
    # never narrower than the column header.
    local length2 length3 length4
    length2=${#local_filecount} && [[ ${#remote_filecount} -gt ${#local_filecount} ]] && length2=${#remote_filecount} ; [[ ${length2} -lt 5 ]] && length2=5
    length3=${#local_used_size} && [[ ${#remote_used_size} -gt ${#local_used_size} ]] && length3=${#remote_used_size} ; [[ ${length3} -lt 4 ]] && length3=4
    length4=${#local_free_size} && [[ ${#remote_free_size} -gt ${#local_free_size} ]] && length4=${#remote_free_size} ; [[ ${length4} -lt 4 ]] && length4=4
    Log "**Upload:**
\`\`\`
Local: ${_local}
Remote: ${_remote}
Uploaded: $(PrettyPrintSize "${_rclone_stats_bytes}") [${_rclone_stats_transfers} files]
Duration: $(PrettyPrintSeconds "${_rclone_stats_elapsedtime}")
SA-file: ${_sa##*/} [$(PrettyPrintSize "${_raw_new_sa_usage}")] [${sa_percentage}%]
Exit-code: ${_exit_code}
\`\`\`
**Usage:**
\`\`\`
$(printf "%-7s %${length3}s %${length4}s %${length2}s" " " "Used" "Free" "Files" )
$(printf "%-7s %${length3}s %${length4}s %${length2}s" "Local:" "${local_used_size}" "${local_free_size}" "${local_filecount}" )
$(printf "%-7s %${length3}s %${length4}s %${length2}s" "Remote:" "${remote_used_size}" "${remote_free_size}" "${remote_filecount}" )
\`\`\`
" success
}
177
GetSA() {
    # Select the service-account (SA) key file with the lowest tracked usage
    # that starts at or below SA_PREVENT_USAGE_IF_REACHED_MORE_GB.
    # Prints "sa_path;free_bytes;used_bytes", or "0;0;0" when none qualifies.
    # Usage is persisted in a "<keyfile>.metadata.json" beside each SA key.
    find "${RCLONE_SA_FILES_FOLDER}" -type f -iname "*.json" ! -iname "*.metadata.json" -print0 |
        while IFS= read -r -d '' sa; do
            # First sighting of this SA: create its metadata file with zero
            # usage and the current time as the last reset.
            if [[ ! -f "${sa%.*}.metadata.json" ]]; then
                jq --arg key1 TotalUsageBytes \
                    --arg value1 "0" \
                    --arg key2 LastResetEpoch \
                    --arg value2 "$(date +%s)" \
                    --arg key3 SAFile \
                    --arg value3 "${sa##*/}" \
                    '. | .[$key1]=$value1 | .[$key2]=$value2 | .[$key3]=$value3' <<<'{}' > "${sa%.*}.metadata.json"
            fi
            # Today's quota-reset moment, evaluated in the configured timezone.
            reset_epoch=$(TZ=${USAGE_RESET_TZ} date -d "${USAGE_RESET_TIME}" +%s)
            sa_last_reset_epoch=$(jq -r '.LastResetEpoch' < "${sa%.*}.metadata.json")
            # If the SA was last reset at or before that moment, zero its usage
            # counter and stamp the new reset time.
            if [[ ${sa_last_reset_epoch} -le ${reset_epoch} ]]; then
                json=$(jq '.TotalUsageBytes = "0"' < "${sa%.*}.metadata.json")
                # NOTE(review): jq's output is redirected into the metadata
                # file, so this assignment always captures an empty string —
                # the "json=" wrapper looks like leftover copy/paste.
                json=$(jq '.LastResetEpoch = "'"$(date +%s)"'"' <<< "${json}" > "${sa%.*}.metadata.json")
            fi
            # Seed the usage ceiling on the first iteration; SAs above it can
            # never win the -le comparison below.
            [[ -z ${smallest_usage} ]] && smallest_usage=$(( SA_PREVENT_USAGE_IF_REACHED_MORE_GB * 1024 * 1024 * 1024 ))
            sa_total_usage_bytes=$(jq -r '.TotalUsageBytes' < "${sa%.*}.metadata.json")
            # Track the running minimum. The winner's metadata is snapshotted
            # into TEMPDIR because this loop runs in a pipeline subshell and
            # its variables do not survive past "done".
            if [[ ${sa_total_usage_bytes} -le ${smallest_usage} ]]; then
                smallest_usage=${sa_total_usage_bytes}
                cp "${sa%.*}.metadata.json" "${TEMPDIR}/sa.metadata.json"
            fi
        done
    # The snapshot (if present) identifies the selected SA; consume it.
    if [[ -f "${TEMPDIR}/sa.metadata.json" ]]; then
        sa_usage=$(jq -r '.TotalUsageBytes' < "${TEMPDIR}/sa.metadata.json")
        sa_free=$(( (SA_USAGE_MAX_GB * 1024 * 1024 * 1024) - sa_usage ))
        sa=$(jq -r '.SAFile' < "${TEMPDIR}/sa.metadata.json")
        rm "${TEMPDIR}/sa.metadata.json"
        echo "${RCLONE_SA_FILES_FOLDER}/${sa};${sa_free};${sa_usage}"
    else
        echo "0;0;0"
    fi
}
213
DeleteFiles() {
    # Delete local copies of uploaded files, but only after confirming each
    # file is visible on the mounted cloud path; then prune empty directories.
    # $1 - cloud mount root, $2 - local root,
    # $3 - newline-separated file paths relative to both roots.
    local cloud_root=${1}
    local local_root=${2}
    local filelist=${3}
    local rel_path
    # Skip the loop entirely for an empty list (<<< would otherwise feed one
    # empty line through the loop).
    if [[ -n ${filelist} ]]; then
        while read -r rel_path; do
            [[ -z ${rel_path} ]] && continue
            if [[ -f "${cloud_root}/${rel_path}" ]]; then
                rm -- "${local_root}/${rel_path}" && Log "[${local_root}] - Removed: ${local_root}/${rel_path}"
            fi
        done <<< "${filelist}"
    fi
    # Remove directories left empty by the deletions (never the root itself).
    find "${local_root}" -mindepth 1 -type d -empty -delete
}
225
# Pre-pull the docker images once at startup (no-op without docker).
DockerPull

# Main loop: forever, for each configured library pair, pick the least-used
# service account, upload with rclone, then delete the local copies that are
# confirmed visible on the cloud mount.
while true; do
    for i in "${!LOCALS[@]}"; do
        Log "[${LOCALS[i]}] - Finding available SA..."
        # GetSA prints "sa_file;free_bytes;used_bytes" ("0;0;0" when no SA
        # is under the usage ceiling); split on ';' into the sa array.
        DEFAULTIFS="${IFS}" && IFS=';' && read -r -a sa <<< "$(GetSA)" && IFS="${DEFAULTIFS}"
        if [[ ${sa[0]} != "0" ]]; then
            sa_file=${sa[0]}
            sa_free=${sa[1]}
            sa_usage=${sa[2]}

            Log "[${LOCALS[i]}] - Upload has started using [${sa_file##*/}]..."
            # NOTE(review): the inner quotes below end and restart the string;
            # this only works while TEMPDIR contains no whitespace.
            Log "Rclone Log file is at: "${TEMPDIR}/rclone.log""
            start_time=$(date +%s)
            # --max-transfer caps the run at the SA's remaining daily quota;
            # the JSON log file is parsed below for stats and copied files.
            RcloneCopy "${LOCALS[i]}" "${REMOTES[i]}" \
                --log-level INFO --stats 10s --stats-file-name-length 0 --stats-one-line --use-json-log --log-file "${TEMPDIR}/rclone.log" \
                --drive-chunk-size "${RCLONE_CHUNK_SIZE}" \
                --fast-list \
                --drive-stop-on-upload-limit \
                --bwlimit "${RCLONE_BANDWIDTH_LIMIT}" \
                --tpslimit "${RCLONE_TPS_LIMIT}" \
                --drive-service-account-file "${sa_file}" \
                --max-transfer "${sa_free}" \
                --cutoff-mode=cautious \
                --min-age ${UPLOAD_FILES_OLDER_THAN_SECONDS}s \
                --error-on-no-transfer \
                --config "${RCLONE_CONFIG_FILE}" \
                --user-agent "${RCLONE_USER_AGENT}"
            exit_code=$?
            end_time=$(date +%s)

            # Treated as success: 0, and 8 — presumably rclone's
            # "--max-transfer limit reached"; 9 is "no files transferred"
            # (from --error-on-no-transfer). TODO confirm against the
            # exit-code table of the installed rclone version.
            if [[ ${exit_code} -eq 0 ]] || [[ ${exit_code} -eq 8 ]]; then
                # Relative paths of every file rclone reports as "Copied".
                rclone_filelist=$(jq -r '. | select(.msg | startswith ("Copied")) | .object' "${TEMPDIR}/rclone.log")
                # Final cumulative stats entry wins (tail -n1).
                rclone_stats_bytes=$(jq -r '. | select(.stats) | .stats.bytes' "${TEMPDIR}/rclone.log" | tail -n1)
                rclone_stats_transfers=$(jq -r '. | select(.stats) | .stats.transfers' "${TEMPDIR}/rclone.log" | tail -n1)
                elapsedtime=$(( end_time - start_time ))
                # Persist the SA's new cumulative usage to its metadata file.
                raw_new_sa_usage=$(( sa_usage + rclone_stats_bytes ))
                json=$(jq '.TotalUsageBytes = "'"${raw_new_sa_usage}"'"' < "${sa_file%.*}.metadata.json")
                echo "${json}" > "${sa_file%.*}.metadata.json"
                # Let the mount's cache catch up before verifying/deleting.
                Log "[${LOCALS[i]}] - Waiting [${SLEEP_TIME_BEFORE_FILE_DELETION_SECONDS}] seconds for mount to update cache..." && sleep ${SLEEP_TIME_BEFORE_FILE_DELETION_SECONDS}
                DeleteFiles "${GDRIVE[i]}" "${LOCALS[i]}" "${rclone_filelist}"
                Log "Refresh Cache"
                # NOTE(review): calls the host rclone directly (not the docker
                # wrapper) and assumes an rclone rc server — the mount — is
                # listening on the default address; confirm in deployment.
                rclone rc vfs/refresh -v --fast-list recursive=true
                SendResultsNotification "${exit_code}" "${rclone_stats_bytes}" "${rclone_stats_transfers}" "${elapsedtime}" "${raw_new_sa_usage}" "${LOCALS[i]}" "${REMOTES[i]}" "${sa_file}"
            elif [[ ${exit_code} -eq 9 ]]; then
                Log "[${LOCALS[i]}] - There was nothing to transfer."
            else
                # Failure path: on Google's userRateLimitExceeded, mark the SA
                # as fully used for the day; otherwise record what was
                # actually transferred before the failure.
                rclone_message=$(jq -r '. | select(.msg | startswith ("Received upload limit error:")) | .object' "${TEMPDIR}/rclone.log" | tail -n1)
                if [[ ${rclone_message} == *"userRateLimitExceeded"* ]]; then
                    raw_new_sa_usage=$(( SA_USAGE_MAX_GB * 1024 * 1024 * 1024 ))
                    json=$(jq '.TotalUsageBytes = "'"${raw_new_sa_usage}"'"' < "${sa_file%.*}.metadata.json")
                else
                    rclone_stats_bytes=$(jq -r '. | select(.stats) | .stats.bytes' "${TEMPDIR}/rclone.log" | tail -n1)
                    raw_new_sa_usage=$(( sa_usage + rclone_stats_bytes ))
                    json=$(jq '.TotalUsageBytes = "'"${raw_new_sa_usage}"'"' < "${sa_file%.*}.metadata.json")
                fi
                echo "${json}" > "${sa_file%.*}.metadata.json"
                Log "[${LOCALS[i]}] - Something went wrong during the upload!" failure
                cat "${TEMPDIR}/rclone.log"
            fi

            rm "${TEMPDIR}/rclone.log"
        else
            Log "No available Service Accounts found!" failure
        fi
    done
    Sleep
done