# Posted: Jan 05, 2020, 06:32 AM
#!/bin/bash

#----------------------------------------------------------------------------
# Here you can add your own custom script eg.
# rclone copy /local/path remote:path # copies /local/path to the remote
# rclone sync /local/path remote:path # syncs /local/path to the remote
#----------------------------------------------------------------------------
# Rotate the rclone log, overwriting the old backup. We keep today's and
# yesterday's logs online - we use a share named work.
#----------------------------------------------------------------------------

# rotate_log BACKUP CURRENT
#   Removes BACKUP if it exists, then moves CURRENT to BACKUP if it exists,
#   so exactly one previous log is kept and CURRENT starts fresh.
# NOTE(review): the original tested "! -f", i.e. it removed/moved the files
# only when they did NOT exist — inverted logic that made rotation a no-op
# (and an error) in normal operation. Fixed to test for existence.
rotate_log() {
  local backup=$1
  local current=$2
  if [ -f "$backup" ]; then
    rm -- "$backup" # remove the old backup
  fi
  if [ -f "$current" ]; then
    mv -- "$current" "$backup" # create the new backup
  fi
}

rotate_log "/mnt/user/work/rclone-bak.log" "/mnt/user/work/rclone.log"


#----------------------------------------------------------------------------
# Sync the drives, excluding the entries in exclude.txt, which sits in the
# /mnt/user directory and is NOT visible from a share!
# Sync to a remote named google_crypt, everything under /mnt/user/
# (that's the whole array). Max file size backed up is 40G, link files are
# copied, 3 transfers run in parallel, 4 checkers look for changed files,
# bandwidth is limited to 60M, and anything in exclude.txt is skipped.
#----------------------------------------------------------------------------
# This command does the backup!
#----------------------------------------------------------------------------
rclone sync \
  --log-file /mnt/user/work/rclone.log \
  --max-size=40G \
  --copy-links \
  --transfers=3 \
  --exclude-from "/mnt/user/exclude.txt" \
  --bwlimit=60M \
  -vv \
  /mnt/user/ google_crypt:/ \
  --checkers=4



# exclude.txt lists directories from the point of view of /mnt/user/, so if you have a share named "movies" at
# /mnt/user/movies/ that you don't want backed up, an entry that simply says movies/ makes it get skipped!
# Skip the appdata directory, Plex data directories, and any specific docker data directories. To back those up,
# you'll want a script that backs that data up elsewhere with docker stopped, then let rclone push it to the cloud.

# rclone.log lets you see what rclone is doing by using tail; this config stores it in a share named "work".
# It can be followed from a terminal session using the command: tail -F --lines=1000 /mnt/user/work/rclone.log
# -vv means very verbose, so the log will show you all sorts of things including errors and retries; if you've not
# generated your own API key and placed it in the rclone config you will get TONS of errors!
# Test this script by running it in the foreground....