Use dynamic file listing script

This commit is contained in:
Hannes Körber
2019-04-28 20:28:18 +02:00
parent 198f642e04
commit 4b817f45c6

View File

@@ -16,13 +16,10 @@ export GNUPGHOME="$(mktemp -d)"
 bucket="${1}" ; shift
 name="${1}" ; shift
-backup_sources_file="${1}" ; shift
+filelist_script="${1}" ; shift
 gpg_pubkey_file="${1}" ; shift
 gpg_pubkey_id="${1}" ; shift
 
-declare -a backup_sources
-readarray backup_sources < "${backup_sources_file}"
-
 install --directory --owner $(id -u) --group $(id -g) --mode 700 "${GNUPGHOME}"
 
 cleanup() {
@@ -46,51 +43,32 @@ tmpgpg -k
 timestamp="$(date --utc -Iseconds)"
 
-for backup_dir in "${backup_sources[@]}" ; do
-    backup_dir_expanded=($(eval "echo $backup_dir"))
-    for dir in "${backup_dir_expanded[@]}" ; do
-        echo $dir
-        set -x
-        find \
-            "${dir[@]}" \
-            \( \
-            -regex "${dir}.*/files_trashbin" \
-            -o \
-            -regex "${dir}.*nextcloud.log.*" \
-            -o \
-            -regex "${dir}.*registry/docker/registry" \
-            -o \
-            -regex "${dir}.*/gogs.log.*" \
-            -o \
-            -regex "${dir}.*gogs/data/sessions/.*" \
-            -o \
-            -regex "${dir}.*/cache/.*" \
-            \) \
-            -prune \
-            -o \
-            -print0 \
-        | tar \
-            --create \
-            --verbose \
-            --no-auto-compress \
-            --ignore-failed-read \
-            --acls \
-            --selinux \
-            --xattrs \
-            --null \
-            --force-local \
-            --no-recursion \
-            --files-from - \
-            --file - \
-        | gzip \
-            --to-stdout \
-        | tmpgpg \
-            --output - \
-            --encrypt \
-            --recipient "${gpg_pubkey_id}" \
-        | aws \
-            s3 cp \
-            - \
-            "s3://${bucket}/${name}-${timestamp}/${dir##/}.tar.gz.gpg"
-    done
-done
+"${filelist_script}" | while read filelist ; do
+    filepath="$(echo "$filelist" | cut -d ':' -f 1)"
+    fifo="$(echo "$filelist" | cut -d ':' -f 2)"
+    mkdir -p "$(dirname "${filepath}")"
+    echo "$fifo"
+    <"$fifo" tar \
+        --create \
+        --verbose \
+        --no-auto-compress \
+        --ignore-failed-read \
+        --acls \
+        --selinux \
+        --xattrs \
+        --null \
+        --force-local \
+        --no-recursion \
+        --files-from - \
+        --file - \
+    | gzip \
+        --to-stdout \
+    | tmpgpg \
+        --output - \
+        --encrypt \
+        --recipient "${gpg_pubkey_id}" \
+    | aws \
+        s3 cp \
+        - \
+        "s3://${bucket}/${name}-${timestamp}/${filepath}.tar.gz.gpg"
+done