#!/bin/bash
#
# Back up the "bitwarden_${DATABASE_URL}" Postgres database together with the
# instance's data directory and config files. The backup contents are hashed;
# a tarball is streamed to S3 only when that hash has not been uploaded before
# (a zero-byte marker object under sums/ records each uploaded hash).
#
# Required environment: S3_BUCKET, DATABASE_URL, ROOT_DIR, ETC_DIR
set -euo pipefail

# Fail fast with a clear message if a required variable is missing —
# previously a missing var surfaced as an opaque 'unbound variable' error
# deep inside the script.
: "${S3_BUCKET:?S3_BUCKET must be set}"
: "${DATABASE_URL:?DATABASE_URL must be set}"
: "${ROOT_DIR:?ROOT_DIR must be set}"
: "${ETC_DIR:?ETC_DIR must be set}"

scriptpath="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
readonly dumpfile="dump.sql"
tmpdir="$(mktemp -d -p "${PWD}")"
readonly tmpdir

# Remove the temp dir on EVERY exit path (success, error, signal). Previously
# cleanup was a trailing 'rm -rf' that never ran when set -e aborted mid-way,
# leaking one temp directory per failed run.
cleanup() { rm -rf -- "${tmpdir}"; }
trap cleanup EXIT

#######################################
# Check whether a marker object for this content hash already exists in S3.
# Globals:   S3_BUCKET (read)
# Arguments: $1 - md5 hash of the backup contents
# Outputs:   progress message to stdout
# Returns:   0 if the hash marker exists, non-zero otherwise
#######################################
check_for_hash() {
  local ret=0
  echo -n "Checking if hash ${1} is present: "
  aws s3 ls "s3://${S3_BUCKET}/sums/${1}" || ret=$?
  if (( ret == 0 )); then
    echo "Yes."
  else
    echo "No."
  fi
  return "$ret"
}

#######################################
# Tar the current directory, stream it to S3, and record the hash marker.
# Globals:   S3_BUCKET, DATABASE_URL (read)
# Arguments: $1 - md5 hash of the backup contents
# Outputs:   progress message to stdout
#######################################
create_and_upload() {
  local sum=$1
  local backup_file
  # NOTE(review): DATABASE_URL is interpolated into the date format string;
  # a '%' in its value would be interpreted by date — assumed to be a plain
  # instance name, confirm against how the variable is populated.
  backup_file="$(date +%Y/%m/${DATABASE_URL}_backup-%d-%H-%M-%S.tar.gz)"
  echo "Uploading ${backup_file}"
  tar -zc . | aws s3 cp - "s3://${S3_BUCKET}/${backup_file}"
  # Empty marker object: its key alone records that this hash was uploaded.
  aws s3api put-object --bucket "${S3_BUCKET}" --key "sums/${sum}"
}

# The dump file must be writable by the postgres user (pg_dump runs via sudo),
# so pre-create it world-writable inside a world-traversable temp dir.
chmod ugo+wX "${tmpdir}"
pushd "${tmpdir}" >/dev/null

rm -rf "${dumpfile}"
touch "${dumpfile}"
chmod ugo+w "${dumpfile}"
sudo -u postgres -- pg_dump --no-owner --no-privileges --clean --if-exists \
  --quote-all-identifiers "bitwarden_${DATABASE_URL}" -f "${dumpfile}"

# Normalize dump ordering so the content hash is stable across runs.
"${scriptpath}/pgdump-sort" "${dumpfile}" "sorted.sql"

cp -r "${ROOT_DIR}/data/${DATABASE_URL}" "./data"
cp "${ETC_DIR}/${DATABASE_URL}.conf" "./.env"
cp "${ROOT_DIR}/${DATABASE_URL}.conf" "./${DATABASE_URL}.conf"
# Icon cache is re-downloadable; excluding it keeps the hash stable.
rm -rf ./data/icon_cache

# Hash everything except the raw (unsorted, nondeterministic) dump; byte-wise
# sort (LC_ALL=C) keeps the aggregate hash reproducible across locales.
sum=$(find . -type f -not -name "${dumpfile}" -exec md5sum {} + \
  | LC_ALL=C sort | md5sum | cut -d ' ' -f 1)
rm sorted.sql

# Upload only if this exact content has not been uploaded before.
check_for_hash "$sum" || create_and_upload "${sum}"

popd >/dev/null