Victor Lavaud 8 months ago
parent
commit
96dfdf6e2c

+ 20 - 0
gdrive-backup/Dockerfile

@@ -0,0 +1,20 @@
+FROM alpine:latest
+RUN apk add --no-cache wget tar openssh rsync
+
+# Download gdrive
+RUN wget https://github.com/glotlabs/gdrive/releases/download/3.9.0/gdrive_linux-x64.tar.gz \
+    && tar xzvf gdrive_linux-x64.tar.gz \
+    && mv gdrive /usr/local/bin \
+    && chmod a+rx /usr/local/bin/gdrive \
+    && rm gdrive_linux-x64.tar.gz
+
+# Prepare scripts
+COPY ./backup-gdrive.sh /usr/local/bin
+COPY ./download-gdrive.sh /usr/local/bin
+COPY ./backup.sh /usr/local/bin
+RUN chmod a+rx /usr/local/bin/backup-gdrive.sh /usr/local/bin/download-gdrive.sh /usr/local/bin/backup.sh
+
+# gdrive looks for credentials in this folder. Allow user to mount it.
+RUN mkdir /root/.config
+VOLUME "/root/.config/gdrive3"
+
+ENTRYPOINT ["sh", "/usr/local/bin/backup-gdrive.sh"]

+ 4 - 0
gdrive-backup/backup-gdrive.sh

@@ -0,0 +1,4 @@
+#!/bin/sh
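+# Entry point of the container: fetches the Google Drive contents with
+# download-gdrive.sh, then uploads them to the SSH server with backup.sh.
+# Exits with 10 if the download fails and 11 if the upload fails.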
+echo "Backup Google Drive, running as $(id)"
+download-gdrive.sh || exit 10
+backup.sh || exit 11

+ 53 - 0
gdrive-backup/backup.sh

@@ -0,0 +1,53 @@
+#!/bin/sh
+#
+# This script backs up a directory and uploads the backup to an SSH server.
+#
+# On the server, a directory containing the date of the backup is created,
+# for instance 20230402 if the script was launched on the 2nd of April 2023.
+# The files are copied inside this directory.
+#
+# If a previous backup directory exists, unchanged files are hard-linked to
+# it instead of being copied again. For instance, if 20230101/big_file exists
+# and big_file has not changed, then 20230102/big_file will be a hard (not
+# symbolic) link to it. This is achieved with rsync's --link-dest option
+# (see man rsync).
+#
+# The following environment variables must be defined:
+# - LOCAL_FOLDER_PATH: a relative or absolute path to the directory to back up.
+#     The script exits with status 10 if the path does not exist or is not
+#     a directory.
+# - REMOTE_FOLDER_PATH: a relative or absolute path to the directory in which
+#     the backups are created.
+# - REMOTE_USER: The username to use when connecting to the SSH server.
+# - REMOTE_SERVER: The server to connect to.
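+#
+# Example invocation (placeholder values, not taken from this commit):
+#   LOCAL_FOLDER_PATH=/tmp/app REMOTE_USER=backup REMOTE_SERVER=backup.example.com \
+#   REMOTE_FOLDER_PATH=/srv/backups/gdrive sh backup.sh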
+
+# This function prints the most recently modified directory (e.g. ./20230402/)
+# inside REMOTE_FOLDER_PATH on the remote SSH server, or an empty string if
+# none is found.
+find_newest_folder() {
+  ssh -o StrictHostKeyChecking=no -l "${REMOTE_USER}" "${REMOTE_SERVER}" "cd ${REMOTE_FOLDER_PATH} && ls -td ./*/ | head -1"
+}
+
+[[ -d "${LOCAL_FOLDER_PATH}" ]] || exit 10
+
+echo "Listing file in local folder"
+ls -l ${LOCAL_FOLDER_PATH}
+
+# E.g. 20230402.
+remote_folder=${REMOTE_FOLDER_PATH}/$(date +'%Y%m%d')
+
+echo "Looking for previous folder..."
+last_folder=$(find_newest_folder)
+if [ -n "$last_folder" ]; then
+  echo "Found previous folder: ${REMOTE_FOLDER_PATH}/${last_folder}"
+  link_parameter="--link-dest=${REMOTE_FOLDER_PATH}/${last_folder}"
+else
+  echo "No previous folder found."
+  # Harmless placeholder so the quoted "${link_parameter}" passed to rsync
+  # below does not expand to an empty argument.
+  link_parameter="--progress"
+fi
+
+echo "Creating remote folder: ${remote_folder}"
+ssh -o StrictHostKeyChecking=no -l "${REMOTE_USER}" "${REMOTE_SERVER}" "mkdir -p ${remote_folder}"
+
+cd "${LOCAL_FOLDER_PATH}" || exit 10
+echo "Copying files."
+rsync -e "ssh -o StrictHostKeyChecking=no" --verbose --archive "${link_parameter}" * "${REMOTE_USER}"@"${REMOTE_SERVER}":"${remote_folder}" || exit 20
+echo "Done copying files."

+ 87 - 0
gdrive-backup/backup.yaml

@@ -0,0 +1,87 @@
+apiVersion: batch/v1
+kind: CronJob
+metadata:
+  name: gdrive-backup
+spec:
+  schedule: "@weekly"
+  jobTemplate:
+    spec:
+      template:
+        spec:
+          restartPolicy: "Never"
+          affinity:
+            nodeAffinity:
+              requiredDuringSchedulingIgnoredDuringExecution:
+                nodeSelectorTerms:
+                - matchExpressions:
+                  - key: arch
+                    operator: In
+                    values:
+                    - x86
+          initContainers:
+          - name: prepare-gdrive-credentials
+            image: busybox
+            command: ['sh', '-c', 'cp -R /credentials/* /root/.config/gdrive3']
+            volumeMounts:
+            - name: credentials
+              mountPath: "/credentials/victor.lavaud@gmail.com"
+              subPath: "victor.lavaud@gmail.com"
+            - name: accounts
+              mountPath: "/credentials/accounts.json"
+              subPath: "accounts.json"
+            - name: gdrive-config
+              mountPath: "/root/.config/gdrive3"
+          containers:
+          - name: gdrive-backup
+            image: qdii/backup-gdrive:latest
+            imagePullPolicy: "Always"
+            env:
+            - name: LOCAL_FOLDER_PATH
+              value: "/tmp/app"
+            - name: REMOTE_USER
+              value: "uab10c1ba4bc"
+            - name: REMOTE_SERVER
+              value: "bck.dodges.it"
+            - name: REMOTE_FOLDER_PATH
+              value: "/home/uab10c1ba4bc/data/vol0/gdrive"
+            - name: ACCOUNT
+              value: "victor.lavaud@gmail.com"
+            volumeMounts:
+            - name: ephemeral
+              mountPath: "/tmp/app"
+            - name: gdrive-config
+              mountPath: "/root/.config/gdrive3"
+            - name: backup
+              mountPath: "/root/.ssh/id_rsa"
+              subPath: ssh_private_key
+            - name: backup
+              mountPath: "/root/.ssh/id_rsa.pub"
+              subPath: ssh_public_key
+          volumes:
+            - name: credentials
+              projected:
+                sources:
+                  - secret:
+                      name: gdrive
+                      items:
+                      - key: secret.json
+                        path: "victor.lavaud@gmail.com/secret.json"
+                      - key: tokens.json
+                        path: "victor.lavaud@gmail.com/tokens.json"
+            - name: accounts
+              secret:
+                secretName: gdrive
+                items:
+                - key: accounts.json
+                  path: "accounts.json"
+            - name: backup
+              secret:
+                secretName: backup
+                defaultMode: 0400
+            - name: ephemeral
+              emptyDir:
+                sizeLimit: 50Gi
+            - name: gdrive-config
+              emptyDir:
+                sizeLimit: 10Mi
+
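+# The "gdrive" and "backup" secrets referenced above must exist before the
+# CronJob runs. One possible way to create them (local file names below are
+# assumptions, not part of this commit):
+#   kubectl create secret generic gdrive \
+#     --from-file=secret.json --from-file=tokens.json --from-file=accounts.json
+#   kubectl create secret generic backup \
+#     --from-file=ssh_private_key=./id_rsa --from-file=ssh_public_key=./id_rsa.pub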

+ 29 - 0
gdrive-backup/download-gdrive.sh

@@ -0,0 +1,29 @@
+#!/bin/sh
+# This script downloads all files from a Google drive into a local directory.
+# It relies on https://github.com/glotlabs/gdrive, which expects to find
+# google drive credentials on /root/.config/gdrive3.
+#
+# Limitations: does not export Google Docs.
+#
+# Environment variables:
+# LOCAL_FOLDER_PATH: the directory where the files will be downloaded. It is
+#   created if it does not exist.
+# ACCOUNT: the email address of the account to use.
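+#
+# Example invocation (placeholder values, not taken from this commit):
+#   ACCOUNT=user@example.com LOCAL_FOLDER_PATH=/tmp/app sh download-gdrive.sh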
+[[ -d "$LOCAL_FOLDER_PATH" ]] || mkdir -p "$LOCAL_FOLDER_PATH"
+[[ -z "$ACCOUNT" ]] && echo "ACCOUNT must be set to an email address" >&2 && exit 1
+
+echo "Copying all Google drive files to $LOCAL_FOLDER_PATH"
+
+cd "$LOCAL_FOLDER_PATH"
+gdrive account switch "$ACCOUNT"
+
+for folder in $(gdrive files list | grep folder | cut -d' ' -f1)
+do
+  echo "Downloading folder with id $folder"
+  gdrive files download --recursive "$folder" || exit 10
+done
+
+for document in $(gdrive files list | grep regular | cut -d' ' -f1)
+do
+  echo "Downloading file with id $document"
+  gdrive files download "$document"  || exit 10
+done