#!/usr/bin/env bash
#
# backup2mdisc.sh
#
# Purpose:
#   1. Scans all files in a source directory.
#   2. Groups them into "chunks" so that each chunk is <= a specified size (default 100GB).
#   3. Creates a TAR archive of each chunk, compresses it with lz4, and encrypts it with GPG (AES256).
#   4. Each .tar.lz4.gpg is fully independent (no other parts/discs needed to restore that chunk).
#   5. (Optional) Creates ISO images from each encrypted chunk if --create-iso is provided.
#   6. (Optional) Burns each chunk or ISO to M-Disc if --burn is provided.
#
# Usage:
#   ./backup2mdisc.sh /path/to/source /path/to/destination [CHUNK_SIZE] [--create-iso] [--burn]
#
# Examples:
#   ./backup2mdisc.sh /home/user/data /mnt/backup 100G --create-iso
#   ./backup2mdisc.sh /data /backup 50G --burn
#
# Dependencies:
#   - bash
#   - gpg (for encryption)
#   - lz4 (for fast compression)
#   - tar
#   - numfmt (GNU coreutils) or equivalent, for parsing sizes like "100G"
#   - sha256sum (or 'shasum -a 256' on macOS/FreeBSD)
#   - genisoimage or mkisofs (for creating ISOs if --create-iso)
#   - growisofs (Linux) or hdiutil (macOS) for burning if --burn
#
# Notes:
#   - This script sorts files by size and accumulates them until the chunk is "full."
#   - If a single file is bigger than CHUNK_SIZE, the resulting chunk will exceed the
#     size limit and may not fit on one disc.
#   - Each chunk gets a separate .tar.lz4.gpg file. If one disc is lost, only that chunk's files are lost.
#   - Keep your GPG passphrase safe; you'll need it to decrypt any chunk.
# Abort on errors, unset variables, and failures anywhere in a pipeline
# (a failing tar/lz4/gpg stage must not silently produce a corrupt chunk).
set -euo pipefail

#####################################
#      CONFIGURATION & DEFAULTS     #
#####################################

DEFAULT_CHUNK_SIZE="100G"   # Adjust if you want a different default
MANIFEST_NAME="manifest_individual_chunks.txt"

#####################################
#             FUNCTIONS             #
#####################################

# Print usage information and exit non-zero.
usage() {
  echo "Usage: $0 /path/to/source /path/to/destination [CHUNK_SIZE] [--create-iso] [--burn]"
  echo
  echo "Example: $0 /home/user/docs /mnt/backup 100G --create-iso --burn"
  exit 1
}

# Cross-platform SHA-256: GNU sha256sum where available, else BSD/macOS shasum.
# Arguments: $1 - path to the file to checksum
# Outputs:   "<hash>  <path>" line on stdout
compute_sha256() {
  if command -v sha256sum >/dev/null 2>&1; then
    sha256sum "$1"
  else
    shasum -a 256 "$1"
  fi
}

# Convert an IEC size such as '100G' or '50G' into bytes.
# Prefers GNU numfmt; falls back to a manual parser on systems (e.g. macOS)
# that ship without GNU coreutils.
bytes_from_iec() {
  if command -v numfmt >/dev/null 2>&1; then
    numfmt --from=iec "$1"
    return
  fi
  local value=$1 suffix mult=1
  suffix=${value##*[0-9]}
  value=${value%"$suffix"}
  case "$suffix" in
    '')  mult=1 ;;
    K|k) mult=1024 ;;
    M|m) mult=$((1024 ** 2)) ;;
    G|g) mult=$((1024 ** 3)) ;;
    T|t) mult=$((1024 ** 4)) ;;
    *)
      echo "ERROR: Unrecognized size suffix in '$1'." >&2
      exit 1
      ;;
  esac
  echo $((value * mult))
}

#####################################
#            MAIN SCRIPT            #
#####################################

# Parse primary arguments (flags, if any, follow the first three)
SOURCE_DIR="${1:-}"
DEST_DIR="${2:-}"
CHUNK_SIZE="${3:-$DEFAULT_CHUNK_SIZE}"

# Shift away the first 3 arguments if present
shift 3 2>/dev/null || true

CREATE_ISO=false
BURN_MEDIA=false

# Parse flags
for arg in "$@"; do
  case "$arg" in
    --create-iso) CREATE_ISO=true ;;
    --burn)       BURN_MEDIA=true ;;
    *) ;;  # unknown trailing arguments are ignored, as before
  esac
done

# Basic checks
if [[ -z "$SOURCE_DIR" || -z "$DEST_DIR" ]]; then
  usage
fi

if [[ ! -d "$SOURCE_DIR" ]]; then
  echo "ERROR: Source directory '$SOURCE_DIR' does not exist." >&2
  exit 1
fi

if [[ ! -d "$DEST_DIR" ]]; then
  echo "ERROR: Destination directory '$DEST_DIR' does not exist." >&2
  exit 1
fi

# Fail early if a required tool is missing, rather than mid-backup.
for tool in tar lz4 gpg; do
  if ! command -v "$tool" >/dev/null 2>&1; then
    echo "ERROR: Required command '$tool' not found in PATH." >&2
    exit 1
  fi
done

# Prompt for GPG passphrase. It is later fed to gpg over a file descriptor,
# not argv, so it never appears in the process table.
echo -n "Enter GPG passphrase (will not be displayed): "
read -rs GPG_PASSPHRASE
echo

# Create a working directory
WORK_DIR="${DEST_DIR}/individual_chunks_$(date +%Y%m%d_%H%M%S)"
mkdir -p "$WORK_DIR"

# Create a manifest file to track chunk -> files mapping and checksums
MANIFEST_FILE="${WORK_DIR}/${MANIFEST_NAME}"
{
  echo "Manifest for independent chunks backup"
  echo "Source: $SOURCE_DIR"
  echo "Timestamp: $(date)"
  echo "Chunk size limit: $CHUNK_SIZE"
  echo
} > "$MANIFEST_FILE"

# Temp files are removed on any exit path (success, error, or Ctrl-C).
TEMP_FILE_LIST=$(mktemp)
TMP_CHUNK_LIST=$(mktemp)
cleanup() { rm -f -- "$TEMP_FILE_LIST" "$TMP_CHUNK_LIST"; }
trap cleanup EXIT

# Step 1: Collect all files with their sizes and sort them (ascending by size).
# NOTE(review): file paths containing newlines will corrupt this line-based list.
if find "$SOURCE_DIR" -maxdepth 0 -printf '' >/dev/null 2>&1; then
  find "$SOURCE_DIR" -type f -printf '%s %p\n' | sort -n > "$TEMP_FILE_LIST"
else
  # BSD/macOS find has no -printf; stat -f emits "<size> <path>" instead.
  find "$SOURCE_DIR" -type f -exec stat -f '%z %N' {} + | sort -n > "$TEMP_FILE_LIST"
fi

CHUNK_INDEX=1
CURRENT_CHUNK_SIZE=0
MAX_CHUNK_BYTES=$(bytes_from_iec "$CHUNK_SIZE")

# Reset the per-chunk file list and size accumulator.
start_new_chunk() {
  : > "$TMP_CHUNK_LIST"
  CURRENT_CHUNK_SIZE=0
}

#######################################
# Archive the files listed in TMP_CHUNK_LIST as one independent chunk:
#   1) tar  2) lz4  3) gpg (AES256)  4) optional ISO  5) optional burn
#   6) manifest entry with SHA-256
# Increments CHUNK_INDEX and resets the chunk state afterwards.
# Globals: TMP_CHUNK_LIST, CHUNK_INDEX, WORK_DIR, MANIFEST_FILE,
#          CREATE_ISO, BURN_MEDIA, GPG_PASSPHRASE (read)
#######################################
finalize_chunk() {
  local chunk_name chunk_path
  chunk_name=$(printf "chunk_%03d.tar.lz4.gpg" "$CHUNK_INDEX")
  chunk_path="${WORK_DIR}/${chunk_name}"

  echo
  echo "==> Creating chunk #$CHUNK_INDEX: $chunk_name"

  # Tar + lz4 + gpg pipeline. The passphrase goes to gpg over fd 3
  # (--passphrase-fd) so it is never visible in `ps`; --pinentry-mode
  # loopback is required for batch symmetric encryption with gpg 2.1+.
  tar -cf - -T "$TMP_CHUNK_LIST" \
    | lz4 -c \
    | gpg --batch --yes --pinentry-mode loopback --cipher-algo AES256 \
          --passphrase-fd 3 -c 3<<<"$GPG_PASSPHRASE" \
    > "$chunk_path"

  # Generate a SHA-256 sum for later media verification
  local sum_line
  sum_line=$(compute_sha256 "$chunk_path")

  # Add chunk info to manifest
  {
    echo "Chunk #$CHUNK_INDEX -> $chunk_name"
    echo "Files in this chunk:"
    cat "$TMP_CHUNK_LIST"
    echo ""
    echo "SHA256: $sum_line"
    echo "-----------------------------------"
    echo
  } >> "$MANIFEST_FILE"

  if [[ "$CREATE_ISO" = true ]]; then
    echo "==> Creating ISO for chunk #$CHUNK_INDEX"
    local iso_name iso_output temp_iso_dir
    iso_name=$(printf "chunk_%03d.iso" "$CHUNK_INDEX")
    iso_output="${WORK_DIR}/iso_chunks/${iso_name}"
    temp_iso_dir="${WORK_DIR}/temp_iso_dir_$CHUNK_INDEX"
    mkdir -p "${WORK_DIR}/iso_chunks" "$temp_iso_dir"

    # The ISO contains just this one encrypted archive.
    cp "$chunk_path" "$temp_iso_dir"/

    if command -v genisoimage >/dev/null 2>&1; then
      genisoimage -quiet -o "$iso_output" -V "ENCRYPTED_BACKUP_${CHUNK_INDEX}" "$temp_iso_dir"
    else
      # Fall back to mkisofs
      mkisofs -quiet -o "$iso_output" -V "ENCRYPTED_BACKUP_${CHUNK_INDEX}" "$temp_iso_dir"
    fi
    rm -rf "$temp_iso_dir"

    # If --burn is also requested, burn the ISO.
    if [[ "$BURN_MEDIA" = true ]]; then
      echo
      echo "Please insert a blank M-Disc for chunk #$CHUNK_INDEX (ISO): $iso_name"
      # Read from the terminal: stdin is redirected from the file list
      # in the main loop, so a bare `read` would eat a list entry.
      read -rp "Press [Enter] when ready to burn..." < /dev/tty
      if command -v growisofs >/dev/null 2>&1; then
        growisofs -Z /dev/sr0="$iso_output"
      elif [[ "$OSTYPE" == "darwin"* ]]; then
        # macOS example
        hdiutil burn "$iso_output"
      else
        echo "No recognized burner found. Please burn ${iso_output} manually."
      fi
    fi
  elif [[ "$BURN_MEDIA" = true ]]; then
    # Not creating an ISO, but burning the encrypted chunk file directly.
    echo
    echo "Please insert a blank M-Disc for chunk #$CHUNK_INDEX: $chunk_name"
    read -rp "Press [Enter] when ready to burn..." < /dev/tty
    if command -v growisofs >/dev/null 2>&1; then
      growisofs -Z /dev/sr0="$chunk_path"
    elif [[ "$OSTYPE" == "darwin"* ]]; then
      # hdiutil doesn't burn a raw file easily; it expects an .iso
      echo "On macOS, consider creating an ISO or using a different burning tool for $chunk_name."
    else
      echo "No recognized burner found. Please burn ${chunk_path} manually."
    fi
  fi

  # Plain arithmetic assignment: (( CHUNK_INDEX++ )) would return non-zero
  # when the pre-increment value is 0, tripping `set -e`.
  CHUNK_INDEX=$((CHUNK_INDEX + 1))
  start_new_chunk
}

# Initialize the first chunk
start_new_chunk

# Step 2: Pack files into chunks, finalizing the current chunk whenever the
# next file would push it past MAX_CHUNK_BYTES.
while IFS= read -r line; do
  FILE_SIZE=${line%% *}
  FILE_PATH=${line#* }

  if (( FILE_SIZE > MAX_CHUNK_BYTES )); then
    echo "WARNING: '$FILE_PATH' ($FILE_SIZE bytes) is larger than the chunk limit; its chunk will exceed $CHUNK_SIZE." >&2
  fi

  # If adding this file exceeds the chunk limit, finalize the current chunk
  # now (only if it already holds at least one file).
  if (( CURRENT_CHUNK_SIZE + FILE_SIZE > MAX_CHUNK_BYTES )) && [[ -s "$TMP_CHUNK_LIST" ]]; then
    finalize_chunk
  fi

  # Add the file to the chunk
  echo "$FILE_PATH" >> "$TMP_CHUNK_LIST"
  CURRENT_CHUNK_SIZE=$((CURRENT_CHUNK_SIZE + FILE_SIZE))
done < "$TEMP_FILE_LIST"

# Finalize the last chunk if it has leftover files
if [[ -s "$TMP_CHUNK_LIST" ]]; then
  finalize_chunk
fi

echo
echo "=== All chunks created ==="
echo "Your chunks (and possibly ISOs) are located in:"
echo "  $WORK_DIR"
echo
echo "Manifest: $MANIFEST_FILE"
echo "-----------------------------------"
echo "Done!"

# Temp files are removed by the EXIT trap.
exit 0