#!/bin/bash

# Enter the specific work directory so logs and other files are collected in one place.
cd /opt/AZURE || exit 1

# Configurable variables
# Basic variables needed for this script to operate correctly.
BUSINESS_HOURS_START=10
BUSINESS_HOURS_END=20
AZURE_ACCOUNT=""                      # Storage account URL, e.g. https://<account>.blob.core.windows.net
AZURE_SAS=""                          # SAS token granting write access to the target containers
LOCK_FILE="/tmp/run_azcopy.lock"

# Arguments
# Taken from the command line. The first is mandatory; there is a check below if it is not provided.
SOURCE_LIST_FILE="$1"
LOGGING="${2:-false}"                 # Default to false
BANDWIDTH_CAP="${3:-0}"               # Default is 0 (no cap)

# Report files
# Names for the per-run log file and a simple completion report.
TIMESTAMP=$(date +"%Y%m%d_%H%M%S")
LOG_FILE="azcopy_log_$TIMESTAMP.txt"
COMPLETION_REPORT="completion_report_$TIMESTAMP.txt"

# Ensure the source list file is provided
# This is the check for the source list file; there is no need to run if it is not present.
if [[ -z "$SOURCE_LIST_FILE" || ! -f "$SOURCE_LIST_FILE" ]]; then
    echo "Usage: $0 <source_list_file> [log=true|false] [bandwidth_mbps]"
    exit 1
fi

# Lock file to prevent multiple instances
# Stops a second copy of this script from running alongside an existing one.
if [[ -f "$LOCK_FILE" ]]; then
    PID=$(cat "$LOCK_FILE")
    if kill -0 "$PID" 2>/dev/null; then
        echo "Another instance (PID $PID) is already running. Exiting..."
        exit 1
    else
        echo "Stale lock file found. Removing..."
        rm -f "$LOCK_FILE"
    fi
fi
echo $$ > "$LOCK_FILE"

# Function to check business hours
# Ensures we do not run at times when the server might need to do other things.
is_business_hours() {
    # Force base 10 so early-morning hours (00, 08, 09) are not misread as octal;
    # this is why the earlier printf "%d" approach failed for 08 and 09.
    HOUR=$((10#$(date +%H)))
    [[ $HOUR -ge $BUSINESS_HOURS_START && $HOUR -lt $BUSINESS_HOURS_END ]]
}

# Stop if running during business hours
# Uses the function above to abort before any transfer starts.
if is_business_hours; then
    echo "Business hours detected ($BUSINESS_HOURS_START:00 - $BUSINESS_HOURS_END:00). Exiting..."
    rm -f "$LOCK_FILE"
    exit 1
fi

echo "Starting sync job at $(date)" | tee -a "$LOG_FILE"

# Read the directory list file into an array
# Done this way because cron runs the script in a separate shell environment.
mapfile -t SYNC_JOBS < "$SOURCE_LIST_FILE"

# The actual part of the script that does the job.
# Loops through the array and processes each entry, one directory at a time.
# It also kills the transfer if it does not complete before the restriction window applies,
# and checks that each destination container exists, stopping if it does not.
for LINE in "${SYNC_JOBS[@]}"; do
    IFS=">" read -r SOURCE_DIR DEST_CONTAINER <<< "$LINE"
    SOURCE_DIR=$(echo "$SOURCE_DIR" | xargs)         # Trim spaces
    DEST_CONTAINER=$(echo "$DEST_CONTAINER" | xargs) # Trim spaces

    if [[ -z "$SOURCE_DIR" || ! -d "$SOURCE_DIR" ]]; then
        echo "ERROR: Invalid directory: $SOURCE_DIR. Exiting." | tee -a "$LOG_FILE"
        rm -f "$LOCK_FILE"
        exit 1
    fi

    if [[ -z "$DEST_CONTAINER" ]]; then
        echo "ERROR: No destination container specified for $SOURCE_DIR. Exiting." | tee -a "$LOG_FILE"
        rm -f "$LOCK_FILE"
        exit 1
    fi

    DEST_URL="$AZURE_ACCOUNT/$DEST_CONTAINER"
    echo "Syncing $SOURCE_DIR to container: $DEST_CONTAINER" | tee -a "$LOG_FILE"

    # Check that the container exists by attempting to upload a small test file.
    # Note: the test file is left behind in the container; clean it up there if that matters.
    TEST_FILE="$SOURCE_DIR/.azcopy_test_file"
    touch "$TEST_FILE"
    azcopy cp "$TEST_FILE" "$DEST_URL?$AZURE_SAS" > /dev/null 2>&1
    if [[ $? -ne 0 ]]; then
        echo "ERROR: Destination container $DEST_CONTAINER does not exist or is inaccessible. Exiting." | tee -a "$LOG_FILE"
        rm -f "$TEST_FILE"
        rm -f "$LOCK_FILE"
        exit 1
    fi
    rm -f "$TEST_FILE"

    # Run azcopy in the background (one directory at a time)
    if [[ "$LOGGING" == "true" ]]; then
        # Process substitution (instead of piping into tee) keeps azcopy as the background job,
        # so $! below is the azcopy PID rather than the PID of tee.
        azcopy sync "$SOURCE_DIR" "$DEST_URL?$AZURE_SAS" --recursive --cap-mbps "$BANDWIDTH_CAP" > >(tee -a "$LOG_FILE") &
    else
        azcopy sync "$SOURCE_DIR" "$DEST_URL?$AZURE_SAS" --recursive --cap-mbps "$BANDWIDTH_CAP" > /dev/null 2>&1 &
    fi
    AZCOPY_PID=$!

    # Monitor the process every 30 seconds
    while kill -0 "$AZCOPY_PID" 2>/dev/null; do
        if is_business_hours; then
            echo -e "\nBusiness hours started! Stopping azcopy..." | tee -a "$LOG_FILE"
            kill "$AZCOPY_PID"
            wait "$AZCOPY_PID" 2>/dev/null  # Ensure the process stops completely
            rm -f "$LOCK_FILE"
            exit 1
        fi
        sleep 30  # Check every 30 seconds
    done

    # Check whether the sync failed. wait returns the exit status of the finished
    # background job; checking $? of the while loop above would not.
    wait "$AZCOPY_PID"
    if [[ $? -ne 0 ]]; then
        echo "ERROR: Sync failed for $SOURCE_DIR to $DEST_CONTAINER. Stopping script." | tee -a "$LOG_FILE"
        rm -f "$LOCK_FILE"
        exit 1
    fi
done

echo "All directories synced successfully!" | tee -a "$LOG_FILE"

# Generate completion report
echo "Sync Completed: $(date)" > "$COMPLETION_REPORT"
echo "All directories listed in $SOURCE_LIST_FILE have been synced." >> "$COMPLETION_REPORT"
echo "Completion report generated: $COMPLETION_REPORT"

rm -f "$LOCK_FILE"
exit 0