-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: backup.sh
More file actions
executable file
·935 lines (824 loc) · 37.7 KB
/
backup.sh
File metadata and controls
executable file
·935 lines (824 loc) · 37.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
#!/bin/bash
# backup.sh - Main backup script for backup-webdev
# Performs full, incremental, or differential backups of web development projects
#
# STRUCTURE: (1) Parse CLI args → (2) Validate config & discover projects →
# (3) Main backup loop per project → (4) Reports & history
# Source the shared modules
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "$SCRIPT_DIR/config.sh"
source "$SCRIPT_DIR/utils.sh" # Contains check_required_tools and other utility functions
source "$SCRIPT_DIR/ui.sh"
source "$SCRIPT_DIR/fs.sh" # Contains find_projects and file operations
source "$SCRIPT_DIR/reporting.sh"
# --- Runtime option defaults -------------------------------------------------
# The verification default is inherited from config.sh (DEFAULT_VERIFY_BACKUP).
SILENT_MODE=false
INCREMENTAL_BACKUP=false
DIFFERENTIAL_BACKUP=false
VERIFY_BACKUP="${DEFAULT_VERIFY_BACKUP:-true}"
THOROUGH_VERIFY=false
COMPRESSION_LEVEL=6
EMAIL_NOTIFICATION=""
CLOUD_PROVIDER=""
BANDWIDTH_LIMIT=0
PARALLEL_THREADS=1
CUSTOM_BACKUP_DIR=""
CUSTOM_SOURCE_DIRS=()
DRY_RUN=false
EXTERNAL_BACKUP=false        # true when this run targets a cloud provider
QUICK_BACKUP=false           # --quick: silent, but with per-folder progress like interactive
EXCLUDE_FILE=""              # temp file of excluded project names; trap below cleans it up
VERIFY_EXPLICITLY_SET=false  # true when --verify/--no-verify/--thorough-verify was passed

# Remove the exclusions temp file on every exit path
trap 'rm -f "${EXCLUDE_FILE}" 2>/dev/null' EXIT

# Delegate command-line parsing to the shared argument parser
source "$SCRIPT_DIR/parse-backup-args.sh"

# The dashboard and per-folder status are shown when running interactively,
# or when --quick was given (so a real backup looks like a dry-run display).
if [[ "$SILENT_MODE" == false || "$QUICK_BACKUP" == true ]]; then
  SHOW_PROGRESS=true
else
  SHOW_PROGRESS=false
fi
# Resolve the list of source directories: user-supplied (-s) values win over
# the defaults that config.sh provides.
SOURCE_DIRS=()
if [ "${#CUSTOM_SOURCE_DIRS[@]}" -gt 0 ]; then
  for dir in "${CUSTOM_SOURCE_DIRS[@]}"; do
    # Expand a leading tilde the shell did not expand (e.g. it was quoted)
    SOURCE_DIRS+=("${dir/#\~/$HOME}")
  done
else
  SOURCE_DIRS=("${DEFAULT_SOURCE_DIRS[@]}")
fi

# Legacy single-directory variable kept for backward compatibility
SOURCE_DIR="${SOURCE_DIRS[0]}"

# Every source directory must exist before any work starts
for dir in "${SOURCE_DIRS[@]}"; do
  if [[ -d "$dir" ]]; then
    continue
  fi
  echo -e "${RED}ERROR: Source directory does not exist: $dir${NC}"
  echo "Please specify a valid source directory using the -s option"
  exit 1
done

# A custom destination overrides the default BACKUP_DIR from config.sh
if [ -n "$CUSTOM_BACKUP_DIR" ]; then
  BACKUP_DIR="$CUSTOM_BACKUP_DIR"
fi
# Validate the backup destination.
# A dry run must not create or write anything, so it only checks writability;
# a real run lets verify_directory (fs.sh) validate/create the directory.
if [ "$DRY_RUN" = true ]; then
  if [ -d "$BACKUP_DIR" ]; then
    [ -w "$BACKUP_DIR" ] || {
      echo -e "${RED}ERROR: Backup destination directory is not writable: $BACKUP_DIR${NC}"
      exit 1
    }
  else
    # Destination absent: a real run would create it, so its parent must
    # exist and be writable.
    parent=$(dirname "$BACKUP_DIR")
    [ -d "$parent" ] || {
      echo -e "${RED}ERROR: Parent of backup destination does not exist: $parent${NC}"
      echo "No files were backed up. For dry-run the destination path need not exist, but its parent must."
      exit 1
    }
    [ -w "$parent" ] || {
      echo -e "${RED}ERROR: Parent of backup destination is not writable: $parent${NC}"
      exit 1
    }
  fi
else
  verify_directory "$BACKUP_DIR" "Backup destination" true || {
    echo "No files were backed up. Please check directory permissions."
    exit 1
  }
fi
# Label this run for reporting; incremental takes precedence over differential,
# and "full" is the fallback.
if [[ "$INCREMENTAL_BACKUP" == true ]]; then
  BACKUP_TYPE="incremental"
elif [[ "$DIFFERENTIAL_BACKUP" == true ]]; then
  BACKUP_TYPE="differential"
else
  BACKUP_TYPE="full"
fi

# Backup folder name, built from the OS-agnostic BACKUP_PREFIX (config.sh);
# a VERIFIED marker is embedded when verification is enabled. These paths are
# recomputed later if the interactive verification prompt flips the setting.
BACKUP_NAME="${BACKUP_PREFIX}_$DATE"
if [[ "$VERIFY_BACKUP" == true ]]; then
  BACKUP_NAME="${BACKUP_PREFIX}_VERIFIED_$DATE"
fi
FULL_BACKUP_PATH="$BACKUP_DIR/$BACKUP_NAME"
LOG_FILE="$FULL_BACKUP_PATH/backup_log.log"
STATS_FILE="$FULL_BACKUP_PATH/backup_stats.txt"
METADATA_FILE="$FULL_BACKUP_PATH/backup_metadata.json"
# Timestamps: human-readable for reports, epoch seconds for duration math
START_TIME=$(date '+%Y-%m-%d %H:%M:%S')
START_TIME_SECONDS=$(date +%s)

# Opening log entries describing this run's configuration
log "Starting backup script" "$LOG_FILE" "$SILENT_MODE"
log "Source directories: ${SOURCE_DIRS[*]}" "$LOG_FILE" "$SILENT_MODE"
if [ "${SOURCE_DIRS[*]}" = "${DEFAULT_SOURCE_DIRS[*]}" ]; then
  log "Using default source directories: ${DEFAULT_SOURCE_DIRS[*]}" "$LOG_FILE" "$SILENT_MODE"
fi
log "Backup destination: $FULL_BACKUP_PATH" "$LOG_FILE" "$SILENT_MODE"
if [ "$BACKUP_DIR" = "$DEFAULT_BACKUP_DIR" ]; then
  log "Using default backup directory: $DEFAULT_BACKUP_DIR" "$LOG_FILE" "$SILENT_MODE"
fi
log "Backup type: $BACKUP_TYPE" "$LOG_FILE" "$SILENT_MODE"
log "Compression level: $COMPRESSION_LEVEL" "$LOG_FILE" "$SILENT_MODE"
[ "$PARALLEL_THREADS" -gt 1 ] && log "Using parallel compression with $PARALLEL_THREADS threads" "$LOG_FILE" "$SILENT_MODE"
[ "$SILENT_MODE" = true ] && log "Running in silent mode (non-interactive)" "$LOG_FILE" "$SILENT_MODE"
if [ "$DRY_RUN" = true ]; then
  log "Running in dry-run mode (no actual backups will be created)" "$LOG_FILE" "$SILENT_MODE"
  echo -e "\n${YELLOW}DRY RUN MODE: Simulating backup operations without making changes${NC}"
fi

# Abort early if the core archiving tools are missing
check_required_tools tar gzip || handle_error 3 "Required tools not installed" "$LOG_FILE" "$SILENT_MODE"
# pigz provides multi-threaded gzip; warn and fall back when it is absent
if [ "$PARALLEL_THREADS" -gt 1 ] && ! command -v pigz >/dev/null 2>&1; then
  log "Warning: pigz not found, parallel compression not available. Using standard compression." "$LOG_FILE" "$SILENT_MODE"
fi
# Discover candidate project directories, one level below each source dir
projects=()
for dir in "${SOURCE_DIRS[@]}"; do
  log "Searching for projects in: $dir" "$LOG_FILE" "$SILENT_MODE"
  dir_projects=()
  if [ "$QUICK_BACKUP" = true ]; then
    # --quick bounds the search with a timeout so it cannot hang; the hidden
    # .ssh directory is explicitly allowed through the hidden-dir filter
    dir_project_output=$(run_with_timeout 30 find "$dir" -maxdepth 1 -mindepth 1 -type d \( -name ".ssh" -o -not -path "*/\.*" \) -not -path "*/node_modules*" | sort)
    if [ -n "$dir_project_output" ]; then
      while IFS= read -r line; do dir_projects+=("$line"); done <<< "$dir_project_output"
      log "Found ${#dir_projects[@]} projects in $dir" "$LOG_FILE" "$SILENT_MODE"
      projects+=("${dir_projects[@]}")
    else
      log "No projects found in $dir (or search timed out)" "$LOG_FILE" "$SILENT_MODE"
    fi
  else
    # Standard path: delegate to find_projects (fs.sh), read line-by-line
    # via process substitution
    while IFS= read -r line; do dir_projects+=("$line"); done < <(find_projects "$dir" 1)
    log "Found ${#dir_projects[@]} projects in $dir" "$LOG_FILE" "$SILENT_MODE"
    projects+=("${dir_projects[@]}")
  fi
done

# ~/.ssh is mandatory whenever the home directory itself is a source
for dir in "${SOURCE_DIRS[@]}"; do
  [ "$dir" = "$HOME" ] && [ -d "$HOME/.ssh" ] || continue
  ssh_found=false
  for proj in "${projects[@]}"; do
    if [ "$proj" = "$HOME/.ssh" ]; then
      ssh_found=true
      break
    fi
  done
  if [ "$ssh_found" = false ]; then
    projects+=("$HOME/.ssh")
    log "Added mandatory .ssh directory to backup list" "$LOG_FILE" "$SILENT_MODE"
  fi
done

# Nothing to back up is a hard error
if [ "${#projects[@]}" -eq 0 ]; then
  handle_error 2 "No projects found in ${SOURCE_DIRS[*]}" "$LOG_FILE" "$SILENT_MODE"
fi
# Project selection.
# Interactive mode lists all discovered projects and lets the user exclude
# some by index; silent/--quick modes back up everything without prompting.
# Chosen exclusions are staged in a temp file, then read into an array.
EXCLUDE_FILE=$(mktemp)
log "Found ${#projects[@]} projects in ${SOURCE_DIRS[*]}" "$LOG_FILE" "$SILENT_MODE"
if [ "$SILENT_MODE" = false ]; then
  echo -e "\n${CYAN}===== WebDev Backup Tool =====${NC}"
  echo -e "${CYAN}Started at: $(date)${NC}\n"
  # Extract just the project names from the full paths.
  # FIX: the command substitution is quoted — the previous unquoted form
  # word-split project names containing spaces into multiple array elements.
  project_names=()
  for project_path in "${projects[@]}"; do
    project_names+=("$(basename "$project_path")")
  done
  echo "Projects to backup (all selected by default):"
  for ((i=0; i<${#project_names[@]}; i++)); do
    echo "[$i] ${project_names[$i]}"
  done
  echo -e "\nTo exclude projects from backup, enter their numbers separated by spaces."
  echo "Press Enter to backup all projects."
  read -p "> " response
  # Validate each entered token: must be a number within range
  for num in $response; do
    if [[ "$num" =~ ^[0-9]+$ ]] && [ "$num" -lt "${#project_names[@]}" ]; then
      log "Excluding project: ${project_names[$num]}" "$LOG_FILE" "$SILENT_MODE"
      echo "${project_names[$num]}" >> "$EXCLUDE_FILE"
    else
      log "Warning: Invalid project number: $num" "$LOG_FILE" "$SILENT_MODE"
      echo -e "${YELLOW}Warning: Invalid project number: $num${NC}"
    fi
  done
else
  # Silent mode or Quick backup - backup everything, no interaction
  log "Silent mode or Quick backup: Backing up all projects" "$LOG_FILE" "$SILENT_MODE"
fi
# Load the staged exclusions into an array for the main loop
EXCLUDED_PROJECTS=()
while IFS= read -r line; do
  EXCLUDED_PROJECTS+=("$line")
done < "$EXCLUDE_FILE"
# Prepare for backup
log "Starting backup process..." "$LOG_FILE" "$SILENT_MODE"
# Initialize dashboard when showing progress (interactive or --quick)
if [ "$SHOW_PROGRESS" = true ]; then
print_dashboard_header
# Add a note about compression and backup type
echo -e "Backup Type: $(capitalize "$BACKUP_TYPE")"
echo -e "Compression Level: $COMPRESSION_LEVEL"
# Show storage information and path in a more visible way
echo -e "${YELLOW}=============================================${NC}"
if [ "$EXTERNAL_BACKUP" = true ]; then
echo -e "${CYAN}STORAGE TYPE: CLOUD ($CLOUD_PROVIDER)${NC}"
echo -e "${CYAN}BACKUP PATH: Cloud provider: $CLOUD_PROVIDER${NC}"
else
if [ "$BACKUP_DIR" = "$DEFAULT_BACKUP_DIR" ]; then
echo -e "${GREEN}STORAGE TYPE: LOCAL PROJECT STORAGE (Default)${NC}"
else
echo -e "${GREEN}STORAGE TYPE: EXTERNAL VOLUME STORAGE${NC}"
fi
# NOTE(review): this path is printed before the verification prompt below may
# rename the backup folder, so the final path can differ from what is shown here.
echo -e "${GREEN}BACKUP PATH: $FULL_BACKUP_PATH${NC}"
fi
echo -e "${YELLOW}=============================================${NC}"
echo -e "Note: Each project's node_modules directory will be excluded"
# Ask about verification if not already set via command line (default from config)
# The prompt's default answer mirrors DEFAULT_VERIFY_BACKUP from config.sh.
if [ "$SILENT_MODE" = false ] && [ "$VERIFY_EXPLICITLY_SET" = false ]; then
echo ""
echo -e "${CYAN}Backup Verification:${NC}"
if [ "${DEFAULT_VERIFY_BACKUP}" = true ]; then
echo -e " Verification is ${GREEN}ON${NC} by default (recommended)"
read -p " Enable verification? [Y/n]: " verify_choice
if [[ "$verify_choice" =~ ^[Nn]$ ]]; then
VERIFY_BACKUP=false
echo -e " ${YELLOW}Verification disabled for this run${NC}"
else
VERIFY_BACKUP=true
echo -e " ${GREEN}Verification enabled${NC}"
fi
else
echo -e " Verification is ${YELLOW}OFF${NC} by default (faster backups)"
read -p " Enable verification? [y/N]: " verify_choice
if [[ "$verify_choice" =~ ^[Yy]$ ]]; then
VERIFY_BACKUP=true
echo -e " ${GREEN}Verification enabled${NC}"
else
VERIFY_BACKUP=false
echo -e " ${YELLOW}Verification disabled${NC}"
fi
fi
elif [ "$VERIFY_BACKUP" = true ]; then
echo -e "${GREEN}Backup verification will be performed after completion${NC}"
else
echo -e "${YELLOW}Backup verification is disabled (use --verify to enable)${NC}"
fi
echo ""
# Update backup name if verification status changed
# Use BACKUP_PREFIX from config.sh for OS-agnostic naming
if [ "$VERIFY_BACKUP" = true ]; then
BACKUP_NAME="${BACKUP_PREFIX}_VERIFIED_$DATE"
else
BACKUP_NAME="${BACKUP_PREFIX}_$DATE"
fi
# Recompute every dependent path for the possibly-renamed backup folder
FULL_BACKUP_PATH="$BACKUP_DIR/$BACKUP_NAME"
LOG_FILE="$FULL_BACKUP_PATH/backup_log.log"
STATS_FILE="$FULL_BACKUP_PATH/backup_stats.txt"
METADATA_FILE="$FULL_BACKUP_PATH/backup_metadata.json"
fi
# Create backup directory and files only when not in dry-run (dry-run must not write anything)
if [ "$DRY_RUN" != true ]; then
if ! mkdir -p "$FULL_BACKUP_PATH"; then
echo -e "${RED}ERROR: Failed to create backup directory: $FULL_BACKUP_PATH${NC}"
echo "No files were backed up. Please check directory permissions."
exit 1
fi
# Pre-create the log/stats/metadata files so later appends cannot fail mid-backup
for file in "$LOG_FILE" "$STATS_FILE" "$METADATA_FILE"; do
if ! touch "$file"; then
echo -e "${RED}ERROR: Failed to create file: $file${NC}"
echo "No files were backed up. The filesystem may be read-only or full."
exit 1
fi
done
fi
# Aggregate counters for the summary, report, and history entries
TOTAL_SRC_SIZE=0
TOTAL_BACKUP_SIZE=0
SUCCESSFUL_PROJECTS=0
FAILED_PROJECTS=0

# Main loop: archive each project, then optionally verify and upload it
for project_path in "${projects[@]}"; do
  project=$(basename "$project_path")

  # Skip excluded projects, BUT always include .ssh (mandatory)
  if [[ " ${EXCLUDED_PROJECTS[*]} " == *" $project "* ]] && [[ "$project" != ".ssh" ]]; then
    log "Skipping excluded project: $project" "$LOG_FILE" "$SILENT_MODE"
    continue
  fi
  if [[ "$project" == ".ssh" ]]; then
    log "Backing up mandatory .ssh directory: $project_path" "$LOG_FILE" "$SILENT_MODE"
  fi

  PROJECT_BACKUP_FILE="$FULL_BACKUP_PATH/${project}_${DATE}.tar.gz"

  # Source size excludes node_modules, mirroring the archive exclusion below
  PROJECT_SRC_SIZE=$(get_directory_size "$project_path" "node_modules")
  TOTAL_SRC_SIZE=$((TOTAL_SRC_SIZE + PROJECT_SRC_SIZE))
  FORMATTED_SRC_SIZE=$(format_size "$PROJECT_SRC_SIZE")
  log "Processing project: $project (Size: $FORMATTED_SRC_SIZE)" "$LOG_FILE" "$SILENT_MODE"
  if [ "$SHOW_PROGRESS" = true ]; then
    print_dashboard_row "$project" "$FORMATTED_SRC_SIZE" "COMPRESSING..."
  fi

  # Run the appropriate backup flavor. A spinner is shown only for silent
  # --quick runs; interactive runs get their progress from fs.sh itself.
  SPIN_PID=""
  if [ "$DRY_RUN" = true ]; then
    # Simulate the backup: log it, mark success, invent a plausible ratio
    log "DRY RUN: Would create backup of $project to $PROJECT_BACKUP_FILE" "$LOG_FILE" "$SILENT_MODE"
    if [ "$SHOW_PROGRESS" = true ]; then
      echo -e "${YELLOW}DRY RUN: Would create backup of $project (Size: $FORMATTED_SRC_SIZE)${NC}"
    fi
    success=true
    RATIO=$(awk "BEGIN {printf \"%.1f\", 2.0 + rand()}")
    ARCHIVE_SIZE=$(awk "BEGIN {printf \"%.0f\", $PROJECT_SRC_SIZE / $RATIO}")
  elif [ "$INCREMENTAL_BACKUP" = true ]; then
    # Incremental: a per-project tar snapshot records what changed since last run
    SNAPSHOT_DIR="$BACKUP_DIR/snapshots"
    mkdir -p "$SNAPSHOT_DIR"
    SNAPSHOT_FILE="$SNAPSHOT_DIR/${project}_snapshot.snar"
    if [ "$SHOW_PROGRESS" = true ] && [ "$SILENT_MODE" = true ]; then
      ( spinner_loop "$project" "$FORMATTED_SRC_SIZE" "COMPRESSING" ) &
      SPIN_PID=$!
    fi
    if create_incremental_backup \
      "$(dirname "$project_path")" \
      "$project" \
      "$PROJECT_BACKUP_FILE" \
      "$SNAPSHOT_FILE" \
      "$LOG_FILE" \
      "$COMPRESSION_LEVEL"; then
      success=true
    else
      success=false
    fi
    if [ -n "$SPIN_PID" ]; then stop_backup_spinner "$SPIN_PID"; fi
  elif [ "$DIFFERENTIAL_BACKUP" = true ]; then
    # Differential: always diff against the project's base snapshot
    SNAPSHOT_DIR="$BACKUP_DIR/snapshots"
    mkdir -p "$SNAPSHOT_DIR"
    BASE_SNAPSHOT="$SNAPSHOT_DIR/${project}_base_snapshot.snar"
    if [ "$SHOW_PROGRESS" = true ] && [ "$SILENT_MODE" = true ]; then
      ( spinner_loop "$project" "$FORMATTED_SRC_SIZE" "COMPRESSING" ) &
      SPIN_PID=$!
    fi
    if create_differential_backup \
      "$(dirname "$project_path")" \
      "$project" \
      "$PROJECT_BACKUP_FILE" \
      "$BASE_SNAPSHOT" \
      "$LOG_FILE" \
      "$COMPRESSION_LEVEL"; then
      success=true
    else
      success=false
    fi
    if [ -n "$SPIN_PID" ]; then stop_backup_spinner "$SPIN_PID"; fi
  else
    # Standard full backup
    if [ "$SHOW_PROGRESS" = true ] && [ "$SILENT_MODE" = true ]; then
      ( spinner_loop "$project" "$FORMATTED_SRC_SIZE" "COMPRESSING" ) &
      SPIN_PID=$!
    fi
    if create_backup_archive \
      "$(dirname "$project_path")" \
      "$project" \
      "$PROJECT_BACKUP_FILE" \
      "$LOG_FILE" \
      "$COMPRESSION_LEVEL" \
      "*/node_modules/*" \
      "$PARALLEL_THREADS" \
      "$SILENT_MODE"; then
      success=true
    else
      success=false
    fi
    if [ -n "$SPIN_PID" ]; then stop_backup_spinner "$SPIN_PID"; fi
  fi

  # Process the result
  if [ "$success" = true ]; then
    # Dry-run already chose ARCHIVE_SIZE above; otherwise measure the archive
    if [ "$DRY_RUN" != true ]; then
      ARCHIVE_SIZE=$(get_file_size_bytes "$PROJECT_BACKUP_FILE")
    fi
    FORMATTED_ARCHIVE_SIZE=$(format_size "$ARCHIVE_SIZE")
    TOTAL_BACKUP_SIZE=$((TOTAL_BACKUP_SIZE + ARCHIVE_SIZE))
    # Compression ratio, guarding against division by zero
    if [ "$ARCHIVE_SIZE" -gt 0 ] && [ "$PROJECT_SRC_SIZE" -gt 0 ]; then
      RATIO=$(awk "BEGIN {printf \"%.1f\", ($PROJECT_SRC_SIZE/$ARCHIVE_SIZE)}")
    else
      RATIO="1.0"
    fi
    log "Project $project backed up successfully (Compressed: $FORMATTED_ARCHIVE_SIZE, Ratio: ${RATIO}x)" "$LOG_FILE" "$SILENT_MODE"
    # Structure file, stats, verification, and cloud upload only for real runs
    if [ "$DRY_RUN" != true ]; then
      # ASCII structure listing stored alongside the archive
      PROJECT_STRUCTURE_FILE="${FULL_BACKUP_PATH}/${project}_structure.txt"
      # NOTE(review): this 'structures' subdir is created but never written to —
      # the structure file above lands directly in FULL_BACKUP_PATH; confirm intent
      mkdir -p "${FULL_BACKUP_PATH}/structures"
      {
        echo "Structure of $project ($project_path):"
        echo "----------------------------------------"
        find "$project_path" -type d -o -type f | sort | while read -r path; do
          # Skip node_modules and most hidden entries, but keep .env files
          if [[ "$path" == *"node_modules"* ]]; then
            continue
          fi
          if [[ "$(basename "$path")" == .* ]] && [[ "$(basename "$path")" != ".env"* ]]; then
            continue
          fi
          # FIX: pattern quoted so glob characters in the project path are
          # treated literally during prefix removal
          rel_path="${path#"$project_path"/}"
          if [ "$path" = "$project_path" ]; then
            echo "$project/"
            continue
          fi
          # Depth = number of path separators in the relative path
          depth=$(echo "$rel_path" | tr -cd '/' | wc -c)
          prefix=""
          for ((i=0; i<depth; i++)); do
            prefix="${prefix}│ "
          done
          # Last sibling at this level gets the └── connector
          is_last=false
          parent_dir=$(dirname "$path")
          if [[ "$(find "$parent_dir" -mindepth 1 | sort | tail -n1)" == "$path" ]]; then
            is_last=true
          fi
          if [ "$depth" -gt 0 ]; then
            if [ "$is_last" = true ]; then
              prefix="${prefix%│ }└── "
            else
              prefix="${prefix%│ }├── "
            fi
          fi
          filename=$(basename "$path")
          if [ -d "$path" ]; then
            echo "$prefix$filename/"
          else
            echo "$prefix$filename"
          fi
        done
      } > "$PROJECT_STRUCTURE_FILE" 2>/dev/null
      # CSV stats row: name,path,src bytes,archive bytes,ratio,structure file
      echo "$project,$project_path,$PROJECT_SRC_SIZE,$ARCHIVE_SIZE,$RATIO,$PROJECT_STRUCTURE_FILE" >> "$STATS_FILE"
      # Verify backup if requested
      if [ "$VERIFY_BACKUP" = true ]; then
        log "Starting backup verification for $project" "$LOG_FILE" "$SILENT_MODE"
        VERIFY_SPIN_PID=""
        if [ "$SHOW_PROGRESS" = true ]; then
          printf "\033[1A"
          print_dashboard_row "$project" "$FORMATTED_ARCHIVE_SIZE" "VERIFYING..."
          ( spinner_loop "$project" "$FORMATTED_ARCHIVE_SIZE" "VERIFYING" ) &
          VERIFY_SPIN_PID=$!
        fi
        # THOROUGH_VERIFY asks verify_backup for its deeper level of checking
        if verify_backup "$PROJECT_BACKUP_FILE" "$LOG_FILE" "$SILENT_MODE" "$THOROUGH_VERIFY"; then
          if [ -n "$VERIFY_SPIN_PID" ]; then stop_backup_spinner "$VERIFY_SPIN_PID"; fi
          if [ "$THOROUGH_VERIFY" = true ]; then
            log "Thorough backup verification passed for $project" "$LOG_FILE" "$SILENT_MODE"
            if [ "$SHOW_PROGRESS" = true ]; then
              printf "\033[1A"
              print_dashboard_row "$project" "$FORMATTED_ARCHIVE_SIZE" "✓ FULLY VERIFIED (${RATIO}x)"
            fi
          else
            log "Backup verification passed for $project" "$LOG_FILE" "$SILENT_MODE"
            if [ "$SHOW_PROGRESS" = true ]; then
              printf "\033[1A"
              print_dashboard_row "$project" "$FORMATTED_ARCHIVE_SIZE" "✓ VERIFIED (${RATIO}x)"
            fi
          fi
        else
          if [ -n "$VERIFY_SPIN_PID" ]; then stop_backup_spinner "$VERIFY_SPIN_PID"; fi
          log "Backup verification FAILED for $project" "$LOG_FILE" "$SILENT_MODE"
          if [ "$SHOW_PROGRESS" = true ]; then
            printf "\033[1A"
            print_dashboard_row "$project" "$FORMATTED_ARCHIVE_SIZE" "⚠ VERIFY FAILED"
          fi
          # A failed verification counts the project as failed, not successful
          FAILED_PROJECTS=$((FAILED_PROJECTS + 1))
          continue
        fi
      elif [ "$SHOW_PROGRESS" = true ]; then
        printf "\033[1A"
        print_dashboard_row "$project" "$FORMATTED_ARCHIVE_SIZE" "✓ DONE (${RATIO}x)"
      fi
      # Upload to cloud if requested
      if [ -n "$CLOUD_PROVIDER" ]; then
        UPLOAD_SPIN_PID=""
        if [ "$SHOW_PROGRESS" = true ]; then
          printf "\033[1A"
          print_dashboard_row "$project" "$FORMATTED_ARCHIVE_SIZE" "UPLOADING..."
          ( spinner_loop "$project" "$FORMATTED_ARCHIVE_SIZE" "UPLOADING" ) &
          UPLOAD_SPIN_PID=$!
        fi
        if upload_to_cloud "$PROJECT_BACKUP_FILE" "$CLOUD_PROVIDER" "$LOG_FILE" "$BANDWIDTH_LIMIT" "$SILENT_MODE"; then
          if [ -n "$UPLOAD_SPIN_PID" ]; then stop_backup_spinner "$UPLOAD_SPIN_PID"; fi
          log "Project $project uploaded to $CLOUD_PROVIDER" "$LOG_FILE" "$SILENT_MODE"
          if [ "$SHOW_PROGRESS" = true ]; then
            printf "\033[1A"
            print_dashboard_row "$project" "$FORMATTED_ARCHIVE_SIZE" "✓ UPLOADED (${RATIO}x)"
          fi
        else
          if [ -n "$UPLOAD_SPIN_PID" ]; then stop_backup_spinner "$UPLOAD_SPIN_PID"; fi
          log "Failed to upload project $project to $CLOUD_PROVIDER" "$LOG_FILE" "$SILENT_MODE"
          if [ "$SHOW_PROGRESS" = true ]; then
            printf "\033[1A"
            print_dashboard_row "$project" "$FORMATTED_ARCHIVE_SIZE" "⚠ UPLOAD FAILED"
          fi
        fi
      fi
    fi
    SUCCESSFUL_PROJECTS=$((SUCCESSFUL_PROJECTS + 1))
  else
    # Record detailed error information
    error_details="Failed to back up project: $project (Path: $PROJECT_BACKUP_FILE)"
    log "$error_details" "$LOG_FILE" "$SILENT_MODE"
    if [ "$SHOW_PROGRESS" = true ]; then
      printf "\033[1A"
      print_dashboard_row "$project" "$FORMATTED_SRC_SIZE" "❌ FAILED"
      echo -e "${RED}ERROR: Backup failed for $project${NC}"
      echo -e "${YELLOW}File: $PROJECT_BACKUP_FILE${NC}"
    fi
    # Persistent failure record for troubleshooting (skip in dry-run).
    # FIX: 'local' is only valid inside a function — at top level it errored
    # and left failed_log unset, so a plain assignment is used here.
    if [ "$DRY_RUN" != true ]; then
      failed_log="${LOGS_DIR}/failed_backups.log"
      mkdir -p "$(dirname "$failed_log")"
      echo "$(date '+%Y-%m-%d %H:%M:%S') - FAILED BACKUP: $project" >> "$failed_log"
      # FIX: was $PROJECT_SRC_PATH, a variable never defined in this script
      echo " Source: $project_path" >> "$failed_log"
      echo " Target: $PROJECT_BACKUP_FILE" >> "$failed_log"
      echo " Backup Time: $(date)" >> "$failed_log"
      echo "--------------------------------------------------" >> "$failed_log"
    fi
    FAILED_PROJECTS=$((FAILED_PROJECTS + 1))
  fi
done
# Record end time
END_TIME=$(date '+%Y-%m-%d %H:%M:%S')
# Also record the end time in seconds since epoch for duration calculation
END_TIME_SECONDS=$(date +%s)
# Calculate duration in seconds
DURATION_SECONDS=$((END_TIME_SECONDS - START_TIME_SECONDS))
# Format duration in human-readable form
# (HH:MM:SS; hours are not capped, so runs over a day show hours > 24)
DURATION_FORMATTED=$(printf "%02d:%02d:%02d" $((DURATION_SECONDS/3600)) $((DURATION_SECONDS%3600/60)) $((DURATION_SECONDS%60)))
# Format total size
TOTAL_FORMATTED_SIZE=$(format_size "$TOTAL_BACKUP_SIZE")
# Create metadata file only when not in dry-run
# NOTE(review): heredoc values are interpolated without JSON escaping — paths
# containing double quotes or backslashes would produce invalid JSON; confirm
# inputs are constrained upstream.
if [ "$DRY_RUN" != true ]; then
cat > "$METADATA_FILE" << EOF
{
"backup_type": "$BACKUP_TYPE",
"backup_date": "$DATE",
"start_time": "$START_TIME",
"end_time": "$END_TIME",
"source_directory": "${SOURCE_DIRS[*]}",
"backup_directory": "$FULL_BACKUP_PATH",
"storage_type": "$([ "$EXTERNAL_BACKUP" = true ] && echo "external" || echo "internal")",
"cloud_provider": "${CLOUD_PROVIDER}",
"compression_level": $COMPRESSION_LEVEL,
"parallel_threads": $PARALLEL_THREADS,
"projects_total": ${#projects[@]},
"projects_successful": $SUCCESSFUL_PROJECTS,
"projects_failed": $FAILED_PROJECTS,
"source_size_bytes": $TOTAL_SRC_SIZE,
"backup_size_bytes": $TOTAL_BACKUP_SIZE,
"verified": $VERIFY_BACKUP,
"thorough_verification": $THOROUGH_VERIFY,
"dry_run": $DRY_RUN
}
EOF
fi
# Display summary when showing progress (interactive or --quick)
if [ "$SHOW_PROGRESS" = true ]; then
print_dashboard_footer "$TOTAL_FORMATTED_SIZE"
# Calculate overall ratio (use awk -v for macOS BSD awk compatibility)
if [ "$TOTAL_BACKUP_SIZE" -gt 0 ] && [ "$TOTAL_SRC_SIZE" -gt 0 ]; then
OVERALL_RATIO=$(awk -v s="$TOTAL_SRC_SIZE" -v b="$TOTAL_BACKUP_SIZE" 'BEGIN { printf "%.1f", s/b }')
else
# Degenerate sizes (zero bytes) fall back to a neutral 1.0 ratio
OVERALL_RATIO="1.0"
fi
# NOTE(review): OVERALL_RATIO is computed but not passed below —
# display_backup_summary (reporting.sh) may read it as a global; confirm.
display_backup_summary \
"$SUCCESSFUL_PROJECTS" \
"$FAILED_PROJECTS" \
"$TOTAL_SRC_SIZE" \
"$TOTAL_BACKUP_SIZE" \
"$FULL_BACKUP_PATH" \
"$EXTERNAL_BACKUP" \
"$CLOUD_PROVIDER" \
"$START_TIME" \
"$END_TIME" \
"$DURATION_SECONDS" \
"$DURATION_FORMATTED"
fi
# At the end of the process, also display the backup path clearly
if [ "$SILENT_MODE" = false ]; then
# After the summary, emphasize the backup location
echo -e "\n${YELLOW}=============================================${NC}"
echo -e "${GREEN}BACKUP COMPLETE${NC}"
if [ "$EXTERNAL_BACKUP" = true ]; then
echo -e "${CYAN}STORAGE TYPE: CLOUD ($CLOUD_PROVIDER)${NC}"
else
if [ "$BACKUP_DIR" = "$DEFAULT_BACKUP_DIR" ]; then
echo -e "${GREEN}STORAGE TYPE: LOCAL PROJECT STORAGE (Default)${NC}"
else
echo -e "${GREEN}STORAGE TYPE: EXTERNAL VOLUME STORAGE${NC}"
fi
fi
echo -e "${GREEN}BACKUP PATH: $FULL_BACKUP_PATH${NC}"
echo -e "${YELLOW}TIME INFORMATION:${NC}"
echo -e " Started: $START_TIME"
echo -e " Finished: $END_TIME"
echo -e " Duration: $DURATION_FORMATTED ($DURATION_SECONDS seconds)"
echo -e "${YELLOW}=============================================${NC}"
fi
# HTML report: never generated in dry-run. Otherwise it is produced when
# interactive, when an email was requested, or whenever progress was shown
# (e.g. --quick) so a "view report" option can be offered afterwards.
make_report=false
if [ "$DRY_RUN" != true ]; then
  if [ "$SILENT_MODE" = false ] || [ -n "$EMAIL_NOTIFICATION" ] || [ "$SHOW_PROGRESS" = true ]; then
    make_report=true
  fi
fi
if [ "$make_report" = true ]; then
  REPORT_FILE=$(create_backup_report \
    "$FULL_BACKUP_PATH" \
    "$SUCCESSFUL_PROJECTS" \
    "$FAILED_PROJECTS" \
    "$TOTAL_SRC_SIZE" \
    "$TOTAL_BACKUP_SIZE" \
    "$START_TIME" \
    "$END_TIME" \
    "$BACKUP_TYPE")
  if [ "$SILENT_MODE" = false ]; then
    echo -e "Detailed report saved to: $REPORT_FILE"
  fi
fi
# Email notification (skipped in dry-run). The HTML report path, when one was
# generated above, is handed to send_email_notification as an attachment.
if [ "$DRY_RUN" != true ] && [ -n "$EMAIL_NOTIFICATION" ]; then
  log "Sending email notification to $EMAIL_NOTIFICATION" "$LOG_FILE" "$SILENT_MODE"
  EMAIL_SUBJECT="WebDev Backup Report - $BACKUP_TYPE backup $(date '+%Y-%m-%d')"
  EMAIL_BODY=$(create_email_report \
    "$FULL_BACKUP_PATH" \
    "$SUCCESSFUL_PROJECTS" \
    "$FAILED_PROJECTS" \
    "$TOTAL_SRC_SIZE" \
    "$TOTAL_BACKUP_SIZE" \
    "$START_TIME" \
    "$END_TIME" \
    "$BACKUP_TYPE")
  if send_email_notification "$EMAIL_SUBJECT" "$EMAIL_BODY" "$EMAIL_NOTIFICATION" "$REPORT_FILE"; then
    log "Email notification sent successfully" "$LOG_FILE" "$SILENT_MODE"
    [ "$SILENT_MODE" = false ] && echo -e "${GREEN}✓ Email notification sent to $EMAIL_NOTIFICATION${NC}"
  else
    log "Failed to send email notification" "$LOG_FILE" "$SILENT_MODE"
    [ "$SILENT_MODE" = false ] && echo -e "${RED}Failed to send email notification${NC}"
  fi
fi
# gnuplot-based visuals — interactive, non-dry runs only. Both artifacts share
# the same preconditions, so they are produced under a single guard (chart
# first, dashboard second, matching the original output order).
if [ "$DRY_RUN" != true ] && [ "$SILENT_MODE" = false ]; then
  if command -v gnuplot >/dev/null 2>&1; then
    # Trend chart covering the 10 most recent backups
    HISTORY_CHART=$(generate_history_chart "$BACKUP_HISTORY_LOG" "$FULL_BACKUP_PATH/backup_history_chart.png" 10)
    [ -n "$HISTORY_CHART" ] && echo -e "Backup history chart saved to: $HISTORY_CHART"
    # Visual dashboard page
    DASHBOARD_FILE=$(create_visual_dashboard "$FULL_BACKUP_PATH" "$BACKUP_HISTORY_LOG")
    [ -n "$DASHBOARD_FILE" ] && echo -e "Visual dashboard available at: $DASHBOARD_FILE"
  fi
fi
# Post-run cleanup of scratch files
rm -f "$EXCLUDE_FILE"

# Sweep stray structure/report artifacts out of the top-level backup dir.
# A dry run wrote nothing at all, so there is nothing to sweep.
if [ "$DRY_RUN" != true ] && [ -d "$BACKUP_DIR" ]; then
  find "$BACKUP_DIR" -maxdepth 1 -name "*_structure.txt" -type f -delete 2>/dev/null
  [ -d "$BACKUP_DIR/structures" ] && rm -rf "$BACKUP_DIR/structures" 2>/dev/null
  find "$BACKUP_DIR" -maxdepth 1 -type f \( -name "*.txt" -o -name "*.log" -o -name "*.json" \) ! -name ".*" -delete 2>/dev/null
fi
# Append this run to the backup history log, newest entry first.
# Skipped entirely for dry runs, which must leave no trace on disk.
if [ "$DRY_RUN" != true ]; then
  # Create logs directory if it doesn't exist
  mkdir -p "$(dirname "$BACKUP_HISTORY_LOG")"
  # Build the multi-line entry; '\n' sequences are expanded by 'echo -e' below
  BACKUP_ENTRY="$(date '+%Y-%m-%d %H:%M:%S') - BACKUP: "
  if [ "$FAILED_PROJECTS" -eq 0 ]; then
    BACKUP_ENTRY+="SUCCESS\n"
  else
    BACKUP_ENTRY+="PARTIAL (WITH ERRORS)\n"
  fi
  BACKUP_ENTRY+=" Type: $(capitalize "$BACKUP_TYPE")\n"
  BACKUP_ENTRY+=" Storage: $([ "$EXTERNAL_BACKUP" = true ] && echo "EXTERNAL (${CLOUD_PROVIDER})" || echo "INTERNAL")\n"
  BACKUP_ENTRY+=" Projects: ${SUCCESSFUL_PROJECTS} succeeded, ${FAILED_PROJECTS} failed\n"
  BACKUP_ENTRY+=" Total Size: ${TOTAL_FORMATTED_SIZE}\n"
  BACKUP_ENTRY+=" Source: ${SOURCE_DIRS[*]}\n"
  BACKUP_ENTRY+=" Destination: ${FULL_BACKUP_PATH}\n"
  BACKUP_ENTRY+=" Started: ${START_TIME}\n"
  BACKUP_ENTRY+=" Finished: ${END_TIME}\n"
  BACKUP_ENTRY+=" Duration: ${DURATION_FORMATTED} (${DURATION_SECONDS} seconds)\n"
  # Add more details for failed backups
  if [ "$FAILED_PROJECTS" -gt 0 ]; then
    BACKUP_ENTRY+=" Failed Projects Log: ${LOGS_DIR}/failed_backups.log\n"
    if [ -f "${LOGS_DIR}/failed_backups.log" ]; then
      BACKUP_ENTRY+=" Failed Projects Summary:\n"
      # FIX: use 'tail -n N' — the historical 'tail -N' form is obsolete and
      # rejected by strictly POSIX tail implementations
      RECENT_FAILURES=$(grep "FAILED BACKUP:" "${LOGS_DIR}/failed_backups.log" | tail -n "${FAILED_PROJECTS}" | sed 's/.*FAILED BACKUP: / - /')
      BACKUP_ENTRY+="$RECENT_FAILURES\n"
    fi
  fi
  BACKUP_ENTRY+="--------------------------------------------------\n\n"
  # Prepend so the log stays in reverse chronological order
  if [ -f "$BACKUP_HISTORY_LOG" ]; then
    TEMP_LOG=$(mktemp)
    echo -e "$BACKUP_ENTRY" > "$TEMP_LOG"
    cat "$BACKUP_HISTORY_LOG" >> "$TEMP_LOG"
    mv "$TEMP_LOG" "$BACKUP_HISTORY_LOG"
  else
    echo -e "$BACKUP_ENTRY" > "$BACKUP_HISTORY_LOG"
  fi
  log "Backup record added to history log at $BACKUP_HISTORY_LOG" "$LOG_FILE" "$SILENT_MODE"
fi
# Final status when no progress was shown (silent, not --quick)
if [ "$SILENT_MODE" = true ] && [ "$SHOW_PROGRESS" = false ]; then
    # Plain, uncolored output suitable for cron mail / log capture.
    if [ "$DRY_RUN" = true ]; then
        echo "DRY RUN COMPLETED: Would backup $SUCCESSFUL_PROJECTS projects, Estimated size $TOTAL_FORMATTED_SIZE"
        echo "Source: ${SOURCE_DIRS[*]}"
        echo "Destination: $FULL_BACKUP_PATH"
    elif [ "$FAILED_PROJECTS" -eq 0 ]; then
        # Quoted test: matches the quoted form used when building the history
        # entry above, and avoids a test error if the variable is ever empty.
        echo "BACKUP SUCCESSFUL: $SUCCESSFUL_PROJECTS projects, Size $TOTAL_FORMATTED_SIZE"
        echo "Source: ${SOURCE_DIRS[*]}"
        echo "Destination: $FULL_BACKUP_PATH"
    else
        echo "BACKUP COMPLETED WITH ERRORS: $FAILED_PROJECTS failed, $SUCCESSFUL_PROJECTS succeeded"
        echo "Source: ${SOURCE_DIRS[*]}"
        echo "Destination: $FULL_BACKUP_PATH"
        echo "Failed projects log: ${LOGS_DIR}/failed_backups.log"
        # List recent failed backups
        if [ -f "${LOGS_DIR}/failed_backups.log" ]; then
            echo "Recent failures:"
            # NOTE(review): cut -d':' splits on the first ':' in each line; if
            # these log lines start with a HH:MM:SS timestamp this would split
            # inside the timestamp — confirm the failed_backups.log line format.
            tail -n 20 "${LOGS_DIR}/failed_backups.log" | grep "FAILED BACKUP:" | cut -d':' -f2-
        fi
    fi
else
    # Interactive / colored summary.
    if [ "$DRY_RUN" = true ]; then
        echo -e "\n${YELLOW}DRY RUN COMPLETED: No actual backups were created${NC}"
        echo -e "${YELLOW}Would have backed up $SUCCESSFUL_PROJECTS projects, Estimated size $TOTAL_FORMATTED_SIZE${NC}"
        echo -e "${YELLOW}Source: ${GREEN}${SOURCE_DIRS[*]}${NC}"
        echo -e "${YELLOW}Destination: ${GREEN}$FULL_BACKUP_PATH${NC}"
    elif [ "$FAILED_PROJECTS" -gt 0 ]; then
        echo -e "\n${RED}BACKUP COMPLETED WITH ERRORS: $FAILED_PROJECTS failed, $SUCCESSFUL_PROJECTS succeeded${NC}"
        echo -e "${YELLOW}Path details:${NC}"
        echo -e "  Source: ${GREEN}${SOURCE_DIRS[*]}${NC}"
        echo -e "  Destination: ${GREEN}$FULL_BACKUP_PATH${NC}"
        echo -e "  Failed projects log: ${GREEN}${LOGS_DIR}/failed_backups.log${NC}"
        # Show failed backups details directly in the output
        if [ -f "${LOGS_DIR}/failed_backups.log" ]; then
            echo -e "\n${RED}Recent failures:${NC}"
            tail -n 6 "${LOGS_DIR}/failed_backups.log" | grep -A3 "FAILED BACKUP:" | head -6 | sed 's/^/  /'
            echo -e "\n${YELLOW}For complete list of failures, check:${NC}"
            echo -e "  ${GREEN}less ${LOGS_DIR}/failed_backups.log${NC}"
        fi
    fi
    echo -e "${CYAN}Finished at: $(date)${NC}\n"
    # Ask if the user wants to view the report in browser (default Y)
    if [ "$DRY_RUN" != true ] && [ -n "$REPORT_FILE" ] && [ -f "$REPORT_FILE" ]; then
        echo -e "\n${YELLOW}Would you like to view the backup report in your browser?${NC}"
        if safe_confirm "Open report in browser?" "y"; then
            echo -e "${GREEN}Opening backup report in browser...${NC}"
            if open_in_browser "$REPORT_FILE"; then
                echo -e "${GREEN}Report opened in browser.${NC}"
            else
                # Fall back to printing the path when no browser can be launched.
                echo -e "${YELLOW}Could not open browser automatically. You can find the report here:${NC}"
                echo -e "${GREEN}$REPORT_FILE${NC}"
            fi
        else
            echo -e "${YELLOW}Report not opened. You can find it here:${NC}"
            echo -e "${GREEN}$REPORT_FILE${NC}"
        fi
    fi
    # Exit gracefully
    echo -e "\n${GREEN}Backup operation completed. Thanks for using WebDev Backup Tool!${NC}"
fi
exit 0