Skip to content

Commit a6b19ce

Browse files
Update exoscale_backups.md
1 parent a68ba87 commit a6b19ce

File tree

1 file changed

+160
-18
lines changed

1 file changed

+160
-18
lines changed

sphinx/server_management/exoscale_backups.md

Lines changed: 160 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -41,31 +41,83 @@ Now that this is configured, we can use a script to backup the databases nightly
4141
```sh
4242
#!/bin/bash
4343

44-
# Get the current date in YYYY_mm_dd format
45-
current_date=$(date +"%Y_%m_%d")
44+
# This script is used to clean up old backups in an S3 bucket based on a retention policy.
45+
# The retention policy is as follows:
46+
#
47+
# - Backups less than a week old are kept.
48+
# - Friday backups within the last month are kept.
49+
# - The first Friday backup of each month is kept forever.
50+
# - All other backups are deleted.
4651

47-
# Create the date directory in the backup bucket
48-
bucket_dir="s3://swiss-backups/${current_date}"
52+
# Set the S3 bucket directory
53+
bucket_dir="s3://swiss-backups/"
4954

50-
# Fetch the list of databases dynamically
51-
databases=$(mysql -h 0.0.0.0 -P 3306 -uroot -p"$MYSQL_ROOT_PASSWORD" -e 'SHOW DATABASES;' | grep -Ev '^(Database|information_schema|performance_schema|mysql|sys)$')
55+
# Get the current date in YYYY-MM-DD format
56+
current_date=$(date +"%Y-%m-%d")
5257

53-
# Loop through each database and perform the backup
54-
for db in $databases; do
55-
# Create the backup file name for the upload
56-
backup_file_name="${db}_${current_date}.sql.gz"
58+
# Convert the current date to a timestamp
59+
current_timestamp=$(date -d "$current_date" +%s)
5760

58-
echo "Backing up database: $db"
61+
# Array to keep track of the first Friday of each month
62+
declare -A first_friday_backups
5963

60-
# Perform the mysqldump and upload directly to the Exoscale bucket
61-
mysqldump --max-allowed-packet=2G --single-transaction -h 0.0.0.0 -P 3306 -uroot -p"$MYSQL_ROOT_PASSWORD" "$db" | gzip | s3cmd put - "${bucket_dir}/${backup_file_name}"
64+
# Loop through each backup directory in the S3 bucket
65+
for backup_dir in $(s3cmd ls "$bucket_dir" | awk '{print $2}' | sed 's/\/$//'); do
66+
# Extract the date from the directory name
67+
backup_date=$(basename "$backup_dir")
6268

63-
# Check if the upload was successful
64-
if [ $? -eq 0 ]; then
65-
echo "Backup of $db completed and uploaded successfully to $bucket_dir."
66-
else
67-
echo "Backup of $db failed or upload failed." >&2
69+
# Convert the backup date to a timestamp
70+
# Assuming the backup date format is YYYY-MM-DD or YYYY_MM_DD
71+
backup_date_formatted=${backup_date//_/ } # Replace underscores with spaces
72+
backup_date_formatted=${backup_date_formatted// /-} # Replace spaces with hyphens
73+
backup_timestamp=$(date -d "$backup_date_formatted" +%s 2>/dev/null)
74+
75+
# Check if the timestamp was successfully created
76+
if [ -z "$backup_timestamp" ]; then
77+
echo "Skipping invalid date format for backup directory: $backup_dir"
78+
continue
79+
fi
80+
81+
# Calculate the difference in days
82+
diff_days=$(( (current_timestamp - backup_timestamp) / 86400 ))
83+
84+
# Determine the day of the week (1=Monday, ..., 7=Sunday)
85+
day_of_week=$(date -d "$backup_date_formatted" +%u)
86+
87+
# Determine the month and year for the backup date
88+
backup_month=$(date -d "$backup_date_formatted" +%Y-%m)
89+
90+
# Check if the backup is less than a week old
91+
if [ "$diff_days" -lt 7 ]; then
92+
echo "Keeping backup: $backup_dir (less than a week old)"
93+
continue
94+
fi
95+
96+
# Check if it's a Friday backup (day_of_week == 5)
97+
if [ "$day_of_week" -eq 5 ]; then
98+
# If it's the first Friday of the month, keep it
99+
if [ -z "${first_friday_backups[$backup_month]}" ]; then
100+
first_friday_backups[$backup_month]="$backup_dir"
101+
echo "Keeping first Friday backup: $backup_dir"
102+
continue # Skip deletion since it's kept indefinitely
103+
else
104+
# If it's a Friday backup within the last month, keep it
105+
if [ "$diff_days" -lt 30 ]; then
106+
echo "Keeping Friday backup: $backup_dir (within the last month)"
107+
continue # Skip deletion since it's kept
108+
fi
109+
fi
68110
fi
111+
112+
# If none of the conditions are met, delete the backup
113+
echo "Deleting backup: $backup_dir (does not meet retention criteria)"
114+
s3cmd del "$backup_dir" --recursive
115+
done
116+
117+
# Final output of the first Friday backups kept indefinitely
118+
echo "First Friday backups kept indefinitely:"
119+
for backup in "${first_friday_backups[@]}"; do
120+
echo "$backup"
69121
done
70122
```
71123

@@ -125,4 +177,94 @@ ubuntu@sp7cloud-swiss-1:~/.backup$ s3cmd ls s3://swiss-backups/2024_08_14/
125177
2024-08-14 00:45 6450282 s3://swiss-backups/2024_08_14/nmb_rinvert_2024_08_14.sql.gz
126178
2024-08-14 00:45 22505447 s3://swiss-backups/2024_08_14/sp7demofish_swiss_2024_08_14.sql.gz
127179
ubuntu@sp7cloud-swiss-1:~/.backup$
180+
```
181+
182+
Then the backups are cleaned with another cron job:
183+
184+
```sh
185+
# m h dom mon dow command
186+
0 1 * * * /home/ubuntu/.backup/cleanup_script.sh
187+
```
188+
189+
```sh
#!/bin/bash
#
# Clean up old backups in an S3 bucket according to this retention policy:
#
#   - Backups less than a week old are kept.
#   - Friday backups within the last month are kept.
#   - The first Friday backup of each month is kept forever.
#   - All other backups are deleted.
#
# Requires a configured s3cmd and GNU date. Intended to run nightly from cron.

set -euo pipefail

# S3 bucket holding one backup directory per day, named YYYY_MM_DD or YYYY-MM-DD.
bucket_dir="s3://swiss-backups/"

# "Now" truncated to midnight, as a Unix timestamp, so age-in-days is stable
# regardless of what time of day the job runs.
current_timestamp=$(date -d "$(date +%Y-%m-%d)" +%s)

# Earliest Friday backup seen per YYYY-MM month. s3cmd ls returns keys in
# ascending lexical order, so within a month the first Friday encountered is
# the first Friday of that month.
declare -A first_friday_backups

# Iterate over the backup directories. A while-read loop over process
# substitution (instead of an unquoted for-loop over $(...)) keeps directory
# names intact under word splitting and lets first_friday_backups survive
# past the loop (a pipe into the loop would run it in a subshell).
while IFS= read -r backup_dir; do
  [ -n "$backup_dir" ] || continue

  # The directory name is the backup date; normalize underscores to hyphens
  # so GNU date can parse it (handles YYYY_MM_DD and YYYY-MM-DD alike).
  backup_date=$(basename "$backup_dir")
  backup_date_formatted=${backup_date//_/-}

  # Skip directories whose name is not a parseable date.
  if ! backup_timestamp=$(date -d "$backup_date_formatted" +%s 2>/dev/null); then
    echo "Skipping invalid date format for backup directory: $backup_dir" >&2
    continue
  fi

  # Age of the backup in whole days.
  diff_days=$(( (current_timestamp - backup_timestamp) / 86400 ))

  # Rule 1: anything less than a week old is kept.
  if [ "$diff_days" -lt 7 ]; then
    echo "Keeping backup: $backup_dir (less than a week old)"
    continue
  fi

  # Day of week (1=Monday .. 7=Sunday) and YYYY-MM month of the backup —
  # only needed once the backup is old enough to be a deletion candidate.
  day_of_week=$(date -d "$backup_date_formatted" +%u)
  backup_month=$(date -d "$backup_date_formatted" +%Y-%m)

  if [ "$day_of_week" -eq 5 ]; then
    # Rule 3: the first Friday of each month is kept forever.
    if [ -z "${first_friday_backups[$backup_month]:-}" ]; then
      first_friday_backups[$backup_month]="$backup_dir"
      echo "Keeping first Friday backup: $backup_dir"
      continue
    fi
    # Rule 2: other Friday backups are kept while less than a month old.
    if [ "$diff_days" -lt 30 ]; then
      echo "Keeping Friday backup: $backup_dir (within the last month)"
      continue
    fi
  fi

  # Rule 4: everything else is deleted. A failed delete is reported but does
  # not abort the run, so one bad key cannot stall the whole cleanup.
  echo "Deleting backup: $backup_dir (does not meet retention criteria)"
  s3cmd del --recursive "$backup_dir" \
    || echo "Warning: failed to delete $backup_dir" >&2
done < <(s3cmd ls "$bucket_dir" | awk '{print $2}' | sed 's|/*$||')

# Report which first-Friday backups are being retained indefinitely.
echo "First Friday backups kept indefinitely:"
for backup in "${first_friday_backups[@]}"; do
  echo "$backup"
done
```

0 commit comments

Comments (0)