Skip to content

Commit

Permalink
Moving to functions
Browse files Browse the repository at this point in the history
  • Loading branch information
keithhubner committed May 2, 2024
1 parent bf1afd9 commit f75e0c7
Showing 1 changed file with 57 additions and 61 deletions.
118 changes: 57 additions & 61 deletions backup_script.sh
@@ -1,85 +1,81 @@
#!/bin/bash

# Create the application directory tree ($APP_DIR, backups/, logs/).
# Requires APP_DIR to be set; aborts with a clear message otherwise
# (previously an empty APP_DIR made mkdir fail obscurely).
function create_directorys() {
    : "${APP_DIR:?APP_DIR must be set}"
    echo "Creating backup directory..."
    mkdir -p "$APP_DIR"
    mkdir -p "$APP_DIR/backups"
    mkdir -p "$APP_DIR/logs"
}

# The pipeline starts tee CONCURRENTLY with create_directorys, so the
# log directory must already exist or tee cannot create event.log on a
# first run; create it up front, and quote the path.
mkdir -p "$APP_DIR/logs"
create_directorys 2>&1 | tee -a "$APP_DIR/logs/event.log"

# # Create the backup directory if it doesn't exist
# echo "Creating backup directory..." >> $APP_DIR/logs/event.log
# mkdir -p "$APP_DIR"
# mkdir -p "$APP_DIR/backups"
# mkdir -p "$APP_DIR/logs"


# Cleanup function: invoked via the SIGINT/SIGTERM trap below; logs the
# shutdown to the event log and the console, then exits 0 so an
# interrupted run still terminates cleanly.
# NOTE(review): a later definition in this file replaces cleanup() with
# the S3 retention logic once that line is read -- confirm intended.
cleanup() {
    # Removed the misleading "No errors occurred" message, which was
    # printed even when this handler fired on a signal.
    echo "Cleaning up before exit..." >> "$APP_DIR/logs/event.log"
    echo "Cleaning up before exit..."
    # Example: Close file descriptors, remove temporary files, etc.
    exit 0
}
# Error handler: invoked via the ERR trap when any command fails.
err() {
    # Report to stderr and exit NON-zero so cron/CI can detect the
    # failed run; the original exited 0, masking every error.
    echo "An error occurred. Exiting..." >&2
    exit 1
}

# Run cleanup() on interrupt/terminate, and err() whenever a command
# exits non-zero (ERR trap).
trap 'cleanup' SIGINT SIGTERM
trap 'err' ERR

# Abort on any unhandled non-zero exit status from here on.
set -e

# NOTE(review): printed unconditionally -- the directory-creation
# pipeline above is not status-checked, so this may be optimistic.
echo "Backup directory created successfully."

echo "Starting backup..."

# Generate timestamps: a compact one for the backup filename and a
# human-readable one for log entries.
TIMESTAMP=$(date +"%Y%m%d%H%M%S")
LOG_TIMESTAMP=$(date +"%Y-%m-%d %H:%M:%S")

echo "[$LOG_TIMESTAMP] Starting backup..." >> "$APP_DIR/logs/event.log"

BACKUP_FILE="$APP_DIR/backups/$DB_NAME-$TIMESTAMP.sql"

echo "Backup file: $BACKUP_FILE" >> "$APP_DIR/logs/event.log"

# Current date in seconds since the epoch (used for retention checks)
CURRENT_DATE=$(date +%s)

# Perform the backup using mariadb-dump (mysqldump-compatible).
# All redirect targets are quoted so paths with spaces work.
echo "Running mysqldump..." >> "$APP_DIR/logs/event.log"
mariadb-dump -h "$DB_HOST" -u "$DB_USER" -p"$DB_PASSWORD" "$DB_NAME" > "$BACKUP_FILE"
# Create the application directory tree ($APP_DIR, backups/, logs/)
# and report success. Requires APP_DIR to be set; aborts with a clear
# message otherwise instead of letting mkdir fail obscurely.
function create_directorys() {
    : "${APP_DIR:?APP_DIR must be set}"
    echo "Creating backup directory..."
    mkdir -p "$APP_DIR"
    mkdir -p "$APP_DIR/backups"
    mkdir -p "$APP_DIR/logs"
    echo "Backup directory created successfully."
}

# cat $BACKUP_FILE   # debug aid: dump the backup to stdout
# NOTE(review): this duplicates run_s3_backup() defined below -- the
# upload runs here at load time and again when run_s3_backup is called.
echo "Running S3 Backup...." >> "$APP_DIR/logs/event.log"
s3cmd --host="${AWS_HOST}" --host-bucket="s3://${BUCKET}" put --acl-"${PUB_PRIV}" "${BACKUP_FILE}" "s3://${S3_PATH}"
# Dump the configured database to a timestamped SQL file under
# $APP_DIR/backups. Requires DB_HOST, DB_USER, DB_PASSWORD, DB_NAME,
# APP_DIR, TIMESTAMP and LOG_TIMESTAMP to be set. Sets the global
# BACKUP_FILE, which run_s3_backup() consumes afterwards.
function run_backup() {
    # Bug fix: assign BACKUP_FILE BEFORE echoing it -- the original
    # printed an empty "Backup file:" line.
    BACKUP_FILE="$APP_DIR/backups/$DB_NAME-$TIMESTAMP.sql"
    echo "[$LOG_TIMESTAMP] Starting backup..."
    echo "Backup file: $BACKUP_FILE"
    echo "Running mysqldump..."
    # Target quoted so paths with spaces work.
    mariadb-dump -h "$DB_HOST" -u "$DB_USER" -p"$DB_PASSWORD" "$DB_NAME" > "$BACKUP_FILE"
    echo "Backup finished."
}

# Upload the current backup file to the S3-compatible endpoint.
# Requires AWS_HOST, BUCKET, PUB_PRIV (public|private), BACKUP_FILE and
# S3_PATH to be set.
function run_s3_backup() {
    echo "Running S3 Backup...."
    # Every expansion quoted so hosts/paths containing spaces or glob
    # characters survive; --acl-"${PUB_PRIV}" expands to s3cmd's
    # --acl-public / --acl-private flag.
    s3cmd --host="${AWS_HOST}" --host-bucket="s3://${BUCKET}" put --acl-"${PUB_PRIV}" "${BACKUP_FILE}" "s3://${S3_PATH}"
}

# Quoted log path so APP_DIR values with spaces work.
echo "Running Cleanup...." >> "$APP_DIR/logs/event.log"
# Retention cleanup: list every object under s3://S3_PATH and delete
# those older than OLDER_THAN_DAYS, skipping any object whose key
# contains "CLI". Requires AWS_HOST, BUCKET, S3_PATH, OLDER_THAN_DAYS.
# Fixes the garbled original, which opened two listing loops but closed
# only one (unmatched 'while ... do') and computed the file age outside
# the loop that read the listing.
# NOTE(review): this redefines the cleanup() declared earlier for the
# SIGINT/SIGTERM trap; from this point on the trap runs this version.
function cleanup() {
    echo "Running Cleanup...."
    # Snapshot "now" once so every file is aged against the same instant.
    CURRENT_DATE=$(date +%s)
    # s3cmd ls output: <date> <time> <size> <key>; read parses the four
    # fields directly (the final field keeps any embedded spaces),
    # avoiding three awk forks per line.
    s3cmd --host="${AWS_HOST}" --host-bucket="s3://${BUCKET}" ls --recursive "s3://${S3_PATH}" | while read -r FILE_DATE FILE_TIME _ FILE_NAME; do
        # Combine date and time, convert to seconds since the epoch;
        # skip lines (e.g. DIR entries) that do not parse as a date.
        FILE_DATE_SECONDS=$(date -d "$FILE_DATE $FILE_TIME" +%s 2>/dev/null) || continue
        # File age in whole days
        AGE=$(( (CURRENT_DATE - FILE_DATE_SECONDS) / 86400 ))
        # Check the file name and age
        if [[ $FILE_NAME != *"CLI"* && $AGE -gt $OLDER_THAN_DAYS ]]; then
            echo "Deleting $FILE_NAME which is $AGE days old."
            s3cmd --host="${AWS_HOST}" --host-bucket="s3://${BUCKET}" del "$FILE_NAME"
        else
            echo "Skipping $FILE_NAME"
        fi
    done
}

# Main sequence: create directories, dump the database, upload it, then
# prune old backups. The stray retention-loop fragments that previously
# surrounded these calls (FILE_DATETIME computed from unset variables
# before the calls -- which would trip set -e -- plus a dangling if/fi
# and an unmatched 'done' after them) belong inside cleanup() and are
# removed here.
create_directorys
run_backup
run_s3_backup
cleanup

0 comments on commit f75e0c7

Please sign in to comment.