From 041d9892bf1c50347305ffe00d569ee1ffbeee9f Mon Sep 17 00:00:00 2001 From: shashidharanA Date: Wed, 14 Dec 2022 18:38:03 +0530 Subject: [PATCH 1/7] Removed old BackupServer and added new changes --- cron-backup/Dockerfile | 16 ++-- cron-backup/README.md | 42 +++++++--- cron-backup/cron.sh | 24 ------ cron-backup/grafana_backup.sh | 153 ++++++++++++++++++++++++++------- cron-backup/mqtts_backup.sh | 154 +++++++++++++++++++++++++++------- cron-backup/nginx_backup.sh | 153 ++++++++++++++++++++++++++------- cron-backup/nodered_backup.sh | 154 +++++++++++++++++++++++++++------- cron-backup/startup.sh | 27 ++++++ docker-compose.yml | 14 ++-- influxdb/Dockerfile | 8 +- influxdb/backup.sh | 141 +++++++++++++++++++++++++------ influxdb/influxdb_cron.sh | 19 ----- influxdb/influxstart.sh | 12 +++ 13 files changed, 694 insertions(+), 223 deletions(-) delete mode 100755 cron-backup/cron.sh create mode 100644 cron-backup/startup.sh delete mode 100644 influxdb/influxdb_cron.sh create mode 100644 influxdb/influxstart.sh diff --git a/cron-backup/Dockerfile b/cron-backup/Dockerfile index 9f5b137..9f63f81 100755 --- a/cron-backup/Dockerfile +++ b/cron-backup/Dockerfile @@ -80,16 +80,18 @@ RUN chmod +x /bin/nginx_backup.sh COPY mqtts_backup.sh /bin/mqtts_backup.sh RUN chmod +x /bin/mqtts_backup.sh +# Backup script for startup.sh +COPY startup.sh /etc/service/startup/run +RUN chmod +x /etc/service/startup/run + # Backup script for mongodb -COPY mongodb_backup.sh /bin/mongodb_backup.sh -RUN chmod +x /bin/mongodb_backup.sh +#COPY mongodb_backup.sh /etc/service/mongodb_backup/run +#RUN chmod +x /etc/service/mongodb_backup/run + # Start the postfix daemon during container startup +RUN mkdir -p /etc/my_init.d COPY postfix.sh /etc/my_init.d/postfix.sh RUN chmod +x /etc/my_init.d/postfix.sh -# To Enable crontab -RUN mkdir -p /etc/my_init.d -COPY cron.sh /etc/my_init.d/cron.sh -RUN chmod +x /etc/my_init.d/cron.sh -# end of file +# end of file \ No newline at end of file diff --git 
a/cron-backup/README.md b/cron-backup/README.md index 116f926..f859142 100644 --- a/cron-backup/README.md +++ b/cron-backup/README.md @@ -6,23 +6,43 @@ This instance provides backup support for the `Nginx`, `Node-red` and `Grafana` For backing up the directory data -- It uses [`grafana_backup.sh`](cron-backup\grafana_backup.sh) for `Grafana` container. -- It uses [`nodered_backup.sh`](cron-backup\nodered_backup.sh) for `Node-red` container. -- It uses [`nginx_backup.sh`](cron-backup\nginx_backup.sh) for `Nginx` container. +- It uses [`grafana_backup.sh`](backup\grafana_backup.sh) for `Grafana` container. +- It uses [`nodered_backup.sh`](backup\nodered_backup.sh) for `Node-red` container. +- It uses [`nginx_backup.sh`](backup\nginx_backup.sh) for `Nginx` container. +- It uses [`mqtts_backup.sh`](backup\mqtts_backup.sh) for `Mqtts` container. -## Scheduling backup using `crontab` +## Scheduling backup using `Daemon thread` The following backup jobs are added to run at specific time. ``` bash -# echo new cron into cron file -{ - echo "35 6 * * * /bin/bash -l -c '/bin/nodered_backup.sh'" - echo "35 7 * * * /bin/bash -l -c '/bin/grafana_backup.sh'" - echo "35 8 * * * /bin/bash -l -c '/bin/nginx_backup.sh'" -} >> mycron - +# Start up the Process +while true +do + HOUR="$(date +'%H')" + MINUTE="$(date +'%M')" + + if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ] + then + /bin/nodered_backup.sh + sleep 60 + fi + if [ "$HOUR" = "07" ] && [ "$MINUTE" = "35" ] + then + /bin/grafana_backup.sh + sleep 60 + fi + if [ "$HOUR" = "08" ] && [ "$MINUTE" = "35" ] + then + /bin/nginx_backup.sh + sleep 60 + fi + if [ "$HOUR" = "09" ] && [ "$MINUTE" = "35" ] + then + /bin/mqtts_backup.sh + sleep 60 + fi ``` ## Mail Alert diff --git a/cron-backup/cron.sh b/cron-backup/cron.sh deleted file mode 100755 index 0225d7f..0000000 --- a/cron-backup/cron.sh +++ /dev/null @@ -1,24 +0,0 @@ -#!/bin/sh - -# exit on unchecked errors -set -e - -# backups are scheduled via the root crontab. 
Start by heading there -cd /root - -# write out current crontab -crontab -l > mycron || echo "no crontab for root, going on" - -# echo new cron into cron file -{ - echo "35 6 * * * /bin/bash -l -c '/bin/nodered_backup.sh'" - echo "35 7 * * * /bin/bash -l -c '/bin/grafana_backup.sh'" - echo "35 8 * * * /bin/bash -l -c '/bin/nginx_backup.sh'" - echo "35 9 * * * /bin/bash -l -c '/bin/mqtts_backup.sh'" -} >> mycron - -# delete duplicated lines -sort -u -o mycron mycron - -# install new cron file -crontab mycron diff --git a/cron-backup/grafana_backup.sh b/cron-backup/grafana_backup.sh index 33ca0bd..f1cb98f 100755 --- a/cron-backup/grafana_backup.sh +++ b/cron-backup/grafana_backup.sh @@ -1,6 +1,16 @@ #!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. +#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec DATE1=$(date +%Y%m%d%H%M) DATE=$(date +%d-%m-%y_%H-%M) @@ -9,47 +19,126 @@ mkdir -p /var/lib/backup/grafana grafana_src='/grafana' if [ ! 
-d $grafana_src ]; then - - echo "DATE:" "$DATE" > /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "STATUS: Grafana backup failed" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "The source backup directory: grafana_src is not available" >> /tmp/grafana.txt - < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${CRON_BACKUP_MAIL}" + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup" + echo "" + echo "STATUS: Grafana backup failed" + echo "" + echo "The source backup directory: grafana_src is not available" + }>> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" exit else tar cvzf /var/lib/backup/grafana/"${SOURCE_NAME}"_grafana_data_backup_"${DATE1}".tgz ${grafana_src}/ fi -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/; +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/grafana/; then - echo "DATE:" "$DATE" > /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "STATUS: Grafana backup succeeded." 
>> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "******* Grafana Data Backup ****************" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_GRAFANA}\/,,g" &>> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "************** END **************************" >> /tmp/grafana.txt + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Daily backup" + echo "" + echo "STATUS: Grafana Daily backup succeeded." + echo "" + echo "******* Grafana Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""\/,,g" &>> /tmp/grafana.txt + echo "" + echo "************** END **************************" + } >> /tmp/grafana.txt else - echo "DATE:" "$DATE" > /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup" >> /tmp/grafana.txt - echo "" >> /tmp/grafana.txt - echo "STATUS: Grafana backup failed" >> /tmp/grafana.txt +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Daily backup" + echo "" + echo "STATUS: Grafana Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Monthly backup" + echo "" + echo "STATUS: Grafana Monthly backup succeeded." 
echo "" >> /tmp/grafana.txt - echo "Something went wrong, please check it" >> /tmp/grafana.txt - < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${CRON_BACKUP_MAIL}" + echo "******* Grafana Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""/monthly_backup/grafana/\/,,g" &>> /tmp/grafana.txt + echo "" + echo "************** END **************************" + } >> /tmp/grafana.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Monthly backup" + echo "" + echo "STATUS: Grafana Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/yearly_backup/grafana/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Yearly backup" + echo "" + echo "STATUS: Grafana Yearly backup succeeded." 
+ echo "" + echo "******* Grafana Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/yearly_backup/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""/yearly_backup/grafana/\/,,g" &>> /tmp/grafana.txt + echo "" + echo "************** END **************************" + } >> /tmp/grafana.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Yearly backup" + echo "" + echo "STATUS: Grafana Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" +fi fi -< /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${CRON_BACKUP_MAIL}" + + +< /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" # Remove the old backup data in local directory to avoid excessive storage use find /var/lib/backup/grafana/ -type f -exec rm {} \; +rm /tmp/grafana.txt +###PRUNE### -exit +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_GRAFANA}"/grafana/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi diff --git a/cron-backup/mqtts_backup.sh b/cron-backup/mqtts_backup.sh index 1b42070..8225492 100755 --- a/cron-backup/mqtts_backup.sh +++ b/cron-backup/mqtts_backup.sh @@ -1,55 +1,145 @@ #!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. 
+#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec DATE1=$(date +%Y%m%d%H%M) DATE=$(date +%d-%m-%y_%H-%M) + mkdir -p /var/lib/backup/mqtts mqtts_src='/mqtts' if [ ! -d $mqtts_src ]; then - - echo "DATE:" "$DATE" > /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "DESCRIPTION: ${SOURCE_NAME}_MQTTs backup" >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "STATUS: MQTTs backup failed." >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "The source backup directory: mqtts_src is not available" >> /tmp/mqtts.txt - < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: MQTTs Data Backup" "${CRON_BACKUP_MAIL}" + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts backup" + echo "" + echo "STATUS: Mqtts backup failed" + echo "" + echo "The source backup directory: mqtts_src is not available" + }>> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" exit else tar cvzf /var/lib/backup/mqtts/"${SOURCE_NAME}"_mqtts_data_backup_"${DATE1}".tgz ${mqtts_src}/ fi -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/; +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/mqtts/; then - echo "DATE:" "$DATE" > /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "DESCRIPTION: ${SOURCE_NAME}_MQTTs backup" >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "STATUS: MQTTs backup succeeded." 
>> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "******* MQTTs Data Backup ****************" >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_MQTTS}\/,,g" &>> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "************** END **************************" >> /tmp/mqtts.txt + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Daily backup" + echo "" + echo "STATUS: Mqtts Daily backup succeeded." + echo "" + echo "******* Mqtts Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""\/,,g" &>> /tmp/mqtts.txt + echo "" + echo "************** END **************************" + } >> /tmp/mqtts.txt else - echo "DATE:" "$DATE" > /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "DESCRIPTION: ${SOURCE_NAME}_MQTTs backup" >> /tmp/mqtts.txt - echo "" >> /tmp/mqtts.txt - echo "STATUS: MQTTs backup failed." >> /tmp/mqtts.txt +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Daily backup" + echo "" + echo "STATUS: Mqtts Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Monthly backup" + echo "" + echo "STATUS: Mqtts Monthly backup succeeded." 
echo "" >> /tmp/mqtts.txt - echo "Something went wrong, please check it" >> /tmp/mqtts.txt - < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: MQTTs Data Backup" "${CRON_BACKUP_MAIL}" + echo "******* Mqtts Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""/monthly_backup/mqtts/\/,,g" &>> /tmp/mqtts.txt + echo "" + echo "************** END **************************" + } >> /tmp/mqtts.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Monthly backup" + echo "" + echo "STATUS: Mqtts Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/yearly_backup/mqtts/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Yearly backup" + echo "" + echo "STATUS: Mqtts Yearly backup succeeded." 
+ echo "" + echo "******* Mqtts Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/yearly_backup/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""/yearly_backup/mqtts/\/,,g" &>> /tmp/mqtts.txt + echo "" + echo "************** END **************************" + } >> /tmp/mqtts.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Yearly backup" + echo "" + echo "STATUS: Mqtts Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" fi -< /tmp/mqtts.txt mail -s "${SOURCE_NAME}: MQTTs Data Backup" "${CRON_BACKUP_MAIL}" +fi + + +< /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" # Remove the old backup data in local directory to avoid excessive storage use find /var/lib/backup/mqtts/ -type f -exec rm {} \; +rm /tmp/mqtts.txt +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_MQTTS}"/mqtts/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + -exit +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi \ No newline at end of file diff --git a/cron-backup/nginx_backup.sh b/cron-backup/nginx_backup.sh index 1db5260..44beb0b 100755 --- a/cron-backup/nginx_backup.sh +++ b/cron-backup/nginx_backup.sh @@ -1,6 +1,16 @@ #!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. 
+#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec DATE1=$(date +%Y%m%d%H%M) DATE=$(date +%d-%m-%y_%H-%M) @@ -9,47 +19,126 @@ mkdir -p /var/lib/backup/nginx nginx_src='/nginx' if [ ! -d $nginx_src ]; then - - echo "DATE:" "$DATE" > /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "STATUS: Nginx backup failed." >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "The source backup directory: nginx_src is not available" >> /tmp/nginx.txt - < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${CRON_BACKUP_MAIL}" + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" + echo "" + echo "STATUS: Nginx backup failed" + echo "" + echo "The source backup directory: nginx_src is not available" + }>> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" exit else tar cvzf /var/lib/backup/nginx/"${SOURCE_NAME}"_nginx_data_backup_"${DATE1}".tgz ${nginx_src}/ fi -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/; +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/nginx/; then - echo "DATE:" "$DATE" > /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "STATUS: Nginx backup succeeded." 
>> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "******* Nginx Data Backup ****************" >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_NGINX}\/,,g" &>> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "************** END **************************" >> /tmp/nginx.txt + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Daily backup" + echo "" + echo "STATUS: Nginx Daily backup succeeded." + echo "" + echo "******* Nginx Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""\/,,g" &>> /tmp/nginx.txt + echo "" + echo "************** END **************************" + } >> /tmp/nginx.txt else - echo "DATE:" "$DATE" > /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" >> /tmp/nginx.txt - echo "" >> /tmp/nginx.txt - echo "STATUS: Nginx backup failed." >> /tmp/nginx.txt +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Daily backup" + echo "" + echo "STATUS: Nginx Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Monthly backup" + echo "" + echo "STATUS: Nginx Monthly backup succeeded." 
echo "" >> /tmp/nginx.txt - echo "Something went wrong, please check it" >> /tmp/nginx.txt - < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${CRON_BACKUP_MAIL}" + echo "******* Nginx Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""/monthly_backup/nginx/\/,,g" &>> /tmp/nginx.txt + echo "" + echo "************** END **************************" + } >> /tmp/nginx.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Monthly backup" + echo "" + echo "STATUS: Nginx Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" fi -< /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${CRON_BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/yearly_backup/nginx/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Yearly backup" + echo "" + echo "STATUS: Nginx Yearly backup succeeded." 
+ echo "" + echo "******* Nginx Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/yearly_backup/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""/yearly_backup/nginx/\/,,g" &>> /tmp/nginx.txt + echo "" + echo "************** END **************************" + } >> /tmp/nginx.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Yearly backup" + echo "" + echo "STATUS: Nginx Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" +fi +fi + + +< /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" # Remove the old backup data in local directory to avoid excessive storage use find /var/lib/backup/nginx/ -type f -exec rm {} \; +rm /tmp/nginx.txt +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_NGINX}"/nginx/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + -exit +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi \ No newline at end of file diff --git a/cron-backup/nodered_backup.sh b/cron-backup/nodered_backup.sh index 8232eee..fa48477 100755 --- a/cron-backup/nodered_backup.sh +++ b/cron-backup/nodered_backup.sh @@ -1,6 +1,16 @@ #!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. +#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. 
+#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec DATE1=$(date +%Y%m%d%H%M) DATE=$(date +%d-%m-%y_%H-%M) @@ -9,47 +19,127 @@ mkdir -p /var/lib/backup/nodered nodered_src='/nodered' if [ ! -d $nodered_src ]; then - - echo "DATE:" "$DATE" > /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "STATUS: Nodered backup failed." >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "The source backup directory: nodered_src is not available" >> /tmp/nodered.txt - < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${CRON_BACKUP_MAIL}" + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" + echo "" + echo "STATUS: Nodered backup failed" + echo "" + echo "The source backup directory: nodered_src is not available" + }>> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" exit else tar cvzf /var/lib/backup/nodered/"${SOURCE_NAME}"_nodered_data_backup_"${DATE1}".tgz ${nodered_src}/ fi -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/; +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/nodered/; then - echo "DATE:" "$DATE" > /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "STATUS: Node-red backup succeeded." 
>> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "******* Node-red Data Backup ****************" >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_NODERED}\/,,g" &>> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "************** END **************************" >> /tmp/nodered.txt + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Daily backup" + echo "" + echo "STATUS: Nodered Daily backup succeeded." + echo "" + echo "******* Nodered Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""\/,,g" &>> /tmp/nodered.txt + echo "" + echo "************** END **************************" + } >> /tmp/nodered.txt else - echo "DATE:" "$DATE" > /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" >> /tmp/nodered.txt - echo "" >> /tmp/nodered.txt - echo "STATUS: Nodered backup failed." >> /tmp/nodered.txt +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Daily backup" + echo "" + echo "STATUS: Nodered Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Monthly backup" + echo "" + echo "STATUS: Nodered Monthly backup succeeded." 
echo "" >> /tmp/nodered.txt - echo "Something went wrong, please check it" >> /tmp/nodered.txt - < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${CRON_BACKUP_MAIL}" + echo "******* Nodered Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""/monthly_backup/nodered/\/,,g" &>> /tmp/nodered.txt + echo "" + echo "************** END **************************" + } >> /tmp/nodered.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Monthly backup" + echo "" + echo "STATUS: Nodered Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/yearly_backup/nodered/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Yearly backup" + echo "" + echo "STATUS: Nodered Yearly backup succeeded." 
+ echo "" + echo "******* Nodered Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/yearly_backup/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""/yearly_backup/nodered/\/,,g" &>> /tmp/nodered.txt + echo "" + echo "************** END **************************" + } >> /tmp/nodered.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Yearly backup" + echo "" + echo "STATUS: Nodered Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" fi -< /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${CRON_BACKUP_MAIL}" +fi + + +< /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" # Remove the old backup data in local directory to avoid excessive storage use find /var/lib/backup/nodered/ -type f -exec rm {} \; +rm /tmp/nodered.txt + +###PRUNE### -exit +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_NODERED}"/nodered/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi diff --git a/cron-backup/startup.sh b/cron-backup/startup.sh new file mode 100644 index 0000000..4cf2cdd --- /dev/null +++ b/cron-backup/startup.sh @@ -0,0 +1,27 @@ +#!/bin/bash +while true +do + HOUR="$(date +'%H')" + MINUTE="$(date +'%M')" + + if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ] + then + /bin/nodered_backup.sh + 
sleep 60 + fi + if [ "$HOUR" = "07" ] && [ "$MINUTE" = "35" ] + then + /bin/grafana_backup.sh + sleep 60 + fi + if [ "$HOUR" = "08" ] && [ "$MINUTE" = "35" ] + then + /bin/nginx_backup.sh + sleep 60 + fi + if [ "$HOUR" = "09" ] && [ "$MINUTE" = "35" ] + then + /bin/mqtts_backup.sh + sleep 60 + fi +done \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 5fe81e8..6b24790 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -183,8 +183,8 @@ # IOT_DASHBOARD_INFLUXDB_BACKUP_EMAIL # To send backup mail in Influxdb container. Use "space" to delimit the MAIL IDs. # -# IOT_DASHBOARD_CRON_BACKUP_EMAIL -# To send backup mail in cron-backup container. Use "space" to delimit the MAIL IDs. +# IOT_DASHBOARD_BACKUP_EMAIL +# To send backup mail in backup container. Use "space" to delimit the MAIL IDs. # # IOT_DASHBOARD_MONGO_INITDB_ROOT_USERNAME # The Username to be used for accessing Mongodb. @@ -348,13 +348,13 @@ services: - "2525:25" hostname: "${IOT_DASHBOARD_MAIL_HOST_NAME:-iotmail}" - cron-backup: + backup: restart: unless-stopped build: - context: ./cron-backup + context: ./backup dockerfile: Dockerfile args: - hostname: "${IOT_DASHBOARD_CRONBACKUP_MAIL_HOST_NAME:-cron-backup}" + hostname: "${IOT_DASHBOARD_CRONBACKUP_MAIL_HOST_NAME:-backup}" relay_ip: "postfix:25" domain: "${IOT_DASHBOARD_MAIL_DOMAIN:-example.com}" # Moving backup data to S3 Bucket @@ -363,7 +363,7 @@ services: AWS_DEFAULT_REGION: "${IOT_DASHBOARD_AWS_DEFAULT_REGION:-.}" AWS_HOST_BASE: "${IOT_DASHBOARD_AWS_HOST_BASE:-.}" AWS_HOST_BUCKET: "${IOT_DASHBOARD_AWS_HOST_BUCKET:-.}" - hostname: "${IOT_DASHBOARD_CRONBACKUP_MAIL_HOST_NAME:-cron-backup}" + hostname: "${IOT_DASHBOARD_CRONBACKUP_MAIL_HOST_NAME:-backup}" volumes: - "${IOT_DASHBOARD_DATA}grafana:/grafana" - "${IOT_DASHBOARD_DATA}node-red:/nodered" @@ -372,7 +372,7 @@ services: - "${IOT_DASHBOARD_DATA}mongodb/mongodb_data:/var/lib/mongodb" - 
"${IOT_DASHBOARD_DATA}mongodb/mongodb-S3-bucket:/var/lib/mongodb-S3-bucket" environment: - CRON_BACKUP_MAIL: "${IOT_DASHBOARD_CRON_BACKUP_EMAIL:-}" + BACKUP_MAIL: "${IOT_DASHBOARD_BACKUP_EMAIL:-}" MONGO_INITDB_ROOT_USERNAME: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_USERNAME:-}" MONGO_INITDB_ROOT_PASSWORD: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_PASSWORD:-}" SOURCE_NAME: "${IOT_DASHBOARD_CERTBOT_FQDN}" diff --git a/influxdb/Dockerfile b/influxdb/Dockerfile index 90252c4..4e768f2 100644 --- a/influxdb/Dockerfile +++ b/influxdb/Dockerfile @@ -61,8 +61,6 @@ COPY influxdb.conf /etc/influxdb/influxdb.conf # Enable influxdb database automatic backup crontab RUN mkdir -p /etc/my_init.d -COPY influxdb_cron.sh /etc/my_init.d/influxdb_cron.sh -RUN chmod +x /etc/my_init.d/influxdb_cron.sh # Start the postfix daemon during container startup COPY postfix.sh /etc/my_init.d/postfix.sh @@ -73,4 +71,8 @@ RUN mkdir /etc/service/influx COPY influx.sh /etc/service/influx/run RUN chmod +x /etc/service/influx/run -# end of file +# Backup script for influxdb +COPY influxstart.sh /etc/service/influxstart/run +RUN chmod +x /etc/service/influxstart/run + +# end of file \ No newline at end of file diff --git a/influxdb/backup.sh b/influxdb/backup.sh index d4b93aa..3e61fe4 100755 --- a/influxdb/backup.sh +++ b/influxdb/backup.sh @@ -1,6 +1,16 @@ #!/bin/bash #The Shell script will be used for taking backup and send it to S3 bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. 
+a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec # TO list all Databases in influxdb databases DATE=$(date +%d-%m-%y_%H-%M) DATE1=$(date +%Y%m%d%H%M) @@ -31,37 +41,120 @@ done < "/tmp/data.txt" tar czf /var/lib/backup/influxdb/"${SOURCE_NAME}"_influxdb_metdata_db_backup_"${DATE1}".tgz /var/lib/influxdb-backup/ && tar czf /var/lib/backup/influxdb/"${SOURCE_NAME}"_influxdb_data_backup_"${DATE1}".tgz /var/lib/influxdb/ -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/influxdb/ s3://"${S3_BUCKET_INFLUXDB}"/; +# Moving the backup to S3 bucket (Daily Backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/influxdb/ s3://"${S3_BUCKET_INFLUXDB}"/influxdb/; then - echo "DATE:" "$DATE" > /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb backup" >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "STATUS: Influxdb backup succeeded." >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "******* Influxdb Database & metadata Backup ********" >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_metdata_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "************** Influxdb data Backup ****************" >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "********************** END ********************* " >> /tmp/influxbackup.txt + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Daily backup" + echo "" + echo "STATUS: Influxdb Daily backup succeeded." 
+ echo "" + echo "******* Influxdb Database & metadata Backup ********" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_metdata_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "************** Influxdb data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "********************** END ********************* " + }>> /tmp/influxbackup.txt else - echo "DATE:" "$DATE" > /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb backup" >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "STATUS: Influxdb backup failed." >> /tmp/influxbackup.txt - echo "" >> /tmp/influxbackup.txt - echo "Something went wrong, please check it" >> /tmp/influxbackup.txt + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Daily backup" + echo "" + echo "STATUS: Influxdb Daily backup failed." + echo "" + echo "Something went wrong, please check it" + }>> /tmp/influxbackup.txt < /tmp/influxbackup.txt mail -s "${SOURCE_NAME}: Influxdb backup" "${INFLUXDB_BACKUP_MAIL}" fi +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/influxdb/ s3://"${S3_BUCKET_INFLUXDB}"/monthly_backup/influxdb/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Monthly backup" + echo "" + echo "STATUS: Influxdb Monthly backup succeeded." 
+ echo "" + echo "******* Influxdb Database & metadata Backup ********" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/monthly_backup/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_metdata_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "************** Influxdb data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/monthly_backup/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "********************** END ********************* " + }>> /tmp/influxbackup.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Monthly backup" + echo "" + echo "STATUS: Influxdb Monthly backup failed." + echo "" + echo "Something went wrong, please check it" + }>> /tmp/influxbackup.txt + < /tmp/influxbackup.txt mail -s "${SOURCE_NAME}: Influxdb backup" "${INFLUXDB_BACKUP_MAIL}" +fi +fi + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/influxdb/ s3://"${S3_BUCKET_INFLUXDB}"/yearly_backup/influxdb/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Yearly backup" + echo "" + echo "STATUS: Influxdb Yearly backup succeeded." 
+ echo "" + echo "******* Influxdb Database & metadata Backup ********" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/yearly_backup/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_metdata_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "************** Influxdb data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_INFLUXDB}"/yearly_backup/influxdb/ --human-readable | grep -i "${SOURCE_NAME}"_influxdb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_INFLUXDB}\/,,g" &>> /tmp/influxbackup.txt + echo "" + echo "********************** END ********************* " + }>> /tmp/influxbackup.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Influxdb Yearly backup" + echo "" + echo "STATUS: Influxdb Yearly backup failed." + echo "" + echo "Something went wrong, please check it" + }>> /tmp/influxbackup.txt + < /tmp/influxbackup.txt mail -s "${SOURCE_NAME}: Influxdb backup" "${INFLUXDB_BACKUP_MAIL}" +fi +fi # Remove the old backup data in local directory to avoid excessive storage use find /var/lib/backup/influxdb/ -type f -exec rm {} \; find /var/lib/influxdb-backup/ -type f -exec rm {} \; < /tmp/influxbackup.txt mail -s "${SOURCE_NAME}: Influxdb backup" "${INFLUXDB_BACKUP_MAIL}" +###PRUNE### +rm /tmp/influxbackup.txt +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_INFLUXDB}"/influxdb/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_INFLUXDB}"/monthly_backup/influxdb/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi diff 
--git a/influxdb/influxdb_cron.sh b/influxdb/influxdb_cron.sh deleted file mode 100644 index 4c13808..0000000 --- a/influxdb/influxdb_cron.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/sh - -# exit on unchecked errors -set -e - -# backups are scheduled via the root crontab. Start by heading there -cd /root - -#write out current crontab -crontab -l > mycron || echo "no crontab for root, going on" - -#echo new cron into cron file -echo "35 6 * * * /bin/bash -l -c '/bin/backup.sh'" >> mycron - -#delete duplicated lines -sort -u -o mycron mycron - -#install new cron file -crontab mycron diff --git a/influxdb/influxstart.sh b/influxdb/influxstart.sh new file mode 100644 index 0000000..5c222f5 --- /dev/null +++ b/influxdb/influxstart.sh @@ -0,0 +1,12 @@ +#!/bin/bash +while true +do + HOUR="$(date +'%H')" + MINUTE="$(date +'%M')" + + if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ] + then + /bin/backup.sh + sleep 60 + fi + done \ No newline at end of file From 08c5bc6c4fa814bf60d5f276829105ce48cd53d6 Mon Sep 17 00:00:00 2001 From: shashidharanA <103415202+shashidharanA@users.noreply.github.com> Date: Wed, 14 Dec 2022 18:49:15 +0530 Subject: [PATCH 2/7] Update docker-compose.yml updated Mongodb version --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index 6b24790..910e8ae 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -406,7 +406,7 @@ services: mongodb: restart: unless-stopped - image: mongo:5.0 + image: mongo:6.0 environment: MONGO_INITDB_ROOT_USERNAME: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_USERNAME:-}" MONGO_INITDB_ROOT_PASSWORD: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_PASSWORD:-}" From 4ab78d2db612d987583ff1daf003ed211ec4112c Mon Sep 17 00:00:00 2001 From: shashidharanA Date: Wed, 14 Dec 2022 18:54:12 +0530 Subject: [PATCH 3/7] Renamed Backup Server --- backup/Dockerfile | 97 ++++++++++++++++++++++++++ backup/README.md | 50 ++++++++++++++ backup/grafana_backup.sh | 144 
++++++++++++++++++++++++++++++++++++++ backup/mongodb_backup.sh | 65 ++++++++++++++++++ backup/mqtts_backup.sh | 145 +++++++++++++++++++++++++++++++++++++++ backup/nginx_backup.sh | 144 ++++++++++++++++++++++++++++++++++++++ backup/nodered_backup.sh | 145 +++++++++++++++++++++++++++++++++++++++ backup/postfix.sh | 2 + backup/startup.sh | 27 ++++++++ 9 files changed, 819 insertions(+) create mode 100644 backup/Dockerfile create mode 100644 backup/README.md create mode 100644 backup/grafana_backup.sh create mode 100644 backup/mongodb_backup.sh create mode 100644 backup/mqtts_backup.sh create mode 100644 backup/nginx_backup.sh create mode 100644 backup/nodered_backup.sh create mode 100644 backup/postfix.sh create mode 100644 backup/startup.sh diff --git a/backup/Dockerfile b/backup/Dockerfile new file mode 100644 index 0000000..9f63f81 --- /dev/null +++ b/backup/Dockerfile @@ -0,0 +1,97 @@ +# +# Dockerfile for building the cron-backup instance with S3-backup and Mail alert setup for the below service +# 1. Node-red +# 2. Grafana +# 3. Nginx +# 4. Mqtts +# 5. 
mongodb + +# To find the version of installed Mongodb service +FROM mongo:latest AS mongodb +RUN env | grep MON > /root/env + + +# Building cron-backup instance +FROM phusion/baseimage:master-amd64 +# Copying mongodb's version +COPY --from=mongodb /root/env /root/env + +# Installing same Mongodb's tools as in the copied version here in the cron-backup instance +RUN set -x \ + && export $(xargs < /root/env) \ + && echo "deb http://$MONGO_REPO/apt/ubuntu focal/${MONGO_PACKAGE%-unstable}/$MONGO_MAJOR multiverse" | tee "/etc/apt/sources.list.d/${MONGO_PACKAGE%-unstable}.list" \ + && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys B00A0BD1E2C63C11 \ + && export DEBIAN_FRONTEND=noninteractive && apt-get update && ln -s /bin/true /usr/local/bin/systemctl && apt-get install -y \ + ${MONGO_PACKAGE}=$MONGO_VERSION \ + ${MONGO_PACKAGE}-tools=$MONGO_VERSION + + +# some basic package installation for troubleshooting +RUN apt-get update && apt-get install -y \ + iputils-ping \ + net-tools \ + debconf-utils \ + rsync + +# Change workdir +RUN mkdir -p /opt/backup +WORKDIR "/opt/backup" + +# To backup Mongodb to S3 Bucket, some packages need to be installed as follows: +RUN apt-get update && apt-get install -y python3-pip +RUN pip3 install s3cmd +ARG AWS_ACCESS_KEY_ID +ARG AWS_DEFAULT_REGION +ARG AWS_HOST_BASE +ARG AWS_HOST_BUCKET +ARG AWS_SECRET_ACCESS_KEY +RUN set -x \ + && echo "[default]\naccess_key = ${AWS_ACCESS_KEY_ID}\nbucket_location = $AWS_DEFAULT_REGION\nhost_base = $AWS_HOST_BASE\nhost_bucket = $AWS_HOST_BUCKET\nsecret_key = $AWS_SECRET_ACCESS_KEY" | tee /root/.s3cfg + +# passing arguments to build postfix image +ARG hostname +ARG relay_ip +ARG domain + +# Install Postfix +RUN echo "postfix postfix/mailname string $hostname" | debconf-set-selections +RUN echo "postfix postfix/main_mailer_type select Satellite system" | debconf-set-selections +RUN apt-get update && apt-get install -y postfix mailutils +RUN postconf -e relayhost=$relay_ip + +# This will replace
local mail addresses by valid Internet addresses when mail leaves the machine via SMTP. +RUN echo "root@${hostname} backup@${domain}" > /etc/postfix/generic +RUN postconf -e smtp_generic_maps=hash:/etc/postfix/generic +RUN postmap /etc/postfix/generic + +# Backup script for node-red data directory backup +COPY nodered_backup.sh /bin/nodered_backup.sh +RUN chmod +x /bin/nodered_backup.sh + +# Backup script for Grafana data directory backup +COPY grafana_backup.sh /bin/grafana_backup.sh +RUN chmod +x /bin/grafana_backup.sh + +# Backup script for Nginx data directory backup +COPY nginx_backup.sh /bin/nginx_backup.sh +RUN chmod +x /bin/nginx_backup.sh + +# Backup script for Mqtts data directory backup +COPY mqtts_backup.sh /bin/mqtts_backup.sh +RUN chmod +x /bin/mqtts_backup.sh + +# Startup script that runs the scheduled backup jobs (runit service) +COPY startup.sh /etc/service/startup/run +RUN chmod +x /etc/service/startup/run + +# Backup script for mongodb +#COPY mongodb_backup.sh /etc/service/mongodb_backup/run +#RUN chmod +x /etc/service/mongodb_backup/run + + +# Start the postfix daemon during container startup +RUN mkdir -p /etc/my_init.d +COPY postfix.sh /etc/my_init.d/postfix.sh +RUN chmod +x /etc/my_init.d/postfix.sh + +# end of file \ No newline at end of file diff --git a/backup/README.md new file mode 100644 index 0000000..f859142 --- /dev/null +++ b/backup/README.md @@ -0,0 +1,50 @@ +# [backup](./backup) Docker Container Usage + +This instance provides backup support for the `Nginx`, `Node-red`, `Grafana` and `Mqtts` containers and pushes the backed-up data to S3-compatible storage. + +## Shell script + +For backing up the directory data + +- It uses [`grafana_backup.sh`](backup/grafana_backup.sh) for `Grafana` container. +- It uses [`nodered_backup.sh`](backup/nodered_backup.sh) for `Node-red` container. +- It uses [`nginx_backup.sh`](backup/nginx_backup.sh) for `Nginx` container. +- It uses [`mqtts_backup.sh`](backup/mqtts_backup.sh) for `Mqtts` container.
+ +## Scheduling backup using `Daemon thread` + +The following backup jobs are added to run at specific time. + +``` bash + +# Start up the Process +while true +do + HOUR="$(date +'%H')" + MINUTE="$(date +'%M')" + + if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ] + then + /bin/nodered_backup.sh + sleep 60 + fi + if [ "$HOUR" = "07" ] && [ "$MINUTE" = "35" ] + then + /bin/grafana_backup.sh + sleep 60 + fi + if [ "$HOUR" = "08" ] && [ "$MINUTE" = "35" ] + then + /bin/nginx_backup.sh + sleep 60 + fi + if [ "$HOUR" = "09" ] && [ "$MINUTE" = "35" ] + then + /bin/mqtts_backup.sh + sleep 60 + fi +``` + +## Mail Alert + +The above backup shell scripts were configured to send mail for the both successful/unsuccessful run. diff --git a/backup/grafana_backup.sh b/backup/grafana_backup.sh new file mode 100644 index 0000000..f1cb98f --- /dev/null +++ b/backup/grafana_backup.sh @@ -0,0 +1,144 @@ +#!/bin/bash +#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. + +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec +DATE1=$(date +%Y%m%d%H%M) +DATE=$(date +%d-%m-%y_%H-%M) + +mkdir -p /var/lib/backup/grafana + +grafana_src='/grafana' + +if [ ! 
-d $grafana_src ]; then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup" + echo "" + echo "STATUS: Grafana backup failed" + echo "" + echo "The source backup directory: grafana_src is not available" + }>> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" + exit +else + tar cvzf /var/lib/backup/grafana/"${SOURCE_NAME}"_grafana_data_backup_"${DATE1}".tgz ${grafana_src}/ +fi + +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/grafana/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Daily backup" + echo "" + echo "STATUS: Grafana Daily backup succeeded." + echo "" + echo "******* Grafana Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""\/,,g" &>> /tmp/grafana.txt + echo "" + echo "************** END **************************" + } >> /tmp/grafana.txt +else +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Daily backup" + echo "" + echo "STATUS: Grafana Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Monthly backup" + echo "" + echo "STATUS: Grafana Monthly backup succeeded." 
+ echo "" >> /tmp/grafana.txt + echo "******* Grafana Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""/monthly_backup/grafana/\/,,g" &>> /tmp/grafana.txt + echo "" + echo "************** END **************************" + } >> /tmp/grafana.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Monthly backup" + echo "" + echo "STATUS: Grafana Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/yearly_backup/grafana/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Yearly backup" + echo "" + echo "STATUS: Grafana Yearly backup succeeded." 
+ echo "" + echo "******* Grafana Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/yearly_backup/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""/yearly_backup/grafana/\/,,g" &>> /tmp/grafana.txt + echo "" + echo "************** END **************************" + } >> /tmp/grafana.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Yearly backup" + echo "" + echo "STATUS: Grafana Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/grafana.txt + < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" +fi +fi + + +< /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" + +# Remove the old backup data in local directory to avoid excessive storage use +find /var/lib/backup/grafana/ -type f -exec rm {} \; +rm /tmp/grafana.txt +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_GRAFANA}"/grafana/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi diff --git a/backup/mongodb_backup.sh b/backup/mongodb_backup.sh new file mode 100644 index 0000000..f8105ff --- /dev/null +++ b/backup/mongodb_backup.sh @@ -0,0 +1,65 @@ +#!/bin/bash +#The Shell script will be used for taking backup and send it to S3 bucket. 
+ +# TO list all Databases in mongodb databases +DATE1=$(date +%Y%m%d%H%M) +DATE=$(date +%d-%m-%y_%H-%M) + +mkdir -p /var/lib/backup/mongodb + +#Full Mongodb backup + +mongodump --host mongodb:27017 --authenticationDatabase admin -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" -o /var/lib/mongodb-backup/dump + + +showdb(){ +mongo --quiet --host mongodb:27017 --eval "printjson(db.adminCommand('listDatabases'))" -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" | grep -i name | awk -F'"' '{print $4}' +} + + +showdb > /mongo_dbs.txt + +#Backing up the databases listed. +while read -r db +do + echo "Creating backup for $db" + mongodump --host mongodb:27017 --db "$db" --authenticationDatabase admin -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" -o /var/lib/mongodb-backup/ +done < "/mongo_dbs.txt" + +tar czf /var/lib/backup/mongodb/"${SOURCE_NAME}"_mongodb_db_backup_"${DATE1}".tgz /var/lib/mongodb-backup/. && rsync -avr /var/lib/mongodb/ /root/mongodb_data/ && tar czf /var/lib/backup/mongodb/"${SOURCE_NAME}"_mongodb_data_backup_"${DATE1}".tgz /root/mongodb_data/. + +# Moving the backup to S3 bucket +if s3cmd put -r --no-mime-magic /var/lib/backup/mongodb/ s3://"${S3_BUCKET_MONGODB}"/; +then + echo "DATE:" "$DATE" > /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + echo "DESCRIPTION: ${SOURCE_NAME}_Mongodb backup" >> /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + echo "STATUS: mongodb backup is Successful." 
>> /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + echo "******* Mongodb Database Backup ****************" >> /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MONGODB}"/ --human-readable | grep -i "${SOURCE_NAME}"_mongodb_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_MONGODB}\/,,g" &>> /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + echo "************** Mongodb data Backup *************" >> /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MONGODB}"/ --human-readable | grep -i "${SOURCE_NAME}"_mongodb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_MONGODB}\/,,g" &>> /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + echo "********************** END *********************" >> /tmp/mongodbbackup.txt +else + echo "DATE:" "$DATE" > /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + echo "DESCRIPTION: ${SOURCE_NAME}_Mongodb backup" >> /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + echo "STATUS: mongodb backup is Failed." >> /tmp/mongodbbackup.txt + echo " " >> /tmp/mongodbbackup.txt + echo "Something went wrong, Please check it" >> /tmp/mongodbbackup.txt + < /tmp/mongodbbackup.txt mail -s "${SOURCE_NAME}: mongodb backup" "${CRON_BACKUP_MAIL}" +fi + +# Remove the old backup data in local directory to avoid excessive storage use +find /var/lib/backup/mongodb/ -type f -exec rm {} \; +find /root/mongodb_data/ -type f -exec rm {} \; +find /var/lib/mongodb-backup/ -type f -exec rm {} \; + +< /tmp/mongodbbackup.txt mail -s "${SOURCE_NAME}: mongodb backup" "${CRON_BACKUP_MAIL}" diff --git a/backup/mqtts_backup.sh b/backup/mqtts_backup.sh new file mode 100644 index 0000000..8225492 --- /dev/null +++ b/backup/mqtts_backup.sh @@ -0,0 +1,145 @@ +#!/bin/bash +#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. 
+#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. + +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec +DATE1=$(date +%Y%m%d%H%M) +DATE=$(date +%d-%m-%y_%H-%M) + + +mkdir -p /var/lib/backup/mqtts + +mqtts_src='/mqtts' + +if [ ! -d $mqtts_src ]; then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts backup" + echo "" + echo "STATUS: Mqtts backup failed" + echo "" + echo "The source backup directory: mqtts_src is not available" + }>> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" + exit +else + tar cvzf /var/lib/backup/mqtts/"${SOURCE_NAME}"_mqtts_data_backup_"${DATE1}".tgz ${mqtts_src}/ +fi + +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/mqtts/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Daily backup" + echo "" + echo "STATUS: Mqtts Daily backup succeeded." 
+ echo "" + echo "******* Mqtts Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""\/,,g" &>> /tmp/mqtts.txt + echo "" + echo "************** END **************************" + } >> /tmp/mqtts.txt +else +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Daily backup" + echo "" + echo "STATUS: Mqtts Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Monthly backup" + echo "" + echo "STATUS: Mqtts Monthly backup succeeded." 
+ echo "" >> /tmp/mqtts.txt + echo "******* Mqtts Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""/monthly_backup/mqtts/\/,,g" &>> /tmp/mqtts.txt + echo "" + echo "************** END **************************" + } >> /tmp/mqtts.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Monthly backup" + echo "" + echo "STATUS: Mqtts Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/yearly_backup/mqtts/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Yearly backup" + echo "" + echo "STATUS: Mqtts Yearly backup succeeded." 
+ echo "" + echo "******* Mqtts Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/yearly_backup/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""/yearly_backup/mqtts/\/,,g" &>> /tmp/mqtts.txt + echo "" + echo "************** END **************************" + } >> /tmp/mqtts.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Yearly backup" + echo "" + echo "STATUS: Mqtts Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/mqtts.txt + < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" +fi +fi + + +< /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" + +# Remove the old backup data in local directory to avoid excessive storage use +find /var/lib/backup/mqtts/ -type f -exec rm {} \; +rm /tmp/mqtts.txt +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_MQTTS}"/mqtts/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi \ No newline at end of file diff --git a/backup/nginx_backup.sh b/backup/nginx_backup.sh new file mode 100644 index 0000000..44beb0b --- /dev/null +++ b/backup/nginx_backup.sh @@ -0,0 +1,144 @@ +#!/bin/bash +#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. 
+ +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec +DATE1=$(date +%Y%m%d%H%M) +DATE=$(date +%d-%m-%y_%H-%M) + +mkdir -p /var/lib/backup/nginx + +nginx_src='/nginx' + +if [ ! -d $nginx_src ]; then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" + echo "" + echo "STATUS: Nginx backup failed" + echo "" + echo "The source backup directory: nginx_src is not available" + }>> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" + exit +else + tar cvzf /var/lib/backup/nginx/"${SOURCE_NAME}"_nginx_data_backup_"${DATE1}".tgz ${nginx_src}/ +fi + +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/nginx/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Daily backup" + echo "" + echo "STATUS: Nginx Daily backup succeeded." + echo "" + echo "******* Nginx Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""\/,,g" &>> /tmp/nginx.txt + echo "" + echo "************** END **************************" + } >> /tmp/nginx.txt +else +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Daily backup" + echo "" + echo "STATUS: Nginx Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Monthly backup" + echo "" + echo "STATUS: Nginx Monthly backup succeeded." 
+ echo "" >> /tmp/nginx.txt + echo "******* Nginx Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""/monthly_backup/nginx/\/,,g" &>> /tmp/nginx.txt + echo "" + echo "************** END **************************" + } >> /tmp/nginx.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Monthly backup" + echo "" + echo "STATUS: Nginx Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/yearly_backup/nginx/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Yearly backup" + echo "" + echo "STATUS: Nginx Yearly backup succeeded." 
+ echo "" + echo "******* Nginx Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/yearly_backup/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""/yearly_backup/nginx/\/,,g" &>> /tmp/nginx.txt + echo "" + echo "************** END **************************" + } >> /tmp/nginx.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Yearly backup" + echo "" + echo "STATUS: Nginx Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nginx.txt + < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" +fi +fi + + +< /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" + +# Remove the old backup data in local directory to avoid excessive storage use +find /var/lib/backup/nginx/ -type f -exec rm {} \; +rm /tmp/nginx.txt +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_NGINX}"/nginx/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi \ No newline at end of file diff --git a/backup/nodered_backup.sh b/backup/nodered_backup.sh new file mode 100644 index 0000000..fa48477 --- /dev/null +++ b/backup/nodered_backup.sh @@ -0,0 +1,145 @@ +#!/bin/bash +#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. +#Version:v0.1 +#Created Date:2022-08-26 +#Modified Date:12-10-2022 +#Reviewer: Terry Moore. +#Author: Shashi, VishnuNambi. 
+ +a=$(date +%b) +b=Mar +c=Jun +d=Sep +e=Dec +DATE1=$(date +%Y%m%d%H%M) +DATE=$(date +%d-%m-%y_%H-%M) + +mkdir -p /var/lib/backup/nodered + +nodered_src='/nodered' + +if [ ! -d $nodered_src ]; then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" + echo "" + echo "STATUS: Nodered backup failed" + echo "" + echo "The source backup directory: nodered_src is not available" + }>> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" + exit +else + tar cvzf /var/lib/backup/nodered/"${SOURCE_NAME}"_nodered_data_backup_"${DATE1}".tgz ${nodered_src}/ +fi + +# Moving the backup to S3 bucket (Daily backup) +if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/nodered/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Daily backup" + echo "" + echo "STATUS: Nodered Daily backup succeeded." + echo "" + echo "******* Nodered Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""\/,,g" &>> /tmp/nodered.txt + echo "" + echo "************** END **************************" + } >> /tmp/nodered.txt +else +{ echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Daily backup" + echo "" + echo "STATUS: Nodered Daily backup failed" + echo "" + echo "Something went wrong, please check it" + } >> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" +fi + + +# Moving the backup to S3 bucket (Monthly backup) +if [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Monthly backup" + echo "" + echo "STATUS: 
Nodered Monthly backup succeeded." + echo "" >> /tmp/nodered.txt + echo "******* Nodered Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""/monthly_backup/nodered/\/,,g" &>> /tmp/nodered.txt + echo "" + echo "************** END **************************" + } >> /tmp/nodered.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Monthly backup" + echo "" + echo "STATUS: Nodered Monthly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" +fi +fi + + +# Moving the backup to S3 bucket (Yearly backup) +if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then +if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/yearly_backup/nodered/; +then + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Yearly backup" + echo "" + echo "STATUS: Nodered Yearly backup succeeded." 
+ echo "" + echo "******* Nodered Data Backup ****************" + echo "" + s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/yearly_backup/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""/yearly_backup/nodered/\/,,g" &>> /tmp/nodered.txt + echo "" + echo "************** END **************************" + } >> /tmp/nodered.txt +else + { + echo "DATE:" "$DATE" + echo "" + echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Yearly backup" + echo "" + echo "STATUS: Nodered Yearly backup failed" + echo "" + echo "Something went wrong, please check it" + }>> /tmp/nodered.txt + < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" +fi +fi + + +< /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" + +# Remove the old backup data in local directory to avoid excessive storage use +find /var/lib/backup/nodered/ -type f -exec rm {} \; +rm /tmp/nodered.txt + +###PRUNE### + +# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) +s3cmd ls -r s3://"${S3_BUCKET_NODERED}"/nodered/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done + + +if [ "$(date -d +1day +%d)" -eq 01 ]; then +# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) +s3cmd ls -r s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done +fi diff --git a/backup/postfix.sh b/backup/postfix.sh new file mode 100644 index 0000000..c14d96e --- /dev/null +++ b/backup/postfix.sh @@ -0,0 +1,2 @@ +#!/bin/sh +/etc/init.d/postfix restart diff --git a/backup/startup.sh b/backup/startup.sh new file mode 100644 index 0000000..4cf2cdd --- /dev/null +++ b/backup/startup.sh @@ -0,0 +1,27 @@ +#!/bin/bash +while true +do + HOUR="$(date +'%H')" + MINUTE="$(date 
+'%M')" + + if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ] + then + /bin/nodered_backup.sh + sleep 60 + fi + if [ "$HOUR" = "07" ] && [ "$MINUTE" = "35" ] + then + /bin/grafana_backup.sh + sleep 60 + fi + if [ "$HOUR" = "08" ] && [ "$MINUTE" = "35" ] + then + /bin/nginx_backup.sh + sleep 60 + fi + if [ "$HOUR" = "09" ] && [ "$MINUTE" = "35" ] + then + /bin/mqtts_backup.sh + sleep 60 + fi +done \ No newline at end of file From efb3c54650071efa54e381e50eece846dd7da7d7 Mon Sep 17 00:00:00 2001 From: shashidharanA Date: Wed, 14 Dec 2022 18:55:02 +0530 Subject: [PATCH 4/7] Removed old server --- cron-backup/Dockerfile | 97 ----------------------- cron-backup/README.md | 50 ------------ cron-backup/grafana_backup.sh | 144 --------------------------------- cron-backup/mongodb_backup.sh | 65 --------------- cron-backup/mqtts_backup.sh | 145 ---------------------------------- cron-backup/nginx_backup.sh | 144 --------------------------------- cron-backup/nodered_backup.sh | 145 ---------------------------------- cron-backup/postfix.sh | 2 - cron-backup/startup.sh | 27 ------- 9 files changed, 819 deletions(-) delete mode 100755 cron-backup/Dockerfile delete mode 100644 cron-backup/README.md delete mode 100755 cron-backup/grafana_backup.sh delete mode 100644 cron-backup/mongodb_backup.sh delete mode 100755 cron-backup/mqtts_backup.sh delete mode 100755 cron-backup/nginx_backup.sh delete mode 100755 cron-backup/nodered_backup.sh delete mode 100755 cron-backup/postfix.sh delete mode 100644 cron-backup/startup.sh diff --git a/cron-backup/Dockerfile b/cron-backup/Dockerfile deleted file mode 100755 index 9f63f81..0000000 --- a/cron-backup/Dockerfile +++ /dev/null @@ -1,97 +0,0 @@ -# -# Dockerfile for building the cron-backup instance with S3-backup and Mail alert setup for the below service -# 1. Node-red -# 2. Grafana -# 3. Nginx -# 4. Mqtts -# 5. 
mongodb - -# To find the version of installed Mongodb service -FROM mongo:latest AS mongodb -RUN env | grep MON > /root/env - - -# Building cron-backup instance -FROM phusion/baseimage:master-amd64 -# Copying mongodb's version -COPY --from=mongodb /root/env /root/env - -# Installing same Mongodb's tools as in the copied version here in the cron-backup instance -RUN set -x \ - && export $(xargs < /root/env) \ - && echo "deb http://$MONGO_REPO/apt/ubuntu focal/${MONGO_PACKAGE%-unstable}/$MONGO_MAJOR multiverse" | tee "/etc/apt/sources.list.d/${MONGO_PACKAGE%-unstable}.list" \ - && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys B00A0BD1E2C63C11 \ - && export DEBIAN_FRONTEND=noninteractive && apt-get update && ln -s /bin/true /usr/local/bin/systemctl && apt-get install -y \ - ${MONGO_PACKAGE}=$MONGO_VERSION \ - ${MONGO_PACKAGE}-tools=$MONGO_VERSION - - -# some basic package installation for troubleshooting -RUN apt-get update && apt-get install -y \ - iputils-ping \ - net-tools \ - debconf-utils \ - rsync - -# Change workdir -RUN mkdir -p /opt/backup -WORKDIR "/opt/backup" - -# To backup Mongodb to S3 Bucket, some packages need to be installed as follows: -RUN apt-get update && apt-get install -y python3-pip -RUN pip3 install s3cmd -ARG AWS_ACCESS_KEY_ID -ARG AWS_DEFAULT_REGION -ARG AWS_HOST_BASE -ARG AWS_HOST_BUCKET -ARG AWS_SECRET_ACCESS_KEY -RUN set -x \ - && echo "[default]\naccess_key = ${AWS_ACCESS_KEY_ID}\nbucket_location = $AWS_DEFAULT_REGION\nhost_base = $AWS_HOST_BASE\nhost_bucket = $AWS_HOST_BUCKET\nsecret_key = $AWS_SECRET_ACCESS_KEY" | tee /root/.s3cfg - -# passing arguments to build postfix image -ARG hostname -ARG relay_ip -ARG domain - -# Install Postfix -RUN echo "postfix postfix/mailname string $host_name" | debconf-set-selections -RUN echo "postfix postfix/main_mailer_type select Satellite system" | debconf-set-selections -RUN apt-get update && apt-get install -y postfix mailutils -RUN postconf -e relayhost=$relay_ip - -# This will replace 
local mail addresses by valid Internet addresses when mail leaves the machine via SMTP. -RUN echo "root@${hostname} backup@${domain}" > /etc/postfix/generic -RUN postconf -e smtp_generic_maps=hash:/etc/postfix/generic -RUN postmap /etc/postfix/generic - -# Backup script for node-red data directory backup -COPY nodered_backup.sh /bin/nodered_backup.sh -RUN chmod +x /bin/nodered_backup.sh - -# Backup script for Grafana data directory backup -COPY grafana_backup.sh /bin/grafana_backup.sh -RUN chmod +x /bin/grafana_backup.sh - -# Backup script for Nginx data directory backup -COPY nginx_backup.sh /bin/nginx_backup.sh -RUN chmod +x /bin/nginx_backup.sh - -# Backup script for Mqtts data directory backup -COPY mqtts_backup.sh /bin/mqtts_backup.sh -RUN chmod +x /bin/mqtts_backup.sh - -# Backup script for startup.sh -COPY startup.sh /etc/service/startup/run -RUN chmod +x /etc/service/startup/run - -# Backup script for mongodb -#COPY mongodb_backup.sh /etc/service/mongodb_backup/run -#RUN chmod +x /etc/service/mongodb_backup/run - - -# Start the postfix daemon during container startup -RUN mkdir -p /etc/my_init.d -COPY postfix.sh /etc/my_init.d/postfix.sh -RUN chmod +x /etc/my_init.d/postfix.sh - -# end of file \ No newline at end of file diff --git a/cron-backup/README.md b/cron-backup/README.md deleted file mode 100644 index f859142..0000000 --- a/cron-backup/README.md +++ /dev/null @@ -1,50 +0,0 @@ -# [cron-backup](./cron-backup) Docker Container Usage - -This instance provides backup support for the `Nginx`, `Node-red` and `Grafana` containers and pushed the backed up data to S3-compatible storage. - -## Shell script - -For backing up the directory data - -- It uses [`grafana_backup.sh`](backup\grafana_backup.sh) for `Grafana` container. -- It uses [`nodered_backup.sh`](backup\nodered_backup.sh) for `Node-red` container. -- It uses [`nginx_backup.sh`](backup\nginx_backup.sh) for `Nginx` container. 
-- It uses [`mqtts_backup.sh`](backup\mqtts_backup.sh) for `Mqtts` container. - -## Scheduling backup using `Daemon thread` - -The following backup jobs are added to run at specific time. - -``` bash - -# Start up the Process -while true -do - HOUR="$(date +'%H')" - MINUTE="$(date +'%M')" - - if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ] - then - /bin/nodered_backup.sh - sleep 60 - fi - if [ "$HOUR" = "07" ] && [ "$MINUTE" = "35" ] - then - /bin/grafana_backup.sh - sleep 60 - fi - if [ "$HOUR" = "08" ] && [ "$MINUTE" = "35" ] - then - /bin/nginx_backup.sh - sleep 60 - fi - if [ "$HOUR" = "09" ] && [ "$MINUTE" = "35" ] - then - /bin/mqtts_backup.sh - sleep 60 - fi -``` - -## Mail Alert - -The above backup shell scripts were configured to send mail for the both successful/unsuccessful run. diff --git a/cron-backup/grafana_backup.sh b/cron-backup/grafana_backup.sh deleted file mode 100755 index f1cb98f..0000000 --- a/cron-backup/grafana_backup.sh +++ /dev/null @@ -1,144 +0,0 @@ -#!/bin/bash -#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. -#Version:v0.1 -#Created Date:2022-08-26 -#Modified Date:12-10-2022 -#Reviewer: Terry Moore. -#Author: Shashi, VishnuNambi. - -a=$(date +%b) -b=Mar -c=Jun -d=Sep -e=Dec -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - -mkdir -p /var/lib/backup/grafana - -grafana_src='/grafana' - -if [ ! 
-d $grafana_src ]; then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana backup" - echo "" - echo "STATUS: Grafana backup failed" - echo "" - echo "The source backup directory: grafana_src is not available" - }>> /tmp/grafana.txt - < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" - exit -else - tar cvzf /var/lib/backup/grafana/"${SOURCE_NAME}"_grafana_data_backup_"${DATE1}".tgz ${grafana_src}/ -fi - -# Moving the backup to S3 bucket (Daily backup) -if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/grafana/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Daily backup" - echo "" - echo "STATUS: Grafana Daily backup succeeded." - echo "" - echo "******* Grafana Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""\/,,g" &>> /tmp/grafana.txt - echo "" - echo "************** END **************************" - } >> /tmp/grafana.txt -else -{ echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Daily backup" - echo "" - echo "STATUS: Grafana Daily backup failed" - echo "" - echo "Something went wrong, please check it" - } >> /tmp/grafana.txt - < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" -fi - - -# Moving the backup to S3 bucket (Monthly backup) -if [ "$(date -d +1day +%d)" -eq 01 ]; then -if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Monthly backup" - echo "" - echo "STATUS: Grafana Monthly backup succeeded." 
- echo "" >> /tmp/grafana.txt - echo "******* Grafana Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""/monthly_backup/grafana/\/,,g" &>> /tmp/grafana.txt - echo "" - echo "************** END **************************" - } >> /tmp/grafana.txt -else - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Monthly backup" - echo "" - echo "STATUS: Grafana Monthly backup failed" - echo "" - echo "Something went wrong, please check it" - }>> /tmp/grafana.txt - < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" -fi -fi - - -# Moving the backup to S3 bucket (Yearly backup) -if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then -if s3cmd put -r --no-mime-magic /var/lib/backup/grafana/ s3://"${S3_BUCKET_GRAFANA}"/yearly_backup/grafana/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Yearly backup" - echo "" - echo "STATUS: Grafana Yearly backup succeeded." 
- echo "" - echo "******* Grafana Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_GRAFANA}"/yearly_backup/grafana/ --human-readable | grep -i "${SOURCE_NAME}"_grafana_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_GRAFANA}""/yearly_backup/grafana/\/,,g" &>> /tmp/grafana.txt - echo "" - echo "************** END **************************" - } >> /tmp/grafana.txt -else - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Grafana Yearly backup" - echo "" - echo "STATUS: Grafana Yearly backup failed" - echo "" - echo "Something went wrong, please check it" - }>> /tmp/grafana.txt - < /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" -fi -fi - - -< /tmp/grafana.txt mail -s "${SOURCE_NAME}: Grafana Data Backup" "${BACKUP_MAIL}" - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/grafana/ -type f -exec rm {} \; -rm /tmp/grafana.txt -###PRUNE### - -# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) -s3cmd ls -r s3://"${S3_BUCKET_GRAFANA}"/grafana/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done - - -if [ "$(date -d +1day +%d)" -eq 01 ]; then -# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) -s3cmd ls -r s3://"${S3_BUCKET_GRAFANA}"/monthly_backup/grafana/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done -fi diff --git a/cron-backup/mongodb_backup.sh b/cron-backup/mongodb_backup.sh deleted file mode 100644 index f8105ff..0000000 --- a/cron-backup/mongodb_backup.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/bash -#The Shell script will be used for taking backup and send it to S3 bucket. 
- -# TO list all Databases in mongodb databases -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - -mkdir -p /var/lib/backup/mongodb - -#Full Mongodb backup - -mongodump --host mongodb:27017 --authenticationDatabase admin -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" -o /var/lib/mongodb-backup/dump - - -showdb(){ -mongo --quiet --host mongodb:27017 --eval "printjson(db.adminCommand('listDatabases'))" -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" | grep -i name | awk -F'"' '{print $4}' -} - - -showdb > /mongo_dbs.txt - -#Backing up the databases listed. -while read -r db -do - echo "Creating backup for $db" - mongodump --host mongodb:27017 --db "$db" --authenticationDatabase admin -u "$MONGO_INITDB_ROOT_USERNAME" -p "$MONGO_INITDB_ROOT_PASSWORD" -o /var/lib/mongodb-backup/ -done < "/mongo_dbs.txt" - -tar czf /var/lib/backup/mongodb/"${SOURCE_NAME}"_mongodb_db_backup_"${DATE1}".tgz /var/lib/mongodb-backup/. && rsync -avr /var/lib/mongodb/ /root/mongodb_data/ && tar czf /var/lib/backup/mongodb/"${SOURCE_NAME}"_mongodb_data_backup_"${DATE1}".tgz /root/mongodb_data/. - -# Moving the backup to S3 bucket -if s3cmd put -r --no-mime-magic /var/lib/backup/mongodb/ s3://"${S3_BUCKET_MONGODB}"/; -then - echo "DATE:" "$DATE" > /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Mongodb backup" >> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "STATUS: mongodb backup is Successful." 
>> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "******* Mongodb Database Backup ****************" >> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MONGODB}"/ --human-readable | grep -i "${SOURCE_NAME}"_mongodb_db | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_MONGODB}\/,,g" &>> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "************** Mongodb data Backup *************" >> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MONGODB}"/ --human-readable | grep -i "${SOURCE_NAME}"_mongodb_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/${S3_BUCKET_MONGODB}\/,,g" &>> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "********************** END *********************" >> /tmp/mongodbbackup.txt -else - echo "DATE:" "$DATE" > /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "DESCRIPTION: ${SOURCE_NAME}_Mongodb backup" >> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "STATUS: mongodb backup is Failed." 
>> /tmp/mongodbbackup.txt - echo " " >> /tmp/mongodbbackup.txt - echo "Something went wrong, Please check it" >> /tmp/mongodbbackup.txt - < /tmp/mongodbbackup.txt mail -s "${SOURCE_NAME}: mongodb backup" "${CRON_BACKUP_MAIL}" -fi - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/mongodb/ -type f -exec rm {} \; -find /root/mongodb_data/ -type f -exec rm {} \; -find /var/lib/mongodb-backup/ -type f -exec rm {} \; - -< /tmp/mongodbbackup.txt mail -s "${SOURCE_NAME}: mongodb backup" "${CRON_BACKUP_MAIL}" diff --git a/cron-backup/mqtts_backup.sh b/cron-backup/mqtts_backup.sh deleted file mode 100755 index 8225492..0000000 --- a/cron-backup/mqtts_backup.sh +++ /dev/null @@ -1,145 +0,0 @@ -#!/bin/bash -#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. -#Version:v0.1 -#Created Date:2022-08-26 -#Modified Date:12-10-2022 -#Reviewer: Terry Moore. -#Author: Shashi, VishnuNambi. - -a=$(date +%b) -b=Mar -c=Jun -d=Sep -e=Dec -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - - -mkdir -p /var/lib/backup/mqtts - -mqtts_src='/mqtts' - -if [ ! -d $mqtts_src ]; then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts backup" - echo "" - echo "STATUS: Mqtts backup failed" - echo "" - echo "The source backup directory: mqtts_src is not available" - }>> /tmp/mqtts.txt - < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" - exit -else - tar cvzf /var/lib/backup/mqtts/"${SOURCE_NAME}"_mqtts_data_backup_"${DATE1}".tgz ${mqtts_src}/ -fi - -# Moving the backup to S3 bucket (Daily backup) -if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/mqtts/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Daily backup" - echo "" - echo "STATUS: Mqtts Daily backup succeeded." 
- echo "" - echo "******* Mqtts Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""\/,,g" &>> /tmp/mqtts.txt - echo "" - echo "************** END **************************" - } >> /tmp/mqtts.txt -else -{ echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Daily backup" - echo "" - echo "STATUS: Mqtts Daily backup failed" - echo "" - echo "Something went wrong, please check it" - } >> /tmp/mqtts.txt - < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" -fi - - -# Moving the backup to S3 bucket (Monthly backup) -if [ "$(date -d +1day +%d)" -eq 01 ]; then -if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Monthly backup" - echo "" - echo "STATUS: Mqtts Monthly backup succeeded." 
- echo "" >> /tmp/mqtts.txt - echo "******* Mqtts Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""/monthly_backup/mqtts/\/,,g" &>> /tmp/mqtts.txt - echo "" - echo "************** END **************************" - } >> /tmp/mqtts.txt -else - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Monthly backup" - echo "" - echo "STATUS: Mqtts Monthly backup failed" - echo "" - echo "Something went wrong, please check it" - }>> /tmp/mqtts.txt - < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" -fi -fi - - -# Moving the backup to S3 bucket (Yearly backup) -if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then -if s3cmd put -r --no-mime-magic /var/lib/backup/mqtts/ s3://"${S3_BUCKET_MQTTS}"/yearly_backup/mqtts/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Yearly backup" - echo "" - echo "STATUS: Mqtts Yearly backup succeeded." 
- echo "" - echo "******* Mqtts Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_MQTTS}"/yearly_backup/mqtts/ --human-readable | grep -i "${SOURCE_NAME}"_mqtts_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_MQTTS}""/yearly_backup/mqtts/\/,,g" &>> /tmp/mqtts.txt - echo "" - echo "************** END **************************" - } >> /tmp/mqtts.txt -else - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Mqtts Yearly backup" - echo "" - echo "STATUS: Mqtts Yearly backup failed" - echo "" - echo "Something went wrong, please check it" - }>> /tmp/mqtts.txt - < /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" -fi -fi - - -< /tmp/mqtts.txt mail -s "${SOURCE_NAME}: Mqtts Data Backup" "${BACKUP_MAIL}" - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/mqtts/ -type f -exec rm {} \; -rm /tmp/mqtts.txt -###PRUNE### - -# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) -s3cmd ls -r s3://"${S3_BUCKET_MQTTS}"/mqtts/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done - - -if [ "$(date -d +1day +%d)" -eq 01 ]; then -# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) -s3cmd ls -r s3://"${S3_BUCKET_MQTTS}"/monthly_backup/mqtts/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done -fi \ No newline at end of file diff --git a/cron-backup/nginx_backup.sh b/cron-backup/nginx_backup.sh deleted file mode 100755 index 44beb0b..0000000 --- a/cron-backup/nginx_backup.sh +++ /dev/null @@ -1,144 +0,0 @@ -#!/bin/bash -#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. -#Version:v0.1 -#Created Date:2022-08-26 -#Modified Date:12-10-2022 -#Reviewer: Terry Moore. 
-#Author: Shashi, VishnuNambi. - -a=$(date +%b) -b=Mar -c=Jun -d=Sep -e=Dec -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - -mkdir -p /var/lib/backup/nginx - -nginx_src='/nginx' - -if [ ! -d $nginx_src ]; then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx backup" - echo "" - echo "STATUS: Nginx backup failed" - echo "" - echo "The source backup directory: nginx_src is not available" - }>> /tmp/nginx.txt - < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" - exit -else - tar cvzf /var/lib/backup/nginx/"${SOURCE_NAME}"_nginx_data_backup_"${DATE1}".tgz ${nginx_src}/ -fi - -# Moving the backup to S3 bucket (Daily backup) -if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/nginx/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Daily backup" - echo "" - echo "STATUS: Nginx Daily backup succeeded." - echo "" - echo "******* Nginx Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""\/,,g" &>> /tmp/nginx.txt - echo "" - echo "************** END **************************" - } >> /tmp/nginx.txt -else -{ echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Daily backup" - echo "" - echo "STATUS: Nginx Daily backup failed" - echo "" - echo "Something went wrong, please check it" - } >> /tmp/nginx.txt - < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" -fi - - -# Moving the backup to S3 bucket (Monthly backup) -if [ "$(date -d +1day +%d)" -eq 01 ]; then -if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Monthly backup" - echo "" - echo "STATUS: Nginx Monthly backup succeeded." 
- echo "" >> /tmp/nginx.txt - echo "******* Nginx Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""/monthly_backup/nginx/\/,,g" &>> /tmp/nginx.txt - echo "" - echo "************** END **************************" - } >> /tmp/nginx.txt -else - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Monthly backup" - echo "" - echo "STATUS: Nginx Monthly backup failed" - echo "" - echo "Something went wrong, please check it" - }>> /tmp/nginx.txt - < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" -fi -fi - - -# Moving the backup to S3 bucket (Yearly backup) -if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then -if s3cmd put -r --no-mime-magic /var/lib/backup/nginx/ s3://"${S3_BUCKET_NGINX}"/yearly_backup/nginx/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Yearly backup" - echo "" - echo "STATUS: Nginx Yearly backup succeeded." 
- echo "" - echo "******* Nginx Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NGINX}"/yearly_backup/nginx/ --human-readable | grep -i "${SOURCE_NAME}"_nginx_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NGINX}""/yearly_backup/nginx/\/,,g" &>> /tmp/nginx.txt - echo "" - echo "************** END **************************" - } >> /tmp/nginx.txt -else - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nginx Yearly backup" - echo "" - echo "STATUS: Nginx Yearly backup failed" - echo "" - echo "Something went wrong, please check it" - }>> /tmp/nginx.txt - < /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" -fi -fi - - -< /tmp/nginx.txt mail -s "${SOURCE_NAME}: Nginx Data Backup" "${BACKUP_MAIL}" - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/nginx/ -type f -exec rm {} \; -rm /tmp/nginx.txt -###PRUNE### - -# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) -s3cmd ls -r s3://"${S3_BUCKET_NGINX}"/nginx/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done - - -if [ "$(date -d +1day +%d)" -eq 01 ]; then -# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) -s3cmd ls -r s3://"${S3_BUCKET_NGINX}"/monthly_backup/nginx/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done -fi \ No newline at end of file diff --git a/cron-backup/nodered_backup.sh b/cron-backup/nodered_backup.sh deleted file mode 100755 index fa48477..0000000 --- a/cron-backup/nodered_backup.sh +++ /dev/null @@ -1,145 +0,0 @@ -#!/bin/bash -#Purpose: The Shell script will be used for taking backup and send it to S3 bucket and Prune Old Data in S3 Bucket. -#Version:v0.1 -#Created Date:2022-08-26 -#Modified Date:12-10-2022 -#Reviewer: Terry Moore. 
-#Author: Shashi, VishnuNambi. - -a=$(date +%b) -b=Mar -c=Jun -d=Sep -e=Dec -DATE1=$(date +%Y%m%d%H%M) -DATE=$(date +%d-%m-%y_%H-%M) - -mkdir -p /var/lib/backup/nodered - -nodered_src='/nodered' - -if [ ! -d $nodered_src ]; then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered backup" - echo "" - echo "STATUS: Nodered backup failed" - echo "" - echo "The source backup directory: nodered_src is not available" - }>> /tmp/nodered.txt - < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" - exit -else - tar cvzf /var/lib/backup/nodered/"${SOURCE_NAME}"_nodered_data_backup_"${DATE1}".tgz ${nodered_src}/ -fi - -# Moving the backup to S3 bucket (Daily backup) -if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/nodered/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Daily backup" - echo "" - echo "STATUS: Nodered Daily backup succeeded." - echo "" - echo "******* Nodered Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""\/,,g" &>> /tmp/nodered.txt - echo "" - echo "************** END **************************" - } >> /tmp/nodered.txt -else -{ echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Daily backup" - echo "" - echo "STATUS: Nodered Daily backup failed" - echo "" - echo "Something went wrong, please check it" - } >> /tmp/nodered.txt - < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" -fi - - -# Moving the backup to S3 bucket (Monthly backup) -if [ "$(date -d +1day +%d)" -eq 01 ]; then -if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Monthly backup" 
- echo "" - echo "STATUS: Nodered Monthly backup succeeded." - echo "" >> /tmp/nodered.txt - echo "******* Nodered Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""/monthly_backup/nodered/\/,,g" &>> /tmp/nodered.txt - echo "" - echo "************** END **************************" - } >> /tmp/nodered.txt -else - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Monthly backup" - echo "" - echo "STATUS: Nodered Monthly backup failed" - echo "" - echo "Something went wrong, please check it" - }>> /tmp/nodered.txt - < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" -fi -fi - - -# Moving the backup to S3 bucket (Yearly backup) -if [ "$a" == "$b" ] || [ "$a" == "$c" ] || [ "$a" == "$d" ] || [ "$a" == "$e" ] && [ "$(date -d +1day +%d)" -eq 01 ]; then -if s3cmd put -r --no-mime-magic /var/lib/backup/nodered/ s3://"${S3_BUCKET_NODERED}"/yearly_backup/nodered/; -then - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Yearly backup" - echo "" - echo "STATUS: Nodered Yearly backup succeeded." 
- echo "" - echo "******* Nodered Data Backup ****************" - echo "" - s3cmd ls --no-mime-magic s3://"${S3_BUCKET_NODERED}"/yearly_backup/nodered/ --human-readable | grep -i "${SOURCE_NAME}"_nodered_data | cut -d' ' -f3- | tac | head -10 | sed "s,s3:\/\/""${S3_BUCKET_NODERED}""/yearly_backup/nodered/\/,,g" &>> /tmp/nodered.txt - echo "" - echo "************** END **************************" - } >> /tmp/nodered.txt -else - { - echo "DATE:" "$DATE" - echo "" - echo "DESCRIPTION: ${SOURCE_NAME}_Nodered Yearly backup" - echo "" - echo "STATUS: Nodered Yearly backup failed" - echo "" - echo "Something went wrong, please check it" - }>> /tmp/nodered.txt - < /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" -fi -fi - - -< /tmp/nodered.txt mail -s "${SOURCE_NAME}: Nodered Data Backup" "${BACKUP_MAIL}" - -# Remove the old backup data in local directory to avoid excessive storage use -find /var/lib/backup/nodered/ -type f -exec rm {} \; -rm /tmp/nodered.txt - -###PRUNE### - -# prune the old backup data in S3 bucket to avoid excessive storage use(Daily backup) -s3cmd ls -r s3://"${S3_BUCKET_NODERED}"/nodered/ | awk -v DEL="$(date +%F -d "31 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done - - -if [ "$(date -d +1day +%d)" -eq 01 ]; then -# prune the old backup data in S3 bucket to avoid excessive storage use(Monthly backup) -s3cmd ls -r s3://"${S3_BUCKET_NODERED}"/monthly_backup/nodered/ | awk -v DEL="$(date +%F -d "366 days ago")" '$1 < DEL {print $4}' | while read -r file; do s3cmd rm "$file"; done -fi diff --git a/cron-backup/postfix.sh b/cron-backup/postfix.sh deleted file mode 100755 index c14d96e..0000000 --- a/cron-backup/postfix.sh +++ /dev/null @@ -1,2 +0,0 @@ -#!/bin/sh -/etc/init.d/postfix restart diff --git a/cron-backup/startup.sh b/cron-backup/startup.sh deleted file mode 100644 index 4cf2cdd..0000000 --- a/cron-backup/startup.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -while true -do - 
HOUR="$(date +'%H')" - MINUTE="$(date +'%M')" - - if [ "$HOUR" = "06" ] && [ "$MINUTE" = "35" ] - then - /bin/nodered_backup.sh - sleep 60 - fi - if [ "$HOUR" = "07" ] && [ "$MINUTE" = "35" ] - then - /bin/grafana_backup.sh - sleep 60 - fi - if [ "$HOUR" = "08" ] && [ "$MINUTE" = "35" ] - then - /bin/nginx_backup.sh - sleep 60 - fi - if [ "$HOUR" = "09" ] && [ "$MINUTE" = "35" ] - then - /bin/mqtts_backup.sh - sleep 60 - fi -done \ No newline at end of file From 6b2879c0a22038a4fae1cad00db8fb6736b8ceb3 Mon Sep 17 00:00:00 2001 From: shashidharanA Date: Thu, 15 Dec 2022 18:28:16 +0530 Subject: [PATCH 5/7] Updated New Server Versions --- backup/Dockerfile | 2 +- docker-compose.yml | 2 +- node-red/Dockerfile | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backup/Dockerfile b/backup/Dockerfile index 9f63f81..1cc5c2e 100644 --- a/backup/Dockerfile +++ b/backup/Dockerfile @@ -7,7 +7,7 @@ # 5. mongodb # To find the version of installed Mongodb service -FROM mongo:latest AS mongodb +FROM mongo:5.0.11 AS mongodb RUN env | grep MON > /root/env diff --git a/docker-compose.yml b/docker-compose.yml index 910e8ae..647e0bd 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -406,7 +406,7 @@ services: mongodb: restart: unless-stopped - image: mongo:6.0 + image: mongo:5.0.11 environment: MONGO_INITDB_ROOT_USERNAME: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_USERNAME:-}" MONGO_INITDB_ROOT_PASSWORD: "${IOT_DASHBOARD_MONGO_INITDB_ROOT_PASSWORD:-}" diff --git a/node-red/Dockerfile b/node-red/Dockerfile index a50c865..c546aa2 100644 --- a/node-red/Dockerfile +++ b/node-red/Dockerfile @@ -5,7 +5,7 @@ # build the node red image using the offical node red distribution # passing arguments to build specific image ARG node_red_version -FROM nodered/node-red:${node_red_version} +FROM nodered/node-red:2.2.3 # To avoid SSL certification issue ENV NODE_TLS_REJECT_UNAUTHORIZED=0 From cdc502e872dcbce012b91ea024056cdf21c524c3 Mon Sep 17 00:00:00 2001 From: Shashidharan 
Anbazhagan Date: Wed, 10 Jan 2024 19:37:24 +0530 Subject: [PATCH 6/7] Updated the baseimage for all containers and changed the influxdb key link --- apiserver/Dockerfile | 2 +- backup/Dockerfile | 19 ++++++++++--------- docker-compose.yml | 1 + expo/Dockerfile | 2 +- influxdb/Dockerfile | 6 +++--- mqtts/Dockerfile | 3 +-- nginx/Dockerfile | 2 +- node-red/Dockerfile | 2 +- postfix/Dockerfile | 2 +- 9 files changed, 20 insertions(+), 19 deletions(-) diff --git a/apiserver/Dockerfile b/apiserver/Dockerfile index 0c6c98f..9f6d64d 100644 --- a/apiserver/Dockerfile +++ b/apiserver/Dockerfile @@ -4,7 +4,7 @@ # Build the APISERVER using phusion base image -FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # Enabling SSH service RUN rm -f /etc/service/sshd/down diff --git a/backup/Dockerfile b/backup/Dockerfile index 1cc5c2e..817c0ff 100644 --- a/backup/Dockerfile +++ b/backup/Dockerfile @@ -12,19 +12,20 @@ RUN env | grep MON > /root/env # Building cron-backup instance -FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # Copying mongodb's version COPY --from=mongodb /root/env /root/env -# Installing same Mongodb's tools as in the copied version here in the cron-backup instance RUN set -x \ && export $(xargs < /root/env) \ - && echo "deb http://$MONGO_REPO/apt/ubuntu focal/${MONGO_PACKAGE%-unstable}/$MONGO_MAJOR multiverse" | tee "/etc/apt/sources.list.d/${MONGO_PACKAGE%-unstable}.list" \ - && apt-key adv --keyserver keyserver.ubuntu.com --recv-keys B00A0BD1E2C63C11 \ - && export DEBIAN_FRONTEND=noninteractive && apt-get update && ln -s /bin/true /usr/local/bin/systemctl && apt-get install -y \ - ${MONGO_PACKAGE}=$MONGO_VERSION \ - ${MONGO_PACKAGE}-tools=$MONGO_VERSION - + && echo "deb http://security.ubuntu.com/ubuntu focal-security main" | tee /etc/apt/sources.list.d/focal-security.list \ + && apt-get install -y gpg curl \ + && curl -fsSL https://pgp.mongodb.com/server-7.0.asc | \ + gpg -o 
/usr/share/keyrings/mongodb-server-7.0.gpg \ + --dearmor\ + && echo "deb [ arch=amd64,arm64 signed-by=/usr/share/keyrings/mongodb-server-7.0.gpg ] https://repo.mongodb.org/apt/ubuntu jammy/mongodb-org/7.0 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-7.0.list\ + && apt-get update \ + && apt-get install -y mongodb-org mongodb-org-database mongodb-org-server mongodb-org-shell mongodb-org-mongos mongodb-org-tools # some basic package installation for troubleshooting RUN apt-get update && apt-get install -y \ @@ -94,4 +95,4 @@ RUN mkdir -p /etc/my_init.d COPY postfix.sh /etc/my_init.d/postfix.sh RUN chmod +x /etc/my_init.d/postfix.sh -# end of file \ No newline at end of file +# end of file diff --git a/docker-compose.yml b/docker-compose.yml index 647e0bd..d3d27d7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -254,6 +254,7 @@ services: - "${IOT_DASHBOARD_DATA}node-red:/data" environment: TZ: "${IOT_DASHBOARD_TIMEZONE:-GMT}" + NODE_RED_ENABLE_PROJECTS: "true" # nodered opens ports on influxdb and postfix so it needs to be able to talk to it. 
links: - influxdb diff --git a/expo/Dockerfile b/expo/Dockerfile index d5da83d..f37691c 100644 --- a/expo/Dockerfile +++ b/expo/Dockerfile @@ -4,7 +4,7 @@ # Build the EXPO using phusion base image -FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # Enabling SSH service RUN rm -f /etc/service/sshd/down diff --git a/influxdb/Dockerfile b/influxdb/Dockerfile index 4e768f2..26bfe63 100644 --- a/influxdb/Dockerfile +++ b/influxdb/Dockerfile @@ -2,7 +2,7 @@ # Dockerfile for building the influxdb instance with S3-backup and Mail alert setup # -FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # Default InfluxDB host ENV INFLUX_HOST=influxdb @@ -13,7 +13,7 @@ ARG distrib_id ARG distrib_codename RUN echo "${distrib_id}" -RUN wget -qO- https://repos.influxdata.com/influxdb.key | apt-key add - +RUN wget -qO- https://repos.influxdata.com/influxdata-archive_compat.key | apt-key add - RUN /bin/bash -c "source /etc/lsb-release" RUN echo "deb https://repos.influxdata.com/${distrib_id} ${distrib_codename} stable" | tee /etc/apt/sources.list.d/influxdb.list @@ -75,4 +75,4 @@ RUN chmod +x /etc/service/influx/run COPY influxstart.sh /etc/service/influxstart/run RUN chmod +x /etc/service/influxstart/run -# end of file \ No newline at end of file +# end of file diff --git a/mqtts/Dockerfile b/mqtts/Dockerfile index 7d1544c..eee617e 100644 --- a/mqtts/Dockerfile +++ b/mqtts/Dockerfile @@ -3,8 +3,7 @@ # # Build the MQTTS using phusion base image -FROM phusion/baseimage:master-amd64 - +FROM phusion/baseimage:jammy-1.0.1 # Installing mosquitto packages and certbot RUN apt-add-repository ppa:mosquitto-dev/mosquitto-ppa RUN apt-get update && apt-get install -y \ diff --git a/nginx/Dockerfile b/nginx/Dockerfile index 8bcb60c..8eeff99 100644 --- a/nginx/Dockerfile +++ b/nginx/Dockerfile @@ -3,8 +3,8 @@ # # Start from Phusion. 
-FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # Installing the required packages RUN apt-get update && apt-get install -y \ software-properties-common \ diff --git a/node-red/Dockerfile b/node-red/Dockerfile index c546aa2..e985b67 100644 --- a/node-red/Dockerfile +++ b/node-red/Dockerfile @@ -35,4 +35,4 @@ COPY settings.js /usr/src/node-red/.node-red/ # change the startup command to be sure to use our settings. CMD ["npm", "start", "--", "--userDir", "/data", "--settings", "/usr/src/node-red/.node-red/settings.js"] -# end of file \ No newline at end of file +# end of file diff --git a/postfix/Dockerfile b/postfix/Dockerfile index d90a589..34abb9b 100644 --- a/postfix/Dockerfile +++ b/postfix/Dockerfile @@ -2,8 +2,8 @@ # Dockerfile for building POSTFIX # # Build the Postfix using phusion base image -FROM phusion/baseimage:master-amd64 +FROM phusion/baseimage:jammy-1.0.1 # some basic package installation for troubleshooting RUN apt-get update && apt-get install -y \ iputils-ping \ From a2707608489c2fec677b03bc8c3199d1bf0a5294 Mon Sep 17 00:00:00 2001 From: Shashidharan Anbazhagan Date: Fri, 12 Jan 2024 12:11:46 +0530 Subject: [PATCH 7/7] made changes in the setup.md for mqtts part --- SETUP.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/SETUP.md b/SETUP.md index 21ded55..e34312e 100644 --- a/SETUP.md +++ b/SETUP.md @@ -602,6 +602,8 @@ To access mqtt channel, user needs credentials to access it. # mosquitto_passwd -c /etc/mosquitto/credentials/passwd Password: Reenter password: + + # chmod 644 /etc/mosquitto/credentials/passwd ``` 3. Close the connection to mqtts (Ctrl+D).