Merge pull request #1 from teddysun/master

update
ilucaslee 2016-11-29 22:04:03 +08:00 committed by GitHub
commit 7e5f3a8576
4 changed files with 408 additions and 205 deletions

README.md

@@ -3,8 +3,8 @@
l2tp.sh
=======
- Description: Auto install L2TP VPN for CentOS6+/Debian7+/Ubuntu12+
- Intro: https://teddysun.com/448.html

```bash
Usage: l2tp [-l,--list|-a,--add|-d,--del|-m,--mod|-h,--help]
@@ -20,8 +20,8 @@ Usage: l2tp [-l,--list|-a,--add|-d,--del|-m,--mod|-h,--help]
bench.sh
========
- Description: Auto test download & I/O speed script
- Intro: https://teddysun.com/444.html

```bash
Usage:
@@ -36,34 +36,43 @@ Usage:
backup.sh
=========
- You must modify the config before running it
- Back up MySQL/MariaDB/Percona databases, files, and directories
- The backup file is encrypted with AES256-CBC and a SHA1 message digest (optional; see the decryption example below)
- Auto transfer of the backup file to Google Drive (requires the `gdrive` command) (optional)
- Auto transfer of the backup file to an FTP server (optional)
- Auto delete of old remote files on Google Drive or the FTP server (optional)
- Intro: https://teddysun.com/469.html

```bash
Install the gdrive command:
For x86_64:
wget -O /usr/bin/gdrive http://dl.lamp.sh/files/gdrive-linux-x64
chmod +x /usr/bin/gdrive
For i386:
wget -O /usr/bin/gdrive http://dl.lamp.sh/files/gdrive-linux-386
chmod +x /usr/bin/gdrive
```
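The config header of backup.sh (see the file diff further down) documents the exact openssl command for restoring an encrypted archive. As a quick reference, a hypothetical restore could look like this; the file name and password are placeholders, not values from this commit:

```bash
# Decrypt (command taken from the script's own config comments), then unpack;
# -P restores the original absolute paths, matching how the script creates the tarball
openssl enc -aes256 -in yourhost_20161129220403.tgz.enc -out decrypted_backup.tgz -pass pass:YourBackupPassword -d -md sha1
tar -zxPf decrypted_backup.tgz
```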
ftp_upload.sh
=============
- You must modify the config before running it
- Upload file(s) to an FTP server (usage sketch below)
- Intro: https://teddysun.com/484.html
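A usage sketch based on the argument examples in the script's own header; the file names and patterns below are placeholders:

```bash
# Single file, several files, or quoted glob patterns
./ftp_upload.sh backup.tgz
./ftp_upload.sh file1.log file2.log file3.log
./ftp_upload.sh "*.tgz" "*.sql"
```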
unixbench.sh
============
- Description: Auto install UnixBench and test script
- Intro: https://teddysun.com/245.html

pptp.sh (Deprecated)
====================
- Description: Auto install PPTP for CentOS 6
- Intro: https://teddysun.com/134.html

Copyright (C) 2013-2016 Teddysun <i@teddysun.com>

backup.sh

@@ -1,18 +1,27 @@
#!/usr/bin/env bash
#
# Auto backup script
#
# Copyright (C) 2016 Teddysun <i@teddysun.com>
#
# URL: https://teddysun.com/469.html
#
# You must modify the config before running it!!!
# Back up MySQL/MariaDB/Percona databases, files and directories
# The backup file is encrypted with AES256-cbc and a SHA1 message digest (optional)
# Auto transfer of the backup file to Google Drive (requires the gdrive command) (optional)
# Auto transfer of the backup file to an FTP server (optional)
# Auto delete of old remote files on Google Drive or the FTP server (optional)
#
[[ $EUID -ne 0 ]] && echo "Error: This script must be run as root!" && exit 1

########## START OF CONFIG ##########
# Encrypt flag (true: encrypt, false: do not encrypt)
ENCRYPTFLG=true
# WARNING: KEEP THE PASSWORD SAFE!!!
# The password used to encrypt the backup
# To decrypt backups made by this script, run the following command:
# openssl enc -aes256 -in [encrypted backup] -out decrypted_backup.tgz -pass pass:[backup password] -d -md sha1
@@ -27,55 +36,70 @@ TEMPDIR="/root/backups/temp/"
# File to log the outcome of backups
LOGFILE="/root/backups/backup.log"
# OPTIONAL: If you want to back up MySQL databases, enter the MySQL root password below
MYSQL_ROOT_PASSWORD=""
# Below is a list of MySQL database names that will be backed up
# If you want to back up ALL databases, leave it blank.
MYSQL_DATABASE_NAME[0]=""
# Below is a list of files and directories that will be backed up in the tar backup
# For example:
# File: /data/www/default/test.tgz
# Directory: /data/www/default/test
# If you do not want any files or directories backed up, leave it blank.
BACKUP[0]=""
# Number of days to store daily local backups (default 7 days)
LOCALAGEDAILIES="7"
# Delete Google Drive's & FTP server's remote file flag (true: delete, false: do not delete)
DELETE_REMOTE_FILE_FLG=false
# Upload to FTP server flag (true: upload, false: do not upload)
FTP_FLG=false
# FTP server
# OPTIONAL: If you want to upload to an FTP server, enter the hostname or IP address below
FTP_HOST=""
# FTP username
# OPTIONAL: If you want to upload to an FTP server, enter the FTP username below
FTP_USER=""
# FTP password
# OPTIONAL: If you want to upload to an FTP server, enter that user's password below
FTP_PASS=""
# FTP server remote folder
# OPTIONAL: If you want to upload to an FTP server, enter the FTP remote folder below
# For example: public_html
FTP_DIR=""
########## END OF CONFIG ##########
# Date & Time
DAY=$(date +%d)
MONTH=$(date +%m)
YEAR=$(date +%C%y)
BACKUPDATE=$(date +%Y%m%d%H%M%S)
# Backup file name
TARFILE="${LOCALDIR}""$(hostname)"_"${BACKUPDATE}".tgz
# Encrypted backup file name
ENC_TARFILE="${TARFILE}.enc"
# Backup MySQL dump file name
SQLFILE="${TEMPDIR}mysql_${BACKUPDATE}.sql"

log() {
echo "$(date "+%Y-%m-%d %H:%M:%S")" "$1"
echo -e "$(date "+%Y-%m-%d %H:%M:%S")" "$1" >> ${LOGFILE}
}
# Check for list of mandatory binaries
check_commands() {
# This section checks for all of the binaries used in the backup
BINARIES=( cat cd du date dirname echo openssl mysql mysqldump pwd rm tar ftp )
# Iterate over the list of binaries, and if one isn't found, abort
for BINARY in "${BINARIES[@]}"; do
@@ -84,16 +108,28 @@ for BINARY in "${BINARIES[@]}"; do
exit 1
fi
done
# check gdrive command
GDRIVE_COMMAND=false
if [ "$(command -v "gdrive")" ]; then
GDRIVE_COMMAND=true
fi
}

calculate_size() {
local file_name=$1
local file_size=$(du -h $file_name 2>/dev/null | awk '{print $1}')
if [ "x${file_size}" = "x" ]; then
echo "unknown"
else
echo "${file_size}"
fi
}
# Back up MySQL databases
mysql_backup() {
if [ -z ${MYSQL_ROOT_PASSWORD} ]; then
log "MySQL root password not set, MySQL backup skipped"
else
log "MySQL dump start"
mysql -u root -p"${MYSQL_ROOT_PASSWORD}" 2>/dev/null <<EOF
@@ -128,17 +164,16 @@ EOF
BACKUP=(${BACKUP[*]} ${DBFILE})
done
fi
log "MySQL dump completed"
fi
}

start_backup() {
[ "${BACKUP[*]}" == "" ] && echo "Error: You must modify the [$(basename $0)] config before running it!" && exit 1
log "Tar backup file start"
tar -zcPf ${TARFILE} ${BACKUP[*]}
if [ $? -gt 1 ]; then
log "Tar backup file failed"
exit 1
fi
@@ -147,7 +182,7 @@ log "Tar backup file completed"
# Encrypt tar file
if ${ENCRYPTFLG}; then
log "Encrypt backup file start"
openssl enc -aes256 -in "${TARFILE}" -out "${ENC_TARFILE}" -pass pass:"${BACKUPPASS}" -md sha1
log "Encrypt backup file completed"
# Delete unencrypted tar
@@ -162,15 +197,13 @@ do
rm -f ${sql}
done
if ${ENCRYPTFLG}; then
OUT_FILE="${ENC_TARFILE}"
else
OUT_FILE="${TARFILE}"
fi
log "File name: ${OUT_FILE}, File size: `calculate_size ${OUT_FILE}`"
}
# Transfer backup file to Google Drive
# If you want to install the gdrive command, please visit this website:
@@ -178,33 +211,51 @@ fi
# Of course, you can use the commands below to install it
# For x86_64: wget -O /usr/bin/gdrive http://dl.lamp.sh/files/gdrive-linux-x64; chmod +x /usr/bin/gdrive
# For i386: wget -O /usr/bin/gdrive http://dl.lamp.sh/files/gdrive-linux-386; chmod +x /usr/bin/gdrive
gdrive_upload() {
if ${GDRIVE_COMMAND}; then
log "Transferring backup file to Google Drive"
gdrive upload --no-progress ${OUT_FILE} >> ${LOGFILE}
if [ $? -ne 0 ]; then
log "Error: Transferring backup file to Google Drive failed"
exit 1
fi
log "Transferring backup file to Google Drive completed"
fi
}
# Transfer backup file to FTP server
ftp_upload() {
if ${FTP_FLG}; then
[ -z ${FTP_HOST} ] && log "Error: FTP_HOST cannot be empty!" && exit 1
[ -z ${FTP_USER} ] && log "Error: FTP_USER cannot be empty!" && exit 1
[ -z ${FTP_PASS} ] && log "Error: FTP_PASS cannot be empty!" && exit 1
[ -z ${FTP_DIR} ] && log "Error: FTP_DIR cannot be empty!" && exit 1
local FTP_OUT_FILE=$(basename ${OUT_FILE})
log "Transferring backup file to FTP server"
ftp -in ${FTP_HOST} 2>&1 >> ${LOGFILE} <<EOF
user $FTP_USER $FTP_PASS
binary
lcd $LOCALDIR
cd $FTP_DIR
put $FTP_OUT_FILE
quit
EOF
log "Transferring backup file to FTP server completed"
fi
}
# Get file date
get_file_date() {
# Approximate a 30-day month and a 365-day year
DAYS=$(( $((10#${YEAR}*365)) + $((10#${MONTH}*30)) + $((10#${DAY})) ))
unset FILEYEAR FILEMONTH FILEDAY FILEDAYS FILEAGE
FILEYEAR=$(echo "$1" | cut -d_ -f2 | cut -c 1-4)
FILEMONTH=$(echo "$1" | cut -d_ -f2 | cut -c 5-6)
FILEDAY=$(echo "$1" | cut -d_ -f2 | cut -c 7-8)
if [[ "${FILEYEAR}" && "${FILEMONTH}" && "${FILEDAY}" ]]; then
# Approximate a 30-day month and a 365-day year
# e.g. a file dated 2016-11-29 gives 2016*365 + 11*30 + 29 = 736199
FILEDAYS=$(( $((10#${FILEYEAR}*365)) + $((10#${FILEMONTH}*30)) + $((10#${FILEDAY})) ))
FILEAGE=$(( 10#${DAYS} - 10#${FILEDAYS} ))
@@ -214,7 +265,8 @@ getFileDate() {
return 1
}

# Delete Google Drive's old backup file
delete_gdrive_file() {
local FILENAME=$1
if ${DELETE_REMOTE_FILE_FLG} && ${GDRIVE_COMMAND}; then
local FILEID=$(gdrive list -q "name = '${FILENAME}'" --no-header | awk '{print $1}')
@@ -225,13 +277,22 @@ deleteRemoteFile() {
fi
}
# Delete FTP server's old backup file
delete_ftp_file() {
local FILENAME=$1
if ${DELETE_REMOTE_FILE_FLG} && ${FTP_FLG}; then
ftp -in ${FTP_HOST} 2>&1 >> ${LOGFILE} <<EOF
user $FTP_USER $FTP_PASS
cd $FTP_DIR
del $FILENAME
quit
EOF
log "FTP server's old backup file name: ${FILENAME} has been deleted"
fi
}

# Clean up old files
clean_up_files() {
cd ${LOCALDIR} || exit
if ${ENCRYPTFLG}; then
@@ -240,19 +301,43 @@ else
LS=($(ls *.tgz))
fi
for f in ${LS[@]}
do
get_file_date ${f}
if [ $? == 0 ]; then
if [[ ${FILEAGE} -gt ${LOCALAGEDAILIES} ]]; then
rm -f ${f}
log "Old backup file name: ${f} has been deleted"
delete_gdrive_file ${f}
delete_ftp_file ${f}
fi
fi
done
}
# Main progress
STARTTIME=$(date +%s)
# Check if the backup folders exist and are writeable
if [ ! -d "${LOCALDIR}" ]; then
mkdir -p ${LOCALDIR}
fi
if [ ! -d "${TEMPDIR}" ]; then
mkdir -p ${TEMPDIR}
fi

log "Backup progress start"
check_commands
mysql_backup
start_backup
log "Backup progress complete"

log "Upload progress start"
gdrive_upload
ftp_upload
log "Upload progress complete"

clean_up_files

ENDTIME=$(date +%s)
DURATION=$((ENDTIME - STARTTIME))
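backup.sh is designed to run unattended as root (note the EUID check at the top of the script). A hypothetical cron entry for a nightly run follows; the install path and schedule are assumptions, not part of this commit:

```bash
# Edit root's crontab (crontab -e) and run the backup every day at 03:00;
# /root/backup.sh is an assumed location for the script
0 3 * * * /bin/bash /root/backup.sh > /dev/null 2>&1
```

The script's log() function writes to LOGFILE as well as stdout, so the main progress messages are still captured even though cron output is discarded.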

bench.sh

@@ -1,10 +1,13 @@
#!/usr/bin/env bash
#
# Description: Auto test download & I/O speed script
#
# Copyright (C) 2015 - 2016 Teddysun <i@teddysun.com>
#
# Thanks: LookBack <admin@dwhd.org>
#
# URL: https://teddysun.com/444.html
#

if [ ! -e '/usr/bin/wget' ]; then
echo "Error: wget command not found. You must install wget first."
@@ -86,12 +89,14 @@ io_test() {
calc_disk() {
local total_size=0
local array=$@
for size in ${array[@]}
do
[ "${size}" == "0" ] && size_t=0 || size_t=`echo ${size:0:${#size}-1}`
[ "`echo ${size:(-1)}`" == "M" ] && size=$( awk 'BEGIN{printf "%.1f", '$size_t' / 1024}' )
[ "`echo ${size:(-1)}`" == "T" ] && size=$( awk 'BEGIN{printf "%.1f", '$size_t' * 1024}' )
[ "`echo ${size:(-1)}`" == "G" ] && size=${size_t}
total_size=$( awk 'BEGIN{printf "%.1f", '$total_size' + '$size'}' )
done
echo ${total_size}
}
@@ -100,32 +105,29 @@ cname=$( awk -F: '/model name/ {name=$2} END {print name}' /proc/cpuinfo | sed '
cores=$( awk -F: '/model name/ {core++} END {print core}' /proc/cpuinfo )
freq=$( awk -F: '/cpu MHz/ {freq=$2} END {print freq}' /proc/cpuinfo | sed 's/^[ \t]*//;s/[ \t]*$//' )
tram=$( free -m | awk '/Mem/ {print $2}' )
uram=$( free -m | awk '/Mem/ {print $3}' )
swap=$( free -m | awk '/Swap/ {print $2}' )
uswap=$( free -m | awk '/Swap/ {print $3}' )
up=$( awk '{a=$1/86400;b=($1%86400)/3600;c=($1%3600)/60} {printf("%d days, %d hour %d min\n",a,b,c)}' /proc/uptime )
load=$( w | head -1 | awk -F'load average:' '{print $2}' | sed 's/^[ \t]*//;s/[ \t]*$//' )
opsy=$( get_opsy )
arch=$( uname -m )
lbit=$( getconf LONG_BIT )
kern=$( uname -r )
ipv6=$( wget -qO- -t1 -T2 ipv6.icanhazip.com )
disk_size1=($( df -ahPl | grep -wvE '\-|none|tmpfs|by-uuid|Filesystem' | awk '{print $2}' ))
disk_size2=($( df -ahPl | grep -wvE '\-|none|tmpfs|by-uuid|Filesystem' | awk '{print $3}' ))
disk_total_size=$( calc_disk ${disk_size1[@]} )
disk_used_size=$( calc_disk ${disk_size2[@]} )

clear
next
echo "CPU model : $cname"
echo "Number of cores : $cores"
echo "CPU frequency : $freq MHz"
echo "Total size of Disk : $disk_total_size GB ($disk_used_size GB Used)"
echo "Total amount of Mem : $tram MB ($uram MB Used)"
echo "Total amount of Swap : $swap MB ($uswap MB Used)"
echo "System uptime : $up"
echo "Load average : $load"
echo "OS : $opsy"
@@ -145,7 +147,7 @@ ioraw2=$( echo $io2 | awk 'NR==1 {print $1}' )
ioraw3=$( echo $io3 | awk 'NR==1 {print $1}' )
[ "`echo $io3 | awk 'NR==1 {print $2}'`" == "GB/s" ] && ioraw3=$( awk 'BEGIN{print '$ioraw3' * 1024}' )
ioall=$( awk 'BEGIN{print '$ioraw1' + '$ioraw2' + '$ioraw3'}' )
ioavg=$( awk 'BEGIN{printf "%.1f", '$ioall' / 3}' )
echo "Average I/O speed : $ioavg MB/s"
next
echo -e "Node Name\t\t\tIPv4 address\t\tDownload Speed"

ftp_upload.sh (new file)

@@ -0,0 +1,107 @@
#!/usr/bin/env bash
#
# Upload file(s) to FTP server
#
# Copyright (C) 2016 Teddysun <i@teddysun.com>
#
# Argument example:
# 1) ./ftp_upload.sh filename
# 2) ./ftp_upload.sh filename1 filename2 filename3 ...
# 3) ./ftp_upload.sh "*.extension"
# 4) ./ftp_upload.sh "*.extension1" "*.extension2"
#
########## START OF CONFIG ##########
# Local directory (current folder)
LOCALDIR=$( pwd )
# File to log the outcome of uploads
LOGFILE="/var/log/ftp_upload.log"
# FTP server
# Enter the Hostname or IP address below
FTP_HOST=""
# FTP username
# Enter the FTP username below
FTP_USER=""
# FTP password
# Enter the username's password below
FTP_PASS=""
# FTP server remote folder
# Enter the FTP remote folder below
# For example: public_html
FTP_DIR=""
########## END OF CONFIG ##########
log() {
echo "$(date "+%Y-%m-%d %H:%M:%S")" "$1"
echo -e "$(date "+%Y-%m-%d %H:%M:%S")" "$1" >> ${LOGFILE}
}
# Check ftp command
check_command() {
if [ ! "$(command -v "ftp")" ]; then
log "ftp command is not installed, please install it and try again"
exit 1
fi
}
# Transfer file(s) to the FTP server
ftp_upload() {
cd ${LOCALDIR} || exit
[ -z ${FTP_HOST} ] && log "Error: FTP_HOST cannot be empty!" && exit 1
[ -z ${FTP_USER} ] && log "Error: FTP_USER cannot be empty!" && exit 1
[ -z ${FTP_PASS} ] && log "Error: FTP_PASS cannot be empty!" && exit 1
[ -z ${FTP_DIR} ] && log "Error: FTP_DIR cannot be empty!" && exit 1
echo "$@" | grep "*" > /dev/null 2>&1
if [ $? -eq 0 ]; then
ls $@ > /dev/null 2>&1
[ $? -ne 0 ] && log "Error: [$@] file(s) do not exist!" && exit 1
else
for f in $@
do
[ ! -f ${f} ] && log "Error: [${f}] does not exist!" && exit 1
done
fi
local FTP_OUT_FILE=("$@")
log "Tranferring file(s) list below to FTP server:"
for file in ${FTP_OUT_FILE[@]}
do
log "$file"
done
ftp -in ${FTP_HOST} 2>&1 >> ${LOGFILE} <<EOF
user $FTP_USER $FTP_PASS
binary
lcd $LOCALDIR
cd $FTP_DIR
mput ${FTP_OUT_FILE[@]}
quit
EOF
log "Tranfer to FTP server completed"
}
# Main progress
STARTTIME=$(date +%s)
[ $# -eq 0 ] && log "Error: argument cannot be empty!" && exit 1
check_command
ftp_upload "$@"
ENDTIME=$(date +%s)
DURATION=$((ENDTIME - STARTTIME))
log "All done"
log "Transfer completed in ${DURATION} seconds"