#!/bin/bash
# Log backup daemon: periodically copies oversized log files from a
# RAM-backed log directory to a backup directory, truncates the originals,
# and prunes the backup directory when it exceeds a size cap.
#Maximum capacity of the backup directory (MB)
alarmrate=500
#Size limit for each individual log file (MB)
file_max_size=5
#Original (source) log directory
log_ram_dir=/home/log_dir
#Backup log directory
working_dir=/home/log_mnt
#Polling period of the main loop (seconds)
SLEEPTIME=5
#Append YEAR.MONTH.DAY and timestamp to log file
#######################################
# Append a YYYYMMDDHHMMSS timestamp to a log file name.
# Arguments: $1 - base log file name
# Outputs:   sets global RETVAL to "$1.<timestamp>"
#######################################
filenameConvert() # stamp the log file name with the current time
{
    # 'local' keeps the scratch variable out of the global namespace;
    # the original re-assigned it through a pointless `echo` round-trip.
    local timestamp
    timestamp=$(date +%Y%m%d%H%M%S)
    RETVAL="$1.$timestamp"
}
#search dir to fetch the oldest log
#######################################
# Find the oldest file in the current directory (by mtime).
# Outputs: sets global 'oldestlog', which
#          clear_old_log_under_working_dir passes to rm.
# BUG FIX: the original assigned 'ldestlog' (typo), so the caller's
#          '$oldestlog' was always empty and the purge loop never
#          deleted anything, spinning forever once over quota.
#######################################
searchdir() # locate the oldest log file under the working directory
{
    # ls -rt sorts oldest-first; head picks the first entry.
    # NOTE(review): filenames containing whitespace would be truncated
    # by the awk field split — acceptable here since backup names are
    # generated as <name>.<timestamp>.
    oldestlog=$(ls -rt | head -n1 | awk '{print $1}')
}
#this function clean old logs under working dir if it reaches it`s size limitation,say 500M
#######################################
# Shrink the backup directory below its quota by deleting oldest logs.
# Globals: working_dir (read), alarmrate (read),
#          oldestlog (read; set by searchdir)
# Returns: 0 normally; 1 if the working dir cannot be entered.
#######################################
clear_old_log_under_working_dir() # purge oldest backups until under quota
{
    # Guard the cd: without this, a missing dir would make du/rm
    # operate on whatever directory we happened to be in.
    cd "$working_dir" || return 1
    while true; do
        logsize=$(du -ms "$working_dir" | awk '{print $1}')
        if [ "$logsize" -gt "$alarmrate" ]; then
            searchdir
            # Never run 'rm -rf' with an empty argument; if searchdir
            # found nothing, bail out instead of looping forever.
            [ -n "$oldestlog" ] || break
            rm -rf -- "$oldestlog"
        else
            break
        fi
    done
}
#this is the main process of our log backup activity.
#######################################
# One backup pass: for each known log in $log_ram_dir that exceeds
# $file_max_size MB, copy a timestamped snapshot to $working_dir,
# truncate the original, and prune the backup dir; unknown files are
# simply truncated in place.
# Globals: log_ram_dir, working_dir, file_max_size (read);
#          RETVAL (via filenameConvert)
# Returns: 0 normally; 1 if the source dir cannot be entered.
#######################################
backuplog_process() # main backup routine
{
    cd "$log_ram_dir" || return 1
    for i in * ; do
        # With no nullglob, an empty dir leaves the literal '*'; skip it.
        [ -e "$i" ] || continue
        file_size=$(du -m "$i" | awk '{print $1}')
        case $i in
        access.log |error.log | apcupsd.events | evms-engine.log | messages | soho.log | kern.log | lpr.log | mail.err | mail.info | mail.log | mail.warn | news | resycd.log | user.log | dmesg | dmesg.0 | dmesg.new)
            if [ ! -d "$working_dir" ] ;then
                mkdir -p "$working_dir"
            fi
            if [ "$file_size" -gt "$file_max_size" ]
            then
                # Snapshot with a timestamped name, then empty the source
                # so the logger keeps appending to a small file.
                filenameConvert "$i"
                cp "$log_ram_dir/$i" "$working_dir/$RETVAL"
                echo "" > "$log_ram_dir/$i"
                clear_old_log_under_working_dir
            fi
            ;;
        *)
            # Unrecognized files are truncated in place (NOT deleted,
            # despite what the original comment claimed).
            echo "" > "$log_ram_dir/$i"
            ;;
        esac
    done
}
# Daemon loop: run a backup pass every $SLEEPTIME seconds, forever.
while : ; do
    backuplog_process
    sleep "$SLEEPTIME"
done