Linux script for scheduled log file compression

1. Traverse every logs directory under the current directory and compress each log file older than the configured number of days into its own archive, named after the log file (5 log files produce 5 archive files, so the compression ratio is low).
#!/bin/sh
baseFolder="/opt/test"
# compress logs older than ${days} days (find -mtime +N matches files modified more than N days ago)
days=3
cd ${baseFolder}
for dir in $(ls $baseFolder)
do
    echo "listing file: ${dir}"
    if [ -d ${dir} ]; then
        echo "directory found, checking for log files that need to be compressed ......"
        if [ -d ${dir}/logs ]; then
            echo `pwd`" target path: ${dir}/logs"
            for log in $(find ${dir}/logs -mtime +${days} -name "*.log")
            do
                tar -zcvf ${log}.tar.gz ${log}
                if [ $? -eq 0 ]; then
                    echo "log file compressed successfully, deleting the original log file ......"
                    rm -f ${log}
                    if [ $? -eq 0 ]; then
                        echo "log file deleted successfully!"
                    else
                        echo "log file deletion failed!"
                    fi
                else
                    echo "log file compression failed!"
                fi
            done
        fi
    else
        echo "no log files need to be compressed ......"
    fi
done
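For a quick one-off run, the same per-file behaviour can be approximated with a single find command. This is only a sketch, assuming GNU find and gzip are available; gzip compresses each matched log in place and removes the original itself:

# compress every *.log older than 3 days under each logs directory, one .gz per file
find /opt/test/*/logs -mtime +3 -name "*.log" -exec gzip {} \;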

2. Traverse the logs directory under every folder in the specified directory and compress all log files older than the configured number of days into a single archive (compressing multiple log files into one file improves the compression ratio).
#!/bin/sh
# log search path
baseFolder="/opt/test"
log_time=`date "+%Y%m%d%H%M%S"`
# compress logs older than ${days} days
days=3
cd ${baseFolder}
for dir in $(ls $baseFolder)
do
    echo "file: ${dir}"
    if [ -d ${dir} ]; then
        echo "directory found, checking for log files that need to be compressed ......"
        if [ -d ${dir}/logs ]; then
            echo `pwd`" target path: ${dir}/logs"
            n=`find ${dir}/logs -mtime +${days} -name "*.log" | wc -l`
            echo "pending log files: ${n}"
            if [ ${n} -gt 0 ]; then
                echo "compressing log files ......"
                tar -zcvf ${dir}/logs/${dir}${log_time}.tar.gz `find ${dir}/logs -mtime +${days} -name "*.log"`
                if [ $? -eq 0 ]; then
                    echo "log files compressed successfully, deleting the original log files ......"
                    for log in $(find ${dir}/logs -mtime +${days} -name "*.log")
                    do
                        rm -f ${log}
                        if [ $? -eq 0 ]; then
                            echo "log file deleted successfully!"
                        else
                            echo "log file deletion failed!"
                        fi
                    done
                else
                    echo "log file compression failed!"
                fi
            else
                echo "no logs need to be processed!"
            fi
        else
            echo "no logs folder in the current directory!"
        fi
    else
        echo "not a directory ......"
    fi
done
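A minimal usage sketch, assuming the second script has been saved as /opt/test/backlog.sh (the file name is taken from the crontab entry below); the tar -tzf call just lists the contents of a produced archive so the result can be checked without extracting it, and the archive path shown is illustrative:

chmod +x /opt/test/backlog.sh
sh /opt/test/backlog.sh
# list the files inside one of the generated archives (path is illustrative)
tar -tzf /opt/test/app1/logs/app120191218160000.tar.gz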

3. Add a crontab entry to schedule the script

crontab -e
0 16 28 * * /bin/sh /opt/test/backlog.sh >/dev/null 2>&1

This runs the script at 16:00 on the 28th of every month.
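The schedule can be adjusted to taste; as an illustrative variant, the entry below would run the same script every day at 02:00 and append its output to a log file instead of discarding it (the log file path is an assumption):

0 2 * * * /bin/sh /opt/test/backlog.sh >> /opt/test/backlog_cron.log 2>&1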

Origin: www.cnblogs.com/joker666/p/12061140.html