#!/bin/sh
#
# Provide some data from nginx log files.
#
# List the number of entries in each log file.
# List the number of requests for robots.txt.
# List the number of feedback-mailer requests.
# List the number of blocked IP addresses for the log files.
# nginx keeps 52 log files (1 year's weekly logs).
#
# Log files are rotated weekly, but not at a fixed time -- Sat/Sun/Mon.
# So they will cover 6-8 days.  Week-to-week comparisons will not be exact.
# There's also variable down time when the network connection is lost.
#
# Send the output through /home/bchivers/commands/sed.html.ttbl
#
# Brent Chivers  2020/Dec/27

LOGDIR=/var/log/nginx
LOGNAME=access

# Header is printed before and after the table; define it once so the
# two copies can't drift apart.
HEADER=" Log Reqs Robots Mail Bad IPs"

echo "$HEADER"

# ls -tr lists oldest first; nginx log filenames contain no whitespace,
# so word-splitting the output is safe here.
for FILE in $(ls -tr "$LOGDIR" | grep "$LOGNAME")
do
	case $FILE in
	*.gz)	# rotated, compressed log -- must go through zcat
		REQS=$(zcat "$LOGDIR/$FILE" | wc -l)
		# -F: match the strings literally (plain grep would let the
		# '.' match any character and over-count); -c: count lines.
		ROBOTS=$(zcat "$LOGDIR/$FILE" | grep -Fc robots.txt)
		FEEDBACKS=$(zcat "$LOGDIR/$FILE" | grep -Fc /cgi-bin/mailer.bin)
		;;
	*)	# current (uncompressed) log
		REQS=$(wc -l < "$LOGDIR/$FILE")	# redirect to avoid printing filename
		ROBOTS=$(grep -Fc robots.txt "$LOGDIR/$FILE")
		FEEDBACKS=$(grep -Fc /cgi-bin/mailer.bin "$LOGDIR/$FILE")
		;;
	esac

	# Modification date (YYYY-MM-DD, from GNU ls --full-time field 6);
	# rotated deny files carry this date as a suffix.
	DATE=$(ls -tr --full-time "$LOGDIR/$FILE" | awk '{print $6}')

	case $FILE in
	$LOGNAME.log)
		# Current log: use the undated deny file(s) -- the ones
		# whose names contain no '.' suffix.
		BLOCKFILES=$(ls "$LOGDIR"/deny* | fgrep -v '.')
		;;
	*)
		# Rotated log: match deny files dated like the log.
		BLOCKFILES=$(ls "$LOGDIR"/deny* | grep "$DATE")
		;;
	esac

	if [ -z "$BLOCKFILES" ] ; then
		BLOCKS="-"	# no deny file for this period
	else
		# $BLOCKFILES is intentionally unquoted: it is a
		# whitespace-separated list of filenames.  sort -u merges
		# and de-duplicates in one pass (was: sort|sort|uniq).
		BLOCKS=$(sort -u $BLOCKFILES | wc -l)
	fi

	echo "$DATE $REQS $ROBOTS $FEEDBACKS $BLOCKS"
done

echo "$HEADER"