find files between dates

find files between two specified dates

# list regular files modified after 2017-07-01 00:00 and not after 2017-08-30 00:00
find / -type f ! -newermt '2017-08-30' -newermt '2017-07-01' -ls

find files created between the beginning of the current month and the current day

# files modified from the start of the current month up to (but not including) today
find / -type f -newermt "$(date +%Y-%m-01)" ! -newermt "$(date +%Y-%m-%d)" -ls

find .php files modified in the last 30 days

# quote the regex so the shell cannot glob-expand it, and match a literal ".php"
# extension ("[.]" escapes the dot) instead of any name merely ending in "php"
find / -type f -regex '.*[.]php$' -mtime -30 -exec ls -lt {} +

find files modified between 1 month ago and 3 days ago whose names end with a number

# quote the regex and the date substitutions so the shell cannot glob or word-split them
find / -type f -regex '.*[0-9]$' -newermt "$(date -d '-1 month' +%Y-%m-%d)" ! -newermt "$(date -d '-3 day' +%Y-%m-%d)" -exec ls -la {} +

compress files created between two specified dates

# gzip in place (forced) every file last modified between 1 month ago and 3 days ago
start=$(date -d '-1 month' +%Y-%m-%d)
end=$(date -d '-3 day' +%Y-%m-%d)
find / -type f -newermt "$start" ! -newermt "$end" -exec gzip -f {} +
Advertisements

filter the last occurrence in a file

filter last occurrence containing the DHCP string

# reverse the log with tac, then stop at the first match (the originally last one)
tac /var/log/syslog | grep --max-count=1 'DHCP'

show 3 lines of context around the last occurrence containing the err or warn string

# egrep is deprecated in GNU grep; use grep -E. -C 3 prints 3 lines of context
tac /var/log/syslog | grep -E -m 1 -C 3 'err|warn'

number of hits to the same URL per IP in Apache

extract the IP and URL fields and show only entries with 10 or more hits to the same URL

# extract IP ($1) and URL ($7), count identical pairs, sort by count, keep counts of 10+
awk '{ print $1 " " $7 }' /var/log/httpd/access.log | sort | uniq -c | sort -n | awk '$1 > 9'

extract the IP and URL fields for the current day and show only IPs outside the 192.168 range with 10 or more hits to the same URL

# quote the date substitution for grep and escape the dots in the awk regex so
# "192.168." matches only the literal prefix (an unescaped "." matches any char)
grep "$(date +%d/%b/%Y)" /var/log/httpd/access.log | awk '$1 !~ /^192\.168\./ {print $1,$7}' | sort | uniq -c | sort -n | awk '$1 >= 10'

analyze the Apache log and block IPs that access a URL 3 times in X minutes

block multiple URLs

#!/bin/bash
# Analyze the Apache access log and, via the iptables string match, block any IP
# that accesses one of the listed URL strings 3 or more times within the last
# 2 minutes. Only access to the matched URL is blocked, not all traffic.

# You can use | to separate multiple URL substrings to block.
URL_STRING_TO_BLOCK="insertPartOfUrl1Here|insertPartOfUrl2Here"

RANGE_TIME=$( date '+%H:%M' )

# Build an alternation of the current minute plus the 2 previous minutes,
# e.g. "14:05|14:04:|14:03:", used inside the grep -E regex below.
for i in {1..2};do
  RANGE_TIME=${RANGE_TIME}\|$(date -d "-$i min" '+%H:%M:')
done

# Log lines inside the time window that hit one of the blocked URL strings.
# NOTE: date prints the backslashes literally, so the regex sees "01\/Jan\/2025:",
# where "\/" simply matches "/".
IPs_LIST=$( grep -E "$(date '+%d\/%b\/%Y:')($RANGE_TIME).*($URL_STRING_TO_BLOCK)" /var/log/httpd/access.log )

# Group the matching IPs by number of hits (uniq -c prepends the count).
GROUP_BY_IPs=$( echo "$IPs_LIST" | cut -d' ' -f1 | sort | uniq -c )

# For every IP with 3+ hits that is outside the 192.168.0.* range, add an
# iptables rule dropping packets whose payload contains the URL string.
# The dots are escaped so "192.168.0." cannot match e.g. "192x168x0x".
for i in $( echo "$GROUP_BY_IPs" | awk '$1 >= 3 && $2 !~ /^192\.168\.0\./ {print $2}' );do
  # Skip IPs that already have a blocking rule (recognized by the "many" comment).
  if ! /sbin/iptables -nvL INPUT | grep -q " $i .*many"; then
    for j in $( echo "$URL_STRING_TO_BLOCK" | tr '|' '\n' ); do
      echo "$( date '+%d/%m/%Y %R' ) blocking ip: $i that accesses part of URL: $j" | tee -a "/var/log/$( basename "$0" ).log"
      /sbin/iptables -A INPUT -s "$i" -p tcp -m tcp --dport 80 -m string --string "$j" --algo bm --to 65535 -m comment --comment "block IP that accesses this URL many times" -j DROP
    done
  fi
done

one-liner to block IPs that access a URL 3 times in 1 minute (blocks only 1 URL)

# grep -E replaces deprecated egrep; dots in the 192.168 regex are escaped so they
# match literally; xargs -r avoids running iptables when no IP qualifies
URL='insertUrlHere'; RANGE_TIME=$(date '+%H:%M'); for i in {1..1};do RANGE_TIME=${RANGE_TIME}\|$(date -d "-$i min" '+%H:%M:'); done; grep -E "$(date '+%d\/%b\/%Y:')($RANGE_TIME).*$URL" /var/log/httpd/access.log | cut -d' ' -f1 | sort | uniq -c | awk '$1 >= 3 && $2 !~ /^192\.168\./ {print $2}' | xargs -r -I% iptables -A INPUT -s % -p tcp -m tcp --dport 80 -m string --string "$URL" --algo bm --to 65535 -j DROP