Zombie host or proxy, the moment it is identified as CC traffic, block it!

Posted: 2023-03-08 18:57:18

The lessons of the past few days, distilled into code.

The Python version, which I wrote myself.

#!/usr/bin/env python
# -*- coding: utf-8 -*-

import os,sys,time
import commands,logging

#The manual commands below do the same job by hand
#time format 17/Oct/2014:10:00:00
#time format Sat Oct 18 12:35:43 2014
#awk '$4>"[17/Oct/2014:14:00:00"&&$4<"[17/Oct/2014:14:02:00"' xxx.log|awk '{a[$7]++}END{for(i in a)print a[i],i}'|sort -n|tail -n 10|column -t    (URL alert threshold: 500)
#awk '$4>"[17/Oct/2014:14:00:00"&&$4<"[17/Oct/2014:14:02:00"' xxx.log|grep 'uc_server/images/noavatar_small.gif'|awk '{a[$1]++}END{for(i in a)print a[i],i}'|sort -n|tail -n 10|column -t    (IP alert threshold: 100)

#Alert thresholds: requests per URL to flag it as a CC target, and requests from a single IP to that URL
urlAlert = 500
ipAlert = 50
print 'urlAlert is:', urlAlert, ', ipAlert is:', ipAlert
#Path of the access log
logPath = "xxx.log"

#Work on a 120-second window
endtime = time.time()
#Format the timestamps the same way as the Nginx log time (e.g. 17/Oct/2014:10:00:00);
#time.strftime zero-pads the day of month, which the time.ctime() string does not
endTime = time.strftime("%d/%b/%Y:%H:%M:%S", time.localtime(endtime))
beginTime = time.strftime("%d/%b/%Y:%H:%M:%S", time.localtime(endtime - 120))
#Logging setup
logging.basicConfig(level=logging.INFO,
                format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                datefmt='%a, %d %b %Y %H:%M:%S',
                filename='XXX.log',
                filemode='a')

#Generic command execution helper

def exec_Cmd(Cmd):
    (status,output) = commands.getstatusoutput(Cmd)
    try:
        if int(status) == 0:
            print Cmd + " is OK!"
            logging.info(Cmd + " ---is OK!")
            return output
        else:
            logging.info(Cmd + " cant't finish...")
            print Cmd + " can't finish...."
            sys.exit(1)
    except:
        logging.info(Cmd + " is Wrong")
        print Cmd + " is Wrong!"
        sys.exit(1)
#Get the list of URLs most likely to be under CC attack
def CCDef_url():
    global urlAlert, logPath, beginTime, endTime
    #Wrap the bash shell command; it searches the log for the two minutes before now. tail -f could also be used to save time. I don't fully understand the awk syntax, an ops colleague provided it.
    urlCmd = "awk '$4>\"[" + beginTime + "\"&&$4<\"[" + endTime + "\"' " + logPath + "|awk '{a[$7]++}END{for(i in a)print a[i],i}'|sort -n|tail -n 10|column -t"
    urlOutput = exec_Cmd(urlCmd)
    urlList = []
    #Parse the output and collect the URLs whose hit count exceeds the alert threshold
    for line in (urlOutput.split('\n')):
        urlCountList = line.split()
        if len(urlCountList) < 2:
            continue
        if int(urlCountList[0]) > urlAlert:
            urlList.append(urlCountList[1])
            #logging.info("cc's url is: " + urlCountList[1])
    return urlList
#Get the list of IPs that hit those URLs
def CCDef_ip(urlList):
    global ipAlert, logPath, beginTime, endTime
    ipList = []
    for url in urlList:
        #Tweak the awk pipeline to count hits per client IP for this URL
        ipCmd = "awk '$4>\"[" + beginTime + "\"&&$4<\"[" + endTime + "\"' " + logPath + "|grep " + url + "|awk '{a[$1]++}END{for(i in a)print a[i],i}'|sort -n|tail -n 10|column -t"
        ipOutput = exec_Cmd(ipCmd)
        #Parse the output and collect the IPs whose hit count exceeds the alert threshold
        for line in (ipOutput.split('\n')):
            ipCountList = line.split()
            if len(ipCountList) < 2:
                continue
            if int(ipCountList[0]) > ipAlert:
                ipList.append(ipCountList[1])
                #logging.info("cc's ip is: " + ipCountList[1])
    return ipList
#Add the most suspicious attacking IPs to the firewall
def dropIptables(ipList):
    drop_ip_list = []
    #First list the IPs already in iptables so we can filter them out and avoid duplicate rules
    iptablesOutput = exec_Cmd('iptables -vnL')
    for line in iptablesOutput.split('\n'):
        line = line.split()
        if not line or not line[0].isdigit():
            continue
        drop_ip_list.append(line[7])

    for ip in ipList:
        if ip in drop_ip_list:
            continue
        dropCmd = "iptables -I INPUT -s " + ip + " -j DROP"
        print dropCmd
        #Do it!!!
        exec_Cmd(dropCmd)

def main():
    urlList = CCDef_url()
    ipList = CCDef_ip(urlList)
    print ipList
    dropIptables(ipList)
    pass

if __name__=="__main__":
    main()
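
A side note: the script above is Python 2 (the commands module and the print statement are gone in Python 3). If you need to run it on Python 3, a minimal sketch of the same command helper using subprocess could look like this; the name exec_cmd is just a stand-in, not part of the script above.

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import subprocess, sys, logging

def exec_cmd(cmd):
    #Run the shell pipeline and return its output, mirroring exec_Cmd above
    result = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    if result.returncode == 0:
        logging.info(cmd + " ---is OK!")
        return result.stdout
    logging.info(cmd + " can't finish...")
    sys.exit(1)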

The bash shell version, written by an ops colleague. It really saves a lot of lines; I need to get serious about learning shell scripting.

#!/bin/bash
#while true
#do
        tail -f XXX.log > url.txt &
        echo "wait 120s"
        sleep 120s
        #Stop the background tail (the kill is assumed here)
        kill `ps -ef | grep "tail -f" | grep -v grep | awk '{print $2}' | xargs` > /dev/null

        #First and last timestamps in the captured window (head/tail are assumed here)
        one=`head -n 1 url.txt | awk '{print $4}'`
        two=`tail -n 1 url.txt | awk '{print $4}'`
        #URLs requested more than 500 times in the window; the pipeline is assumed to mirror the Python version's awk
        url_max=`awk '$4>"'$one'"&&$4<"'$two'"' url.txt | awk '{a[$7]++}END{for(i in a)print a[i],i}' | sort -n | tail -n 10 | column -t | awk '{if ($1 > 500) print $2}' | xargs`
        echo $url_max >> url.log
        for i in $url_max
        do
            #IPs that requested this URL more than 100 times (counting pipeline assumed, same shape as the manual grep command above)
            ipaddress=`grep "$i" url.txt | awk '{a[$1]++}END{for(i in a)print a[i],i}' | sort -n | tail -n 10 | column -t | awk '{if ($1 > 100) print $2}' | xargs`
            for ip in $ipaddress
            do
                iptables -L -n | grep "$ip"
                if [ $? -ne 0 ]
                then
                    echo $ip >> url.log
                    iptables -I INPUT -s $ip -j DROP
                fi
            done
        done
#done
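
Neither script loops by itself as posted: the Python version scans one 120-second window per run, and the bash version's while true / done lines are commented out. You can either uncomment the bash loop or schedule the Python script every two minutes from cron; a sketch of an /etc/crontab entry (the paths are placeholders, adjust them to your setup):

*/2 * * * * root /usr/bin/python /opt/scripts/cc_block.py >> /var/log/cc_block.log 2>&1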