Version 9.x robot and request limiting iRule

Problem this snippet solves:

This iRule limits robots and what they can do. Furthermore, it restricts requests per second and blacklists a client that goes above the limit

Code :

when RULE_INIT {

#Define blacklist timeout
set ::bl_timeout 30
#Define request per minute threshold
set ::req_limit 5
#Expiration for tracking IPs
set ::expiration_time 300
#Sets iRule Runlevel 0-log only 1 - Logging and Blocking
set ::runlevel 1

}

when HTTP_REQUEST {

#Captures User-Agent header to check for known robots
set ua [string tolower [HTTP::header User-Agent]]
log local0. "User Agent: $ua"


    #Checks to see if the connection is a known robot or requests the robot.txt file
if { ([matchclass $ua contains $::RUA]) or ([string tolower [HTTP::uri]] contains "robot.txt") } {
set robot 1
log local0. "Robot Detected"
    } else {
set robot 0
    }

#Defines client_ip variable with the address of the client
set client_ip [IP::client_addr]
log local0. "Client IP: $client_ip"

    
#Robot logic
if { $robot > 0 }{
set bl_check [session lookup uie blacklist_$client_ip]
log local0. "Value of bl_check variable: $bl_check"
set req_uri [string tolower [HTTP::uri]]
log local0. "Request URI: $req_uri"


#Checks to see if IP address is on blacklist
if { $bl_check ne ""}{
log local0.warn "Request Blocked: $client_ipClient on Blacklist[HTTP::request]"
if { $::runlevel > 0 }{
HTTP::respond 403
    }
}


#Checks to see if Robot is allowed and sets restrictions. Default is no access
switch -glob $ua {
"*slurp*" -
"*yahooseeker*" -
"*googlebot*" - 
"*msnbot*" -
"*teoma*" -
"*voyager*" {
if { [matchclass $req_uri starts_with $::robot_block] }{
log local0.warn "Request Blocked: $client_ipRequest Blocked. Robot not following Robot.txt[HTTP::request]"
if { $::runlevel > 0 }{
HTTP::respond 403
}
} else {
pool dave_pool
}
}
default {
log local0.warn "Request Blocked: $client_ipRequest Blocked, Unauthoried Robot[HTTP::request]"
if { $::runlevel > 0 }{
HTTP::respond 403
}
}
}
} 


#Logic for non-robots. Checks to see if blacklisted
set bl_check [session lookup uie blacklist_$client_ip]
log local0. "Non-Robot bl_check: $bl_check"

if { $bl_check ne "" }{
log local0.warn "Request Blocked: $client_ipClient on Blacklist[HTTP::request]"
log local0.warn "Session Record: $bl_check"
if { $::runlevel > 0 }{
HTTP::respond 403
    }
}

set curr_time [clock seconds]
set timekey starttime_$client_ip
set reqkey reqcount_$client_ip
set request_count [session lookup uie $reqkey]
log local0. "Request Count: $request_count"


#If user uses search their request count is reset
if { [HTTP::uri] starts_with "/search" }{
session delete uie $reqkey
}


#Sets up new count for first time connections. If not a new connection, connection count is incremented and the iRule checks to 
#see if over the threshold
if { $request_count eq "" } {
log local0. "Request Count is 0"
set request_count 1
session add uie $reqkey $request_count $::expiration_time
log local0. "Current Time: $curr_time"
log local0. "Timekey Value: $timekey"
log local0. "Reqkey value: $reqkey"
session add uie $timekey [expr {$curr_time - 2}] [expr {$::expiration_time + 2}]
log local0. "Request Count is now: $request_count"
} else {
set start_time [session lookup uie $timekey]
log local0. "Start Time: $start_time"
log local0. "Request Count (beyond first request): $request_count"
incr request_count
session add uie $reqkey $request_count $::expiration_time
set elapsed_time [expr {$curr_time - $start_time}]
log local0. "Elapsed Time: $elapsed_time"

if {$elapsed_time < 60} {
set elapsed_time 60
}

set curr_rate [expr {$request_count / ($elapsed_time/60)}]
log local0. "Current Rate of Request for $client_ip: $curr_rate"

if {$curr_rate > $::req_limit}{
log local0.warn "Request Blocked: $client_ipClient over Threshold. Added to Blacklist[HTTP::request]"

if { $::runlevel > 0 }{
session add uie blacklist_$client_ip $::bl_timeout
HTTP::respond 403
}

}
}
}
Published Mar 18, 2015
Version 1.0
No CommentsBe the first to comment