ADD Rate Limiting Build Test REF: #175

This commit is contained in:
Mitchell Krog 2019-06-26 09:09:22 +02:00
parent a52b9b3e2b
commit b9cdff44ec
No known key found for this signature in database
GPG key ID: C243C388553EDE5D
3 changed files with 125 additions and 0 deletions

View file

@ -228,6 +228,58 @@ echo "${bold}${green}Whitelisting Test Complete"
echo "${bold}${green}--------------------------"
printf "\n\n"
# ******************
# TEST RATE LIMITING
# ******************
printf "\n"
echo "${bold}${green}---------------------------"
echo "${bold}${green}Starting Rate Limiting Test"
echo "${bold}${green}---------------------------"
printf "\n\n"
# ***************************
# Make GoogleBot Rate Limited
# ***************************
echo "${bold}${yellow}-----------------------------"
echo "${bold}${yellow}Making GoogleBot ${yellow}RATE LIMITED"
echo "${bold}${yellow}-----------------------------"
printf "\n\n"
# Install the rate-limiting test configs.
# Paths are quoted to guard against an empty/space-containing TRAVIS_BUILD_DIR.
sudo cp "${TRAVIS_BUILD_DIR}/.dev-tools/test_units/rate-limiting-user-agents.conf" /etc/nginx/bots.d/blacklist-user-agents.conf
sudo cp "${TRAVIS_BUILD_DIR}/.dev-tools/test_units/custom-bad-referrers.conf" /etc/nginx/bots.d/custom-bad-referrers.conf
echo "${bold}${green}---------------"
echo "${bold}${green}Reloading Nginx"
echo "${bold}${green}---------------"
printf "\n\n"
# Validate the config first; '&&' prevents reloading Nginx with a broken config
sudo nginx -t && sudo nginx -s reload
echo "${bold}${yellow}-----------------------------------------------------------------------"
echo "${bold}${yellow}Sleeping for 30 seconds to allow Nginx to Properly Reload inside Travis"
echo "${bold}${yellow}-----------------------------------------------------------------------"
printf "\n\n"
sleep 30s
# Fire one request per entry in the list (sent as the Referer header) and
# report whether each request is still served.
# A while/read loop replaces the old IFS=$'\n' + for-in-$(cat) pattern:
# it performs no word-splitting or globbing, and the '|| [[ -n ... ]]'
# clause still processes a final line that lacks a trailing newline.
file="${TRAVIS_BUILD_DIR}/.dev-tools/test_units/rate-limit-googlebot.list"
while IFS= read -r line || [[ -n "${line}" ]]; do
  [[ -z "${line}" ]] && continue   # skip blank lines, as the old for-loop did
  # '-s' silences curl's progress meter so grep matches only the response body
  # (the old '2>&1' piped that progress noise into grep as well)
  if curl -s http://localhost:9000 -e "http://${line}" | grep -i 'Welcome'; then
    echo "${bold}${green}PASSED - ${red}${line} was ${bold}${green}ALLOWED"
  else
    echo "${bold}${red}FAILED - ${red}${line} was ${bold}${red}NOT ALLOWED"
  fi
done < "${file}"
IFS=""   # kept: code after this section appears to expect an empty IFS
printf "\n"
echo "${bold}${green}---------------------------"
echo "${bold}${green}Rate Limiting Test Complete"
echo "${bold}${green}---------------------------"
printf "\n\n"
# *******************
# RELEASE NEW VERSION
# *******************

View file

@ -0,0 +1,10 @@
GoogleBot
GoogleBot
GoogleBot
GoogleBot
GoogleBot
GoogleBot
GoogleBot
GoogleBot
GoogleBot
GoogleBot

View file

@ -0,0 +1,63 @@
# EDIT THIS FILE AS YOU LIKE TO BLACKLIST OR WHITELIST ANY BAD USER-AGENT STRINGS YOU WANT TO SCAN FOR
# ****************************************************************************************************
### VERSION INFORMATION #
###################################################
### Version: V3.2018.05
### Updated: 2018-08-21
###################################################
### VERSION INFORMATION ##
##############################################################################
# _ __ _ #
# / |/ /__ _(_)__ __ __ #
# / / _ `/ / _ \\ \ / #
# /_/|_/\_, /_/_//_/_\_\ #
# __/___/ __ ___ __ ___ __ __ #
# / _ )___ ____/ / / _ )___ / /_ / _ )/ /__ ____/ /_____ ____ #
# / _ / _ `/ _ / / _ / _ \/ __/ / _ / / _ \/ __/ '_/ -_) __/ #
# /____/\_,_/\_,_/ /____/\___/\__/ /____/_/\___/\__/_/\_\\__/_/ #
# #
##############################################################################
# Add One Entry Per Line - List all the extra bad User-Agents you want to permanently block or whitelist.
# This is for User-Agents that are not included in the main list of the bot blocker
# This file must exist on your system or Nginx will fail a reload due to a missing file
# This allows you finer control of keeping certain bots blocked and automatic updates will
# Never be able to remove this custom list of yours
# Please note this include file loads first before any of the already whitelisted User-Agents
# in the bad bot blocker. By loading first in line it over-rides anything below it so for instance
# if you want to block Baidu, Google or Bing for any reason you add them to this file which loads
# first and takes precedence over anything below it. This now allows even finer control over the
# bad bot blocker. Enjoy !!!
# Even though this file is called blacklist-user-agents, as mentioned it can also be used to whitelist user agents
# By adding them below and setting the 3; to 0; this will permanently whitelist the User-Agent.
# Make sure any words that contain special characters are escaped and include word boundaries as per the Regex examples below.
# Example the User-Agent name "someverybaduseragentname1" is entered as "\bsomeverybaduseragentname1\b"
# Example the User-Agent name "some-very-bad-useragentname2" is entered as "\bsome\-very\-bad\-useragentname2\b"
# the "\b" are word boundaries which prevents partial matching and false positives.
# BY DEFAULT ALL THE EXAMPLES BELOW ARE COMMENTED OUT AND HENCE NOT ENABLED
# ---------------------
# WHITELISTING EXAMPLES
# ---------------------
# "~*\bsomeverygooduseragentname1\b" 0;
# "~*\bsomeverygooduseragentname2\b" 0;
# "~*\bsome\-very\-good\-useragentname2\b" 0;
# ---------------------
# BLACKLISTING EXAMPLES
# ---------------------
# "~*\bsomeverybaduseragentname1\b" 3;
# "~*\bsomeverybaduseragentname2\b" 3;
# "~*\bsome\-very\-bad\-useragentname2\b" 3;
# START MAKE BAD BOTS GOOD ### DO NOT EDIT THIS LINE AT ALL ###
"~*(?:\b)GoogleBot(?:\b|)" 2;
# END MAKE BAD BOTS GOOD ### DO NOT EDIT THIS LINE AT ALL ###