Saturday, July 20, 2013

Red V!per Sub Domain Scanner
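A small Bash script that queries Bing for a target domain, scrapes the result pages, and writes the subdomains it finds to subdomain.txt.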



#!/bin/bash
# WebExploiter Sub Domain Scanner
# Coded By : Red H4t V!per

page=0             # Bing result-page counter
how_many=1         # result-count string scraped from the current page
URL=$1
domain=            # target domain with any leading www. stripped
single_page=       # set when Bing reports only a single page of results
last_page_check=   # set once the last result page has been reached

banner()
{
echo "           _         _                       _                                              "
echo "          | |       | |                     (_)                                             "
echo " ___ _   _| |__   __| | ___  _ __ ___   __ _ _ _ __    ___  ___ __ _ _ __  _ __   ___ _ __  "
echo "/ __| | | | '_ \ / _\ |/ _ \| '_ \ _ \ / _\ | | '_ \  / __|/ __/ _\ | '_ \| '_ \ / _ \ '__| " 
echo "\__ \ |_| | |_) | (_| | (_) | | | | | | (_| | | | | | \__ \ (_| (_| | | | | | | |  __/ |    "
echo "|___/\__,_|_.__/ \__,_|\___/|_| |_| |_|\__,_|_|_| |_| |___/\___\__,_|_| |_|_| |_|\___|_|    "
echo "                                                                                            "

}
Usage()
{
echo ""
echo "# ***************************************************************************​****​*********************#"
echo "# Usage      : WebExploiter Sub Domain Scanner <domin>                                               *#"
echo "# Help       : -h && --help : Show This Menu                                                         *#"
echo "# RunScript  : Give Permision to script and run it !!                                                *#"
echo "# ***************************************************************************​****​*********************#"
echo ""
}

Check_Arguments()
{
if [ -z "$URL" ] || [ "$URL" == "-h" ] || [ "$URL" == "--help" ]; then
Usage;
exit
fi
}

Sub_Domain()
{
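# Scrape Bing result pages for the target domain and write the unique
# subdomains found in the result links to subdomain.txt.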

rm -f alldomain_bing.txt;
rm -f sub.txt;
rm -f sub2.txt;


# Reject input that still carries a scheme prefix
if echo "$URL" | grep -iq "http://";
then
echo -e "\e[1;31m[-] Please insert the domain without http:// \e[0m"
 exit 1
fi

# Strip a leading www. so the bare domain is used for matching
if echo "$URL" | grep -q "^www\.";
then
domain=`echo "$URL" | sed 's/^www\.//'`
else
domain=$URL
fi


echo -e "\e[1;31m[*] Subdomain URLs will be saved to subdomain.txt. Searching, please wait...\e[0m"


while [ -z "$last_page_check" ] && [ -n "$how_many" ] && [ -z "$single_page" ]; do
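# Build the Bing query for this result page; first=${page}0 advances the
# offset in steps of ten (00, 10, 20, ...). The loop ends when the last page
# is detected, when no result count can be scraped, or when everything fits
# on a single page.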


url="http://www.bing.com/search?q=%27$domain%27&qs=n&pq=%27$domain%27&sc=0-0&sp=-1&sk=&first=${page}0&FORM=PERE"

wget -q -O sub_domain_bing.php "$url"

 # Last page reached when the visible range end equals the total ("N-M of M")
 last_page_check=`egrep -o '<span class="sb_count" id="count">[0-9]+-([0-9]+) of (\1)' sub_domain_bing.php`

 # Scraped result-count string; empty when Bing returns no count at all
 how_many=`egrep -o '<span class="sb_count" id="count">[^<]+' sub_domain_bing.php | cut -d '>' -f 2 | cut -d ' ' -f 1-3`

 # A single-digit result total means everything fits on one page
 single_page=`egrep -o '<span class="sb_count" id="count">[0-9] results' sub_domain_bing.php`


 cat "sub_domain_bing.php" | egrep -o "<h3><a href=\"[^\"]+" sub_domain_bing.php | cut -d '"' -f 2 >> alldomain_bing.txt
 rm -f sub_domain_bing.php
 let page=$page+1 
 done

# Keep only the host part of each link, normalise case, drop the www. prefix, de-duplicate
cat alldomain_bing.txt | cut -d '/' -f 3 | tr '[:upper:]' '[:lower:]' | sed 's/^www\.//' | sort | uniq >> sub.txt

# Keep only the hosts that actually belong to the target domain
for var in `cat sub.txt`
do
if echo "$var" | grep -q "$domain"
then
 echo "$var" >> sub2.txt
fi
done

cat sub2.txt | sort | uniq > subdomain.txt


found_N=`wc -l < subdomain.txt`
echo -e "\e[1;34m[+] Found $found_N SubDomain :) \e[0m" 


for catvar in `cat subdomain.txt`
do
echo -e "\e[1;32m[*] $catvar \e[0m"   
done 

rm -f alldomain_bing.txt;
rm -f sub.txt;
rm -f sub2.txt;
}

main ()
{
banner;
Check_Arguments;
Sub_Domain;
}

main ;

Download : http://www.mediafire.com/download/638lw6ao7zjw7c4/red-viper-sub-dmoain-scannner.sh
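For example, after downloading, make the script executable and point it at a bare domain (no http:// and no www., as the checks above require); the file name below matches the download link, and example.com stands in for your target:

chmod +x red-viper-sub-dmoain-scannner.sh
./red-viper-sub-dmoain-scannner.sh example.com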
