#!/usr/bin/env python2
#ofexploit@gmail.com
# "graBing" -- DNS scrape / reverse-IP lookup tool built on Bing search.
# Python 2 only (print statements, httplib).

# ASCII-art "graBing" banner, printed on every run.
print "                    __________.__                "
print "   ________________ \______   \__| ____    ____  "
print "  / ___\_  __ \__  \ |    |  _/  |/    \  / ___\ "
print " / /_/  >  | \// __ \|    |   \  |   |  \/ /_/  >"
print " \___  /|__|  (____  /______  /__|___|  /\___  / "
print "/_____/            \/       \/        \//_____/  "

# Tool name / author credit.
print '''
	DNS Scrap & ReverIP
	By Saeed Dehghan;

'''
import sys

def start():

	# import library os & socket

	import os
	from socket import gethostbyname,gethostbyaddr	

	# Get File Name

	init=os.path.basename(sys.argv[0])
	if os.path.dirname(sys.argv[0])==os.getcwd():
	    init="./" + init

	# Fill in the parameter and Set option

	import argparse
	usage="%(prog)s [-i ip] [-v verbose]"
	parser=argparse.ArgumentParser(usage=usage)
	parser.add_argument("-i","--ip",type=str,help="Enter Your Target ip",action="store",dest="ip")
	parser.add_argument("-v","--verbose",action="count",dest="verbose",help="result more -v -v -v or -vvv")
	parser.add_argument("-o","--output",type=str,action="store",dest="output",help="Save response in file")
	parser.add_argument("-V","--version",action="version",version="%(prog)s \x31.\x32(Gama)",help="Version graBing")
	args=parser.parse_args()

	# print help and example	

	if len(sys.argv) == 1:
		parser.print_help()
		sys.exit(1)
		print 'example : %s 127.0.0.1 -vvvv\n\t127.0.0.1 -vv -o response.txt'%init

	
	import string
	import re
	import httplib

	# Check ip define

	def pransor(ip):
		ip_match=re.search(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip)

		if ip_match:
			return 1
		else:
			return 0

	# Check ip & Check Network 

	if args.ip:
		if pransor(args.ip):
			ip=args.ip
		else:
			print "There is no such an IP"
			sys.exit(1)
		try:
			host = gethostbyaddr(ip)
			gethostbyname("adobe.com")
		except:
			print "network! ip Not online . Connection Checking"
			sys.exit(1)
	else:
		parser.print_help()
		print("ip Not Found.")
		sys.exit(1)

	# set limit

	if args.verbose:
		limit=args.verbose
	else:
		limit=0

	# set output

	if args.output:
		output = args.output
	else:
		output = False

	# class Cleaning the source

	class parser:
	    def __init__(self,source):
	        self.source=source
	        self.temp=[]

	    def genericClean(self):
	        self.source=re.sub('<em>','',self.source)
	        self.source=re.sub('<wbr>','',self.source)
	        self.source=re.sub('</wbr>','',self.source)
	        self.source=re.sub('<b>','',self.source)
	        self.source=re.sub('</b>','',self.source)
	        self.source=re.sub('</em>','',self.source)
	        self.source=re.sub('%2f',' ',self.source)
	        self.source=re.sub('%3a',' ',self.source)
	        self.source=re.sub('<strong>','',self.source)
	        self.source=re.sub('</strong>','',self.source)

	        for b in ('>','\\','%3C',':','=','<','/',';','&','%3A','%3D'):
	            self.source=string.replace(self.source,b,' ')

	    def ecco(self):
	        self.source=re.sub('<em>','',self.source)
	        self.source=re.sub('</em>','',self.source)
	        self.source=re.sub('%2f',' ',self.source)
	        self.source=re.sub('%3a',' ',self.source)
	        for e in ('<','>',':','=',';','&','%3A','%3D','%3C'):
	            self.source=string.replace(self.source,e,' ')

	    def cransor(self):
	        reg_hosts=re.compile(r'<cite>([\w\.\-?\/\/=\-\d:]*?)?</cite>')
	        temp=reg_hosts.findall(self.source)
	        for x in temp:
	            if x.count(':'):
	                res=x.split(':')[1].split('/')[2]
	            else:
	                res=x.split("/")[0]
	            self.temp.append(res)
	        hostnames=self.powex()
	        return hostnames

	    def powex(self):
	        self.new=[]
	        for x in self.temp:
	            if x not in self.new:
	                self.new.append(x)
	        return self.new

	# set requests and connection

	def downing(ip,page):
		totalresults=''
		url="/search?q=ip:"+ip+"&go=&count=50&FORM=QBHL&qs=n&first="+str(page)
		h=httplib.HTTP('www.bing.com')
		h.putrequest('GET',url)
		h.putheader('Host',"www.bing.com")
		h.putheader('Cookie','mkt=en-US;ui=en-US;SRCHHPGUSR=NEWWND=0&ADLT=DEMOTE&NRSLT=50')
		h.putheader('Accept-Language','en-us,en')
		h.putheader('User-agent',"Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)")
		h.endheaders()
		returncode,returnmsg,headers=h.getreply()
		results=h.getfile().read()
		totalresults += results
		return totalresults

	# Scroll again

	def verbose(source,verlen):
		all_hosts=[]
		regex=re.compile(r'a aria-label="Page*')
		all_page=len(re.findall(regex,source))

		if verlen>all_page or verlen==all_page:
			num=0
			while 1:
				x=downing(ip,num)
				all_hosts.append(parser(x).cransor())
				if num==all_page:
					break
				num=num+1

		elif verlen < all_page and verlen != 0:
			num=0
			while 1:
				x=downing(ip,num)
				all_hosts.append(parser(x).cransor())
				if num==verlen:
					break
				num =num+1

		else:
			x=downing(ip,verlen)
			all_hosts.append(parser(x).cransor())
		return all_hosts

		
	source=downing(ip,0)
	end=verbose(source,limit)
	print "[~] Search ended"
	print "[~] Result Search in Bing ~\n"

	if end==[[]]:
		print "No site found :~)"
		sys.exit()

	# Count the result
		
	num=0

	# print responses

	for array in end:
		for path in array:
			try:
				theip = gethostbyname(path)
			except:
				theip = '...'

			print "\t"+str(num)+"_ "+path+" ~ "+theip
			num=num+1
			if output:
				try:
					o = open(output,"a+")
					o.write(path+'\n')
					retrev = True
				except:
					pass

	# Pouring in the file

	if output:
		if retrev:
			print "\nResult Saved in %s"%output
		else:
			print "\nFailed to save to file !"

if __name__ == "__main__":
	try:
		start()
	except KeyboardInterrupt:
		print "Cancellation by user."
	except:
		sys.exit(1)
