While Hacking Defined Experts is still in session, Itzik Moshe wrote this tool. We think it's cool, and it has a really nice interface with shiny colours, so here you go.
wget http://hackingdefined.org/tools/lazydorks.txt
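It is a Python 2 script (print statements, raw_input, urlparse, httplib) and it needs the mechanize and beautifulsoup4 libraries. Assuming you rename the download to lazydorks.py (the .py name is just our choice), something along these lines should get it running (make sure python points at a Python 2 interpreter):

sudo pip install mechanize beautifulsoup4
mv lazydorks.txt lazydorks.py
python lazydorks.py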
Or you can view the source here:
#!/usr/bin/python
# -*- coding: utf-8 -*-
#LazyDorks Tool v0.2 Written by Itzik Moshe, Jan 2013
#See-Security Hacking Defined Experts 39
try:
    import mechanize
except ImportError:
    print "LazyDorks requires the mechanize library.\nPlease install it: 'sudo pip install mechanize'"
    exit()
try:
    from bs4 import BeautifulSoup
except ImportError:
    print "LazyDorks requires the BeautifulSoup4 library.\nPlease install it: 'sudo pip install beautifulsoup4'\nor 'easy_install beautifulsoup4'"
    exit()
import httplib
import urlparse
import re
import random
import socket
import time
from time import sleep
import csv
#### Browser Object Setting ####
socket.setdefaulttimeout(15)
br = mechanize.Browser()
br.set_handle_robots(False)
#### END ####
intro = '\033[93m'+'''
 *************************************
 *         L a z y D o r k s         *
 *************************************
#LazyDorks Tool v0.2 Written by Itzik Moshe
#See-Security Hacking Defined Experts 39
#Choose your option and insert the domain you would like to scan.
'''+ '\033[0m'
admins_list = ['admin/','administrator/','admin1/','admin2/','admin3/','admin4/','admin5/','usuarios/','usuario/','administrator/','moderator/','webadmin/','adminarea/','bb-admin/','adminLogin/','admin_area/','panel-administracion/','instadmin/',
'memberadmin/','administratorlogin/','adm/','admin/account.php','admin/index.php','admin/login.php','admin/admin.php','admin/account.php',
'admin_area/admin.php','admin_area/login.php','siteadmin/login.php','siteadmin/index.php','siteadmin/login.html','admin/account.html','admin/index.html','admin/login.html','admin/admin.html',
'admin_area/index.php','bb-admin/index.php','bb-admin/login.php','bb-admin/admin.php','admin/home.php','admin_area/login.html','admin_area/index.html',
'admin/controlpanel.php','admin.php','admincp/index.asp','admincp/login.asp','admincp/index.html','admin/account.html','adminpanel.html','webadmin.html',
'webadmin/index.html','webadmin/admin.html','webadmin/login.html','admin/admin_login.html','admin_login.html','panel-administracion/login.html',
'admin/cp.php','cp.php','administrator/index.php','administrator/login.php','nsw/admin/login.php','webadmin/login.php','admin/admin_login.php','admin_login.php',
'administrator/account.php','administrator.php','admin_area/admin.html','pages/admin/admin-login.php','admin/admin-login.php','admin-login.php',
'bb-admin/index.html','bb-admin/login.html','acceso.php','bb-admin/admin.html','admin/home.html','login.php','modelsearch/login.php','moderator.php','moderator/login.php',
'moderator/admin.php','account.php','pages/admin/admin-login.html','admin/admin-login.html','admin-login.html','controlpanel.php','admincontrol.php',
'admin/adminLogin.html','adminLogin.html','admin/adminLogin.html','home.html','rcjakar/admin/login.php','adminarea/index.html','adminarea/admin.html',
'webadmin.php','webadmin/index.php','webadmin/admin.php','admin/controlpanel.html','admin.html','admin/cp.html','cp.html','adminpanel.php','moderator.html',
'administrator/index.html','administrator/login.html','user.html','administrator/account.html','administrator.html','login.html','modelsearch/login.html',
'moderator/login.html','adminarea/login.html','panel-administracion/index.html','panel-administracion/admin.html','modelsearch/index.html','modelsearch/admin.html',
'admincontrol/login.html','adm/index.html','adm.html','moderator/admin.html','user.php','account.html','controlpanel.html','admincontrol.html',
'panel-administracion/login.php','wp-login.php','adminLogin.php','admin/adminLogin.php','home.php','admin.php','adminarea/index.php',
'adminarea/admin.php','adminarea/login.php','panel-administracion/index.php','panel-administracion/admin.php','modelsearch/index.php',
'modelsearch/admin.php','admincontrol/login.php','adm/admloginuser.php','admloginuser.php','admin2.php','admin2/login.php','admin2/index.php','usuarios/login.php',
'adm/index.php','adm.php','affiliate.php','adm_auth.php','memberadmin.php','administratorlogin.php','wp-admin','moodle/login','?q=admin','?q=user/login','user']
agents = ['Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.101 Safari/537.11',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.1) Gecko/2008071615 Fedora/3.0.1-1.fc9 Firefox/3.0.1',
'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.17 (KHTML, like Gecko) Chrome/24.0.1309.0 Safari/537.17',
'Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.15 (KHTML, like Gecko) Chrome/24.0.1295.0 Safari/537.15',
'Mozilla/5.0 (Windows NT 6.2; Win64; x64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1',
'Mozilla/5.0 (X11; Ubuntu; Linux i686; rv:15.0) Gecko/20100101 Firefox/15.0.1',
'Mozilla/6.0 (Windows NT 6.2; WOW64; rv:16.0.1) Gecko/20121011 Firefox/16.0.1']
login_dorks = ['inurl:admin|login|myphp|phpliteadmin|phpadmin|phpmyadmin|administrator|webadmin|adm_auth|sysadmin|user',
'intitle:"admin | login | log-in | administrator | phpadmin | phpmyadmin | adminphp"',
'intext:"admin | username | password | login | log-in | phpadmin | phpmyadmin | adminphp"']
pw_dork = 'pwd|pass|password|passwords|passwd|user|users|usr|admin|username|usernames'
files_type = ['txt','xls','sql','inc','con','cnf']
dork_type = ['inurl:','intext:']
results = []
adminbf_proxy = []
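# For reference: dork() below combines these lists into Google queries. With example.com
# standing in for whatever domain you enter, option 1 produces queries such as
#   site:example.com inurl:admin|login|myphp|phpliteadmin|...
# and option 2 produces one query per file type and dork type, such as
#   site:example.com filetype:txt inurl:pwd|pass|password|...
#   site:example.com filetype:sql intext:pwd|pass|password|...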
###Domain Name Checker####
def domain_check(domain,dorkt):
    if (dorkt == "login") or (dorkt == "pw"):
        url = urlparse.urlsplit(domain)
        if url.netloc == '':
            if 'www.' in url.path:
                domain = url.path.split('www.')[1]
            else:
                domain = url.path
        else:
            if 'www.' in url.netloc:
                domain = url.netloc.split('www.')[1]
            else:
                domain = url.netloc
    if (dorkt == "robots") or (dorkt == "adminbf"):
        y = urlparse.urlsplit(domain)
        if y.netloc == '':
            domain = "http://"+domain
        url = urlparse.urlsplit(domain)
        domain = url.netloc
    return domain
#### End ####
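# For reference, a few examples of what domain_check() returns (example.com is just a stand-in):
#   domain_check('http://www.example.com', 'login')  -> 'example.com'
#   domain_check('www.example.com', 'pw')            -> 'example.com'
#   domain_check('example.com', 'adminbf')           -> 'example.com'
#   domain_check('http://www.example.com', 'robots') -> 'www.example.com'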
####Log Colors Functions####
def logm(msg):
    print '\033[96m' + msg + '\033[0m'
def erm(msg):
    print '\033[91m' + msg + '\033[0m'
def sysm(msg):
    print '\033[94m' + msg + '\033[0m'
#### End ####
###Print Results Function####
def print_results(dorkt):
    if dorkt == "login":
        erm("Results for pages that might be a login page:")
    if dorkt == "pw":
        erm("Results for files that might contain usernames and passwords:")
    if dorkt == "robots":
        erm("Pages disallowed by robots.txt:")
    if dorkt == "adminbf":
        erm("Possible Admin Page:")
    if dorkt == "owndork":
        erm("Your Google Dorking Results:")
    for p in results:
        erm(p)
#### End ####
####Proxy Handle####
def proxy_setter():
    proxy = raw_input('\033[94m'+'Please Insert Proxy (IP:Port): '+'\033[0m')
    check = proxy_checker(proxy)
    if check == "Working":
        logm("Setting Proxy...")
        br.set_proxies({"http":proxy})
    elif check == "Bad":
        erm("Your proxy is not working well, please try a different one.")
        proxy_setter()
def proxy_checker(proxy):
    try:
        logm("Testing Proxy, Please Wait...")
        test_br = mechanize.Browser()
        test_br.addheaders = [('User-agent','Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_2) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.101 Safari/537.11')]
        test_br.set_proxies({"http": proxy})
        # Measure elapsed seconds with time.time() so the check works across minute/hour boundaries.
        start = time.time()
        test_br.open('http://google.com')
        finish = time.time()
        logm("Proxy is Online")
        if finish - start > 45:
            erm("Proxy is pretty slow!\nA slow proxy might cause timeouts!\nContinue? (Yes/No):")
            inp = raw_input('')
            if (inp == "no") or (inp == "No"):
                erm("\nThank you for using LazyDorks :)")
                exit()
        return "Working"
    except:
        return "Bad"
#### End of Proxy Handle ####
####Export results Functions####
def txt_export(dorkt):
    t = time.gmtime()
    filename = "lazydork_result{}{}{}{}.txt".format(t.tm_year,t.tm_mday,t.tm_min,t.tm_sec)
    f = open(filename,'w')
    if dorkt == "login":
        f.write("Results for pages that might be a login page:\n")
    if dorkt == "pw":
        f.write("Results for files that might contain usernames and passwords:\n")
    if dorkt == "robots":
        f.write("Pages disallowed by robots.txt:\n")
    if dorkt == "adminbf":
        f.write("Possible Admin Page:\n")
    if dorkt == "owndork":
        f.write("Your Google Dork Results:\n")
    for w in results:
        f.write(w+'\n')
    f.close()
    logm('Exported results to {}'.format(filename))
def csv_export(dorkt):
    t = time.gmtime()
    filename = "lazydork_result{}{}{}{}.csv".format(t.tm_year,t.tm_mday,t.tm_min,t.tm_sec)
    f = open(filename,'wb')
    w = csv.writer(f, dialect='excel')
    if dorkt == "login":
        w.writerow(['Results for pages that might be a login page:',])
    if dorkt == "pw":
        w.writerow(['Results for files that might contain usernames and passwords:',])
    if dorkt == "robots":
        w.writerow(['Pages disallowed by robots.txt:',])
    if dorkt == "adminbf":
        w.writerow(['Possible Admin Page:',])
    if dorkt == "owndork":
        w.writerow(['Your Google Dork Results:',])
    for item in results:
        w.writerow([item,])
    f.close()
    logm('Exported results to {}'.format(filename))
#### End ####
####Admin Brute Force Function####
def adminbf(domain):
    logm("Brute forcing {} for an admin page (in case of no results, try again with 'www.')\nIt might take a few minutes, Please Wait...".format(domain))
    for a in admins_list:
        con = httplib.HTTPConnection(domain)
        con.request("GET",'/'+a)
        logm("{}/{}".format(domain,a))
        res = con.getresponse()
        if res.status == 200:
            results.append("{}/{}".format(domain,a))
#### End ####
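# For reference: with www.example.com (a stand-in) as the target, adminbf() requests
# http://www.example.com/admin/, /administrator/, /wp-login.php and so on down the list,
# and keeps every path that answers with HTTP status 200.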
###Google Scraping Functions###
def dorker(query,dorkt):
    if dorkt == "owndork": logm("Google Dorking your query: {}\nPlease Wait...".format(query))
    # Rotate the User-Agent and submit the query through Google's search form.
    br.addheaders = [('User-agent', random.choice(agents))]
    br.open('http://www.google.com')
    br.select_form(nr=0)
    br.form['q'] = query
    try:
        br.submit()
    except:
        erm("Something Went Wrong, Check Your internet connection or Proxy")
        exit()
    scrapper(br,dorkt)
    n_t = random.randrange(5,9)
    sleep(n_t)
def scrapper(br,dorkt):
    # Walk through the result pages until there is no 'next' link, pausing between requests.
    while True:
        Get_Links(br,dorkt)
        next_page = get_Npage(br)
        if next_page == 'Done': break
        n_t = random.randrange(1,4)
        sleep(n_t)
        try:
            br.open(next_page)
        except:
            erm("Something Went Wrong, Please Check your internet connection or replace Proxy")
            exit()
def Get_Links(br,dorkt):
    # Pull result URLs out of the <h3 class="r"> anchors on a Google results page.
    html = br.response().read()
    soup = BeautifulSoup(html)
    for link in soup.find_all('h3',{"class":"r"}):
        for links in link.find_all('a'):
            href = links.get('href')
            if "http://" not in href and "https://" not in href:
                continue
            if 'google.com/' in href:
                continue
            if '/url?q=' not in href:
                results.append(href)
            else:
                results.append(href.split('/url?q=')[1].split('&sa=U')[0])
def get_Npage(br):
    # Find the 'next page' link (id='pnnext'); return 'Done' when there is none.
    html = br.response().read()
    soup = BeautifulSoup(html)
    Npage = 'Done'
    try:
        for link in soup.find_all('td',{"class":"b"}):
            for links in link.find_all('a'):
                if links.get('id') == 'pnnext':
                    Npage = links.get('href')
                    if "http://www.google.com" not in Npage:
                        Npage = "http://www.google.com{}".format(Npage)
        return Npage
    except:
        return 'Done'
#### End ####
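# Note: the scraper above depends on the markup Google used at the time -- result links
# inside <h3 class="r"> anchors and a next-page link with id="pnnext" -- so if Google
# changes its results page, Get_Links() and get_Npage() will simply return nothing.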
def dork(domain,dorkt):
    if dorkt == "login":
        logm("Google Dorking Login Pages\nIt might take a few minutes, Please Wait...")
        for login_dork in login_dorks:
            dorker("site:{} {}".format(domain,login_dork),dorkt)
    elif dorkt == "pw":
        logm("Google Dorking Pages that might contain Passwords or Usernames\nIt might take a few minutes, Please Wait...")
        for files in files_type:
            for dork_t in dork_type:
                dorker("site:{} filetype:{} {}{}".format(domain,files,dork_t,pw_dork),dorkt)
    elif dorkt == "robots":
        br.addheaders = [('User-agent','Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)')]
        try:
            logm("Searching For robots.txt")
            br.open('http://{}/robots.txt'.format(domain))
            for x in br.response().readlines():
                if "Disallow:" in x:
                    results.append(x.split('Disallow:')[1].split('\n')[0].strip())
        except:
            erm("Couldn't Find robots.txt")
    elif dorkt == "adminbf":
        adminbf(domain)
def choosedork():
    sysm(''' Please Choose Your Option:
1)Google Dork Login Pages
2)Google Dork Pages That Might Contain Usernames/Passwords
3)Google Dork Your Own Query
4)Disallowed Pages in robots.txt
5)Admin Page Brute Force (does not support proxies)
''')
    o = raw_input('\033[94m' +'Your Option: '+ '\033[0m')
    # Compare with == rather than 'is': raw_input() returns a new string object,
    # so identity checks against literals would never match.
    if o == "1":
        return 'login'
    elif o == "2":
        return 'pw'
    elif o == "3":
        return 'owndork'
    elif o == "4":
        return 'robots'
    elif o == "5":
        return 'adminbf'
    else:
        erm('No Such Option, Please Choose Again.')
        return choosedork()
def result_handler(dorkt):
    sysm('''LazyDorks Found Results. What would you like to do?
1)Export Results to .txt File
2)Export Results to .csv File
3)Print Results Here
''')
    o = raw_input('\033[94m' +'Your Option: '+ '\033[0m')
    if o == "1":
        txt_export(dorkt)
    elif o == "2":
        csv_export(dorkt)
    elif o == "3":
        print_results(dorkt)
    else:
        erm("No such option, please choose again")
        result_handler(dorkt)
def owndork(dorkt):
    query = raw_input('\033[94m' +"Insert Your Google Dorking Query: "+ '\033[0m')
    dorker(query,dorkt)
def main():
    print intro
    dorkt = choosedork()
    if dorkt != "adminbf":
        px = raw_input('\033[94m' +'Would you like to use a proxy?\nPlease type Yes/No: '+ '\033[0m')
        if (px == "yes") or (px == "Yes"):
            proxy_setter()
    if dorkt != "owndork":
        domain = raw_input('\033[94m'+'Insert Domain (Example.com): '+ '\033[0m')
        domain = domain_check(domain,dorkt)
        dork(domain,dorkt)
        logm("Done Searching")
    elif dorkt == "owndork":
        owndork(dorkt)
        logm("Done Searching")
    if results:
        result_handler(dorkt)
    else:
        erm("LazyDorks Couldn't Find Any Results")
    erm("\nThank you for using LazyDorks :)")
if __name__ == "__main__":
    main()
Hi guys,
you can also find the script on Pastebin (with the original design :P):
http://pastebin.com/ZUZhTVzd
Itzik
ITZIK, You are #1 !!