import sys
import threading
from optparse import OptionParser
from queue import Queue, Empty

import requests


class WEB_DIR(threading.Thread):
    """Worker thread: pull candidate URLs off the queue and probe them."""

    def __init__(self, queue):
        threading.Thread.__init__(self)
        self._queue = queue

    def run(self):
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) '
                          'AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/58.0.3029.110 Safari/537.36 SE 2.X MetaSr'
        }
        while True:
            try:
                # Non-blocking get so each worker exits once the queue is drained.
                url = self._queue.get_nowait()
            except Empty:
                break
            try:
                r = requests.get(url, headers=headers, timeout=1)
            except requests.RequestException:
                continue  # unreachable or timed out, move on to the next candidate
            if r.status_code == 200:
                print('[*] ' + url)


def start(url, file, count):
    # Build the work queue: one full URL per line of the wordlist.
    queue = Queue()
    with open('%s.txt' % file, 'r') as f:
        for line in f:
            queue.put(url + line.strip('\r\n'))

    threads = [WEB_DIR(queue) for _ in range(int(count))]
    for t in threads:
        t.start()
    for t in threads:
        t.join()


def main():
    parser = OptionParser()
    parser.add_option("-u", "--url", dest="url",
                      help="Target URL to scan")
    parser.add_option("-f", "--file", dest="filename",
                      help="Directory wordlist filename (without the .txt suffix)")
    parser.add_option("-t", "--thread", dest="count", type=int, default=10,
                      help="Number of scanner threads")
    options, args = parser.parse_args()
    if options.url and options.filename:
        start(options.url, options.filename, options.count)
    else:
        parser.print_help()
        sys.exit(1)


if __name__ == '__main__':
    main()
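
# Example invocation (hypothetical script name and wordlist; "dirs.txt" is assumed
# to sit next to the script and hold one path per line, e.g. "admin/", "login.php"):
#
#   python web_dir.py -u http://example.com/ -f dirs -t 20
#
# Each wordlist entry that answers with HTTP 200 is printed as
# "[*] http://example.com/<path>".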