from collections import deque
import time
import asyncore
from asynhttp import http_evented # in brhute repository
import asyncdns # sudo pip install asyncdns
import warnings

# grbrute - asynchronous URL fetcher based on asynhttp
# Uses multiple simultaneous connections and multiple requests per connection
# 2013, Laurent Ghigonis
#
# Python grequests resources:
# http://stackoverflow.com/questions/16015749/in-what-way-is-grequests-asynchronous
# https://github.com/kennethreitz/grequests/issues/13
# http://stackoverflow.com/questions/13809650/using-grequests-to-send-a-pool-of-requests-how-can-i-get-the-response-time-of-e
# https://gist.github.com/ibrahima/3153647 - request_queue.py
# http://rubydoc.info/github/typhoeus/typhoeus/frames/Typhoeus - Ruby Typhoeus
#
# XXX multiple processes, autodetect and distribute connections
# XXX DNS resolving with asyncdns

class Brhute_connection:
    def __init__(self, url_iter, ip, port, req_per_connection, cb_response,
                 interval=0, verbose=False):
        self.url_iter = url_iter
        self.ip = ip
        self.port = port
        self.req_per_connection = req_per_connection
        self.cb_response_user = cb_response
        self.interval = interval
        self.verbose = verbose
        self.ongoing = 0
        self.hev = http_evented.http_evented((ip, port),
                                             onConnected=self._connected)

    def get(self, host, url):
        if self.verbose:
            print "XXX Brhute_connection.get"
        headers = {'Host': host}
        body = None
        self.hev.make_HTTP_request("GET", url, body, headers,
                                   self._cb_response)
        self.ongoing += 1

    def _connected(self):
        self._send()

    def _send(self):
        # keep this connection filled up to req_per_connection pending requests
        while self.ongoing < self.req_per_connection:
            # get a URL to send
            try:
                (host, url) = next(self.url_iter)
            except StopIteration:
                return
            if self.verbose:
                print "[-] %s" % url
            # send the url
            self.get(host, url)

    def _cb_response(self, response):
        self.ongoing -= 1
        if self.cb_response_user:
            # per the Brhute_ip docstring, a False return from the user
            # callback terminates processing on this connection
            if self.cb_response_user(response) is False:
                return
        self._send()
        time.sleep(self.interval)

class Brhute_ip:
    """ Fetch URLs from one IP
    url_iter is the iterator that provides the URLs.
    cb_response should return True for the processing to continue, and False
    to terminate.
    If you want to integrate it in a gevent driven program, use block=False"""
    def __init__(self, url_iter, ip, port=80, cb_response=None,
                 nb_connections=3, req_per_connection=10, interval=0,
                 verbose=False, block=True):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        warnings.warn("XXX WARNING: Don't expect this to work")
        self.url_iter = url_iter
        self.ip = ip
        self.port = port
        self.cb_response_user = cb_response
        self.nb_connections = nb_connections
        self.req_per_connection = req_per_connection
        self.interval = interval
        self.verbose = verbose
        self.conns = deque()
        for i in range(nb_connections):
            self.conns.append(Brhute_connection(url_iter, ip, port,
                                                req_per_connection,
                                                cb_response, interval,
                                                verbose))
        if block:
            asyncore.loop()

class Brhute_multi_ip:
    """Fetch URLs from multiple IPs pointing to the same content"""
    def __init__(self):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        warnings.warn("XXX WARNING: Don't expect this to work")

class Brhute:
    """Fetch URLs"""
    def __init__(self):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        warnings.warn("XXX WARNING: Don't expect this to work")
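
# --- Example usage ---------------------------------------------------------
# A minimal sketch of how Brhute_ip is meant to be driven, based on its
# docstring above.  The host, paths and target IP are illustrative
# placeholders, and the exact shape of `response` depends on asynhttp's
# http_evented, so it is only repr()'d here.

def _example_urls():
    """Yield (host, url) tuples, as consumed by Brhute_connection._send()."""
    for path in ("/", "/index.html", "/robots.txt"):
        yield ("example.com", path)

def _example_cb_response(response):
    print "[+] response: %r" % (response,)
    return True  # per the Brhute_ip docstring, False would stop processing

if __name__ == "__main__":
    # block=True (the default) runs asyncore.loop() until the requests finish;
    # pass block=False to drive the asyncore loop yourself (e.g. under gevent).
    Brhute_ip(_example_urls(), "127.0.0.1", port=80,
              cb_response=_example_cb_response,
              nb_connections=2, req_per_connection=5, verbose=True)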