aboutsummaryrefslogtreecommitdiffstats
path: root/toys/brhute-py/brhute_multitprocessing_broken.py
diff options
context:
space:
mode:
Diffstat (limited to 'toys/brhute-py/brhute_multitprocessing_broken.py')
-rw-r--r--toys/brhute-py/brhute_multitprocessing_broken.py121
1 files changed, 121 insertions, 0 deletions
diff --git a/toys/brhute-py/brhute_multitprocessing_broken.py b/toys/brhute-py/brhute_multitprocessing_broken.py
new file mode 100644
index 0000000..fb7f19d
--- /dev/null
+++ b/toys/brhute-py/brhute_multitprocessing_broken.py
@@ -0,0 +1,121 @@
from collections import deque
import asyncore
import multiprocessing
import Queue
import threading
import time
import warnings

from asynhttp import http_evented # in brhute repository
import asyncdns # sudo pip install asyncdns
+
+# brhute - asynchronous URL fetcher based on asynhttp
+# Uses multiple simultaneous connections and multiple requests per connections
+# 2013, Laurent Ghigonis <laurent@p1sec.com>
+
+# Python grequests resources :
+# http://stackoverflow.com/questions/16015749/in-what-way-is-grequests-asynchronous
+# https://github.com/kennethreitz/grequests/issues/13
+# http://stackoverflow.com/questions/13809650/using-grequests-to-send-a-pool-of-requests-how-can-i-get-the-response-time-of-e
+# https://gist.github.com/ibrahima/3153647 - request_queue.py
+# http://rubydoc.info/github/typhoeus/typhoeus/frames/Typhoeus - Ruby Typhoeus
+
+# XXX multiple processes, autodetect and distribute connections
+# DNS resolving with asyncdns
+
+class Brhute_connection():
+ def __init__(self, queue, ip, port, req_per_connection, cb_response,
+ interval=0, verbose=False):
+ self.queue = queue
+ self.ip = ip
+ self.port = port
+ self.req_per_connection = req_per_connection
+ self.cb_response_user = cb_response
+ self.interval = interval
+ self.verbose = verbose
+ self.ongoing = 0
+ self.hev = http_evented.http_evented((ip, port), onConnected=self._connected)
+ asyncore.loop()
+
+ def get(self, host, url):
+ if self.verbose:
+ print "XXX Brhute_connection.get"
+ headers = {'Host': host}
+ body = None
+ self.hev.make_HTTP_request("GET", url, body, headers,
+ self._cb_response)
+ self.ongoing += 1
+
+ def _connected(self):
+ self._send()
+
+ def _send(self):
+ if self.ongoing == self.req_per_connection:
+ return
+ # get an URL to send
+ try:
+ print "XXX queue get"
+ try:
+ host, url = self.queue.get(False)
+ except Queue.Empty:
+ return
+ except StopIteration, e:
+ return
+ if self.verbose:
+ print "[-] %s" % url
+ # send the url
+ self.get(host, url)
+ self.queue.task_done()
+
+ def _cb_response(self, response):
+ self.ongoing -= 1
+ if self.cb_response_user:
+ self.cb_response_user(response)
+ self._send()
+ time.sleep(self.interval)
+
class Brhute_ip:
    """ Fetch URLs from one IP
    url_iter is the iterator that provides the URLs.
    cb_response should return True for the processing to continue, and False
    to terminate.
    If you want to integrate it in a gevent driven program, use block=False"""
    def __init__(self, url_iter, ip, port=80, cb_response=None,
                 nb_connections=3, req_per_connection=10, interval=0,
                 verbose=False, block=True):
        # NOTE(review): 'block' is currently unused — the constructor
        # always blocks (see the sleep below).
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        warnings.warn("XXX WARNING: Don't expect this to work")
        self.url_iter = url_iter
        self.ip = ip
        self.port = port
        self.cb_response_user = cb_response
        self.nb_connections = nb_connections
        self.req_per_connection = req_per_connection
        self.interval = interval
        self.verbose = verbose
        # Joinable so Brhute_connection workers can task_done() each job.
        # (Requires 'import multiprocessing' at the top of the file — it
        # was missing in the original, making this a NameError at runtime.)
        queue = multiprocessing.JoinableQueue()

        # One worker process per connection; each runs its own asyncore loop.
        self.conns = deque()
        for i in range(nb_connections):
            p = multiprocessing.Process(target=self._proc,
                                        args=(queue, ip, port,
                                              req_per_connection, cb_response,
                                              interval, verbose))
            p.start()
            self.conns.append(p)
        # Feed all jobs to the workers.
        for host, url in url_iter:
            queue.put((host, url))
        time.sleep(60)  # XXX placeholder — should queue.join() / join workers

    def _proc(self, queue, ip, port, req_per_connection, cb_response,
              interval, verbose):
        # Worker entry point: runs one connection (and its asyncore loop)
        # to completion.
        Brhute_connection(queue, ip, port, req_per_connection, cb_response,
                          interval, verbose)
+
class Brhute_multi_ip:
    """Fetch URLs from multiple IPs pointing to the same content.

    Placeholder: constructing it only emits work-in-progress warnings.
    """
    def __init__(self):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        # typo fixed: "excpect" -> "expect" (matches Brhute_ip's message)
        warnings.warn("XXX WARNING: Don't expect this to work")
+
class Brhute:
    """Fetch URLs.

    Placeholder: constructing it only emits work-in-progress warnings.
    """
    def __init__(self):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        # typo fixed: "excpect" -> "expect" (matches Brhute_ip's message)
        warnings.warn("XXX WARNING: Don't expect this to work")