aboutsummaryrefslogtreecommitdiffstats
path: root/toys/brhute/brhute_threaded.py
diff options
context:
space:
mode:
Diffstat (limited to 'toys/brhute/brhute_threaded.py')
-rw-r--r--toys/brhute/brhute_threaded.py75
1 files changed, 75 insertions, 0 deletions
diff --git a/toys/brhute/brhute_threaded.py b/toys/brhute/brhute_threaded.py
new file mode 100644
index 0000000..22ab2bb
--- /dev/null
+++ b/toys/brhute/brhute_threaded.py
@@ -0,0 +1,75 @@
+from collections import deque
+import time
+import httplib_pipelining
+import threading
+import threaded_resolver
+import Queue
+import warnings
+
+# brhute - asynchronous URL fetcher using threads and HTTP pipelining
+# Makes multiple simultaneous connections and multiple requests per connections
+# 2013, Laurent Ghigonis <laurent@p1sec.com>
+
+# XXX TODO: multiple processes, autodetect and distribute connections
+# DNS resolving with asyncdns
+
class Brhute_connection(threading.Thread):
    # Worker thread: pulls URLs from the shared queue and fetches them over
    # one pipelined HTTP connection to the target host.
    def __init__(self, ip, port, queue, req_per_connection, cb_response,
                 interval=0, verbose=False):
        """Store per-connection parameters.

        ip / port -- target server. NOTE(review): port is stored but never
            passed to HTTP_pipeline in run(); looks like a latent bug for
            non-default ports -- confirm against httplib_pipelining.
        queue -- Queue.Queue of URLs shared between all worker threads.
        req_per_connection -- intended pipelining depth; currently unused
            by run() -- presumably meant for the pipelining layer, verify.
        cb_response -- user callback invoked for each response.
        interval -- delay between requests; currently unused here.
        verbose -- debug flag; currently unused here.
        """
        threading.Thread.__init__(self)
        self.queue = queue
        self.ip = ip
        self.port = port
        self.req_per_connection = req_per_connection
        self.cb_response_user = cb_response
        self.interval = interval
        self.verbose = verbose

    def run(self):
        # Delegate the whole fetch loop to the pipelined-HTTP helper, which
        # consumes self.queue and reports results through the user callback.
        # NOTE(review): self.port is not forwarded here -- confirm
        # HTTP_pipeline defaults to 80 or this silently ignores the port.
        conn = httplib_pipelining.HTTP_pipeline(self.ip, self.queue, self.cb_response_user)
        conn.run()
+
class Brhute_ip:
    """ Fetch URLs from one IP
    url_iter is the iterator that provides the URLs.
    cb_response should return True for the processing to continue, and False
    to terminate.
    If you want to integrate it in a gevent driven program, use block=False"""
    def __init__(self, url_iter, ip, port=80, cb_response=None,
                 nb_connections=3, req_per_connection=10, interval=0,
                 verbose=False, block=True):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        warnings.warn("XXX WARNING: Don't expect this to work")
        self.url_iter = url_iter
        self.ip = ip
        self.port = port
        self.cb_response_user = cb_response
        self.nb_connections = nb_connections
        self.req_per_connection = req_per_connection
        self.interval = interval
        self.verbose = verbose
        self.queue = Queue.Queue()

        # Spawn the worker connections as daemon threads so the process can
        # exit even if workers are still blocked waiting on the queue.
        for _ in range(nb_connections):
            c = Brhute_connection(ip, port, self.queue,
                                  req_per_connection, cb_response,
                                  interval, verbose)
            c.setDaemon(True)
            c.start()

        # Bug fix: 'block' was accepted but ignored, so the constructor
        # always blocked on queue.join() regardless of the documented
        # block=False mode.  Honor it: feed synchronously when block=True,
        # otherwise feed from a background daemon thread and return at once.
        if block:
            self._feed()
        else:
            feeder = threading.Thread(target=self._feed)
            feeder.setDaemon(True)
            feeder.start()

    def _feed(self):
        # Push every URL to the workers, then wait until every queued item
        # has been processed (workers are expected to call task_done()).
        for host, url in self.url_iter:
            self.queue.put(url)
        self.queue.join()
+
class Brhute_multi_ip:
    """Fetch URLs from multiple IPs pointing to the same content"""
    def __init__(self):
        # Not implemented yet; warn loudly so callers don't rely on it.
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        # Bug fix: typo in the user-visible warning message
        # ("excpect" -> "expect"), matching Brhute_ip's wording.
        warnings.warn("XXX WARNING: Don't expect this to work")
+
class Brhute:
    """Fetch URLs"""
    def __init__(self):
        # Not implemented yet; warn loudly so callers don't rely on it.
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        # Bug fix: typo in the user-visible warning message
        # ("excpect" -> "expect"), matching Brhute_ip's wording.
        warnings.warn("XXX WARNING: Don't expect this to work")