aboutsummaryrefslogtreecommitdiffstats
path: root/toys/brhute-py/brhute_twisted.py
blob: 22ab2bb958cbcbf6b401231210e9642f19160f4d (plain) (blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
from collections import deque
import time
import httplib_pipelining
import threading
import threaded_resolver
import Queue
import warnings

# brhute - asynchronous URL fetcher using threads and HTTP pipelining
# Makes multiple simultaneous connections and multiple requests per connection
# 2013, Laurent Ghigonis <laurent@p1sec.com>

# XXX TODO: multiple processes, autodetect and distribute connections
# XXX TODO: DNS resolving with asyncdns

class Brhute_connection(threading.Thread):
    """Worker thread driving one pipelined HTTP connection.

    Each worker hands the shared URL queue and the user callback over to
    an httplib_pipelining.HTTP_pipeline bound to the target ip, then lets
    that pipeline do the fetching until it returns.

    NOTE(review): port, req_per_connection and interval are stored but not
    passed to HTTP_pipeline -- presumably not implemented yet; confirm
    against httplib_pipelining before relying on them.
    """
    def __init__(self, ip, port, queue, req_per_connection, cb_response,
                 interval=0, verbose=False):
        threading.Thread.__init__(self)
        self.ip = ip
        self.port = port
        self.queue = queue
        self.req_per_connection = req_per_connection
        self.cb_response_user = cb_response
        self.interval = interval
        self.verbose = verbose

    def run(self):
        # Delegate the actual fetch loop to the pipelined HTTP client.
        pipeline = httplib_pipelining.HTTP_pipeline(self.ip, self.queue,
                                                    self.cb_response_user)
        pipeline.run()

class Brhute_ip:
    """ Fetch URLs from one IP
    url_iter is the iterator that provides the URLs.
    cb_response should return True for the processing to continue, and False
    to terminate.
    If you want to integrate it in a gevent driven program, use block=False"""
    def __init__(self, url_iter, ip, port=80, cb_response=None,
                 nb_connections=3, req_per_connection=10, interval=0,
                 verbose=False, block=True):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        warnings.warn("XXX WARNING: Don't expect this to work")
        self.url_iter = url_iter
        self.ip = ip
        self.port = port
        self.cb_response_user = cb_response
        self.nb_connections = nb_connections
        self.req_per_connection = req_per_connection
        self.interval = interval
        self.verbose = verbose
        self.queue = Queue.Queue()

        # Spawn the per-connection workers; daemonized so they do not keep
        # the process alive once the caller is done.
        for i in range(nb_connections):
            c = Brhute_connection(ip, port, self.queue,
                                  req_per_connection, cb_response,
                                  interval, verbose)
            c.setDaemon(True)
            c.start()

        # Bug fix: 'block' was accepted and documented but ignored -- the
        # feed loop and queue.join() always blocked the caller. Honor
        # block=False by feeding the queue from a background daemon thread;
        # block=True keeps the original synchronous behavior.
        if block:
            self._feed()
        else:
            feeder = threading.Thread(target=self._feed)
            feeder.setDaemon(True)
            feeder.start()

    def _feed(self):
        # Push every URL to the workers, then wait until the workers have
        # marked all queued items done.
        for host, url in self.url_iter:
            self.queue.put(url)
        self.queue.join()

class Brhute_multi_ip:
    """Fetch URLs from multiple IPs pointing to the same content"""
    def __init__(self):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        # Fixed typo in the warning text: "excpect" -> "expect".
        warnings.warn("XXX WARNING: Don't expect this to work")

class Brhute:
    """Fetch URLs"""
    def __init__(self):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        # Fixed typo in the warning text: "excpect" -> "expect".
        warnings.warn("XXX WARNING: Don't expect this to work")