toys/brhute-py/brhute_multitprocessing_broken.py
from collections import deque
import time
import multiprocessing
import Queue
import asyncore
import warnings
from asynhttp import http_evented # in brhute repository
import asyncdns # sudo pip install asyncdns

# brhute - asynchronous URL fetcher based on asynhttp
# Uses multiple simultaneous connections and multiple requests per connection
# 2013, Laurent Ghigonis <laurent@p1sec.com>

# Python grequests resources:
# http://stackoverflow.com/questions/16015749/in-what-way-is-grequests-asynchronous
# https://github.com/kennethreitz/grequests/issues/13
# http://stackoverflow.com/questions/13809650/using-grequests-to-send-a-pool-of-requests-how-can-i-get-the-response-time-of-e
# https://gist.github.com/ibrahima/3153647 - request_queue.py
# http://rubydoc.info/github/typhoeus/typhoeus/frames/Typhoeus - Ruby Typhoeus

# XXX multiple processes, autodetect and distribute connections
# DNS resolving with asyncdns
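#
# Architecture overview (as implemented below): Brhute_ip fills a
# multiprocessing.JoinableQueue with (host, url) pairs and spawns one worker
# process per connection; each worker runs a Brhute_connection, which drives
# a single asynhttp connection inside its own asyncore loop.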

class Brhute_connection:
    """One HTTP connection, pipelining up to req_per_connection requests.
    Runs in a worker process: consumes (host, url) pairs from the shared
    queue and drives its own asyncore loop, so the constructor blocks."""
    def __init__(self, queue, ip, port, req_per_connection, cb_response,
                 interval=0, verbose=False):
        self.queue = queue
        self.ip = ip
        self.port = port
        self.req_per_connection = req_per_connection
        self.cb_response_user = cb_response
        self.interval = interval
        self.verbose = verbose
        self.ongoing = 0 # number of requests currently in flight
        self.hev = http_evented.http_evented((ip, port),
                                             onConnected=self._connected)
        asyncore.loop()

    def get(self, host, url):
        headers = {'Host': host}
        body = None # GET requests carry no body
        self.hev.make_HTTP_request("GET", url, body, headers,
                                   self._cb_response)
        self.ongoing += 1

    def _connected(self):
        # connection established: prime the request pipeline
        self._send()
    def _send(self):
        # fill the pipeline up to req_per_connection in-flight requests
        while self.ongoing < self.req_per_connection:
            try:
                host, url = self.queue.get(False)
            except Queue.Empty:
                return
            if self.verbose:
                print "[-] %s" % url
            self.get(host, url)
            self.queue.task_done()

    def _cb_response(self, response):
        self.ongoing -= 1
        if self.cb_response_user:
            # per the Brhute_ip docstring, a False return terminates
            # processing; ExitNow propagates out of asyncore.loop()
            if self.cb_response_user(response) is False:
                raise asyncore.ExitNow('cb_response requested termination')
        if self.interval:
            time.sleep(self.interval) # XXX blocks the asyncore loop
        self._send()
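
    # Flow summary: _connected() primes the pipeline, then each _cb_response()
    # frees a slot that _send() refills, keeping up to req_per_connection
    # requests in flight on this single connection.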

class Brhute_ip:
    """ Fetch URLs from one IP
    url_iter is the iterator that provides the URLs.
    cb_response should return True for the processing to continue, and False
    to terminate.
    If you want to integrate it in a gevent driven program, use block=False"""
    def __init__(self, url_iter, ip, port=80, cb_response=None,
                 nb_connections=3, req_per_connection=10, interval=0,
                 verbose=False, block=True):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        warnings.warn("XXX WARNING: Don't expect this to work")
        self.url_iter = url_iter
        self.ip = ip
        self.port = port
        self.cb_response_user = cb_response
        self.nb_connections = nb_connections
        self.req_per_connection = req_per_connection
        self.interval = interval
        self.verbose = verbose
        self.queue = multiprocessing.JoinableQueue()

        # one worker process per connection, all consuming the same queue
        self.conns = deque()
        for i in range(nb_connections):
            p = multiprocessing.Process(target=self._proc,
                                        args=(self.queue, ip, port,
                                              req_per_connection, cb_response,
                                              interval, verbose))
            p.start()
            self.conns.append(p)
        for host, url in url_iter:
            self.queue.put((host, url))
        if block:
            self.queue.join() # wait until every queued URL has been processed

    def _proc(self, queue, ip, port, req_per_connection, cb_response,
              interval, verbose):
        # worker process entry point: one Brhute_connection per process,
        # which blocks in its asyncore loop until the queue is drained
        Brhute_connection(queue, ip, port, req_per_connection, cb_response,
                          interval, verbose)

class Brhute_multi_ip:
    """Fetch URLs from multiple IPs pointing to the same content"""
    def __init__(self):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        warnings.warn("XXX WARNING: Don't excpect this to work")

class Brhute:
    """Fetch URLs"""
    def __init__(self):
        warnings.warn("XXX WARNING: WORK IN PROGRESS")
        warnings.warn("XXX WARNING: Don't excpect this to work")