path: root/toys/pphidden_async.py
import requests
import grequests
from collections import deque
import gevent
import sys
import argparse
import warnings
warnings.warn("XXX WORK IN PROGRESS")
warnings.warn("XXX don't excpect this to work !")

# http://www.pointerpointer.com/gridPositions.json

# Python grequests resources:
# https://gist.github.com/ibrahima/3153647 - request_queue.py
# http://rubydoc.info/github/typhoeus/typhoeus/frames/Typhoeus - Ruby Typhoeus
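#
# Roughly the grequests pattern this script builds on (a sketch only, not
# executed here; `urls` is a placeholder): share one requests.Session across
# several AsyncRequests so they reuse a keep-alive connection, and let gevent
# run them concurrently.
#
#   sess = requests.Session()
#   reqs = [grequests.get(u, session=sess) for u in urls]
#   for res in grequests.imap(reqs, size=10):   # at most 10 in flight
#       print(res.status_code, res.url)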

class SessionQueue:
    """ One persistent requests.Session plus the queue of its in-flight requests. """
    def __init__(self):
        self.session = requests.Session()
        self.ongoing = deque()

    def add(self, url, callback=None):
        # grequests installs `callback` as a requests 'response' hook
        req = grequests.get(url, session=self.session, callback=callback)
        grequests.send(req)  # runs the request in a gevent greenlet
        self.ongoing.append(req)

class Brute:
    """ Find existence of URLs provided by url_iter based on HTTP error code.
    Uses multiple simultaneous connections (HTTP/1.1) and multiple parallel
    requests per connection.
    If you want to integrate it into your own gevent loop, pass block=False."""
    def __init__(self, url_iter, nb_sessions=3, req_per_session=10, sleep=0,
                 debug=False, block=True):
        self.url_iter = url_iter
        self.nb_sessions = nb_sessions
        self.req_per_session = req_per_session
        self.sleep = sleep
        self.debug = debug
        self.ips = None # XXX TODO
        self.found = False
        self.url = None
        self.sessions = deque()
        self.ongoing = 0
        for i in range(nb_sessions):
            self.sessions.append(SessionQueue())
        self._send()
        if block:
            self._wait()

    def _send(self):
        while self.ongoing < self.nb_sessions * self.req_per_session:
            try:
                url = next(self.url_iter)
            except StopIteration:
                return
            # pick the least loaded session; the while condition guarantees
            # that at least one of them still has a free slot
            s = min(self.sessions, key=lambda q: len(q.ongoing))
            print("[-] %s" % url)
            s.add(url, callback=lambda res, queue=s, **kw: self._cb_response(queue, res))
            self.ongoing += 1

    def _cb_response(self, queue, res):
        # NB: the hook only fires for completed responses; transport-level
        # errors are swallowed by grequests and leave the counters untouched
        if self.debug:
            print("response: %s %s" % (res.status_code, res.url))
        if res.ok:  # a 2xx answer means the image exists
            self.found = True
            self.url = res.url
        if queue.ongoing:
            queue.ongoing.popleft()
        self.ongoing -= 1
        if self.sleep:
            gevent.sleep(self.sleep)
        if not self.found:
            self._send()

    def _wait(self):
        # yield to the gevent hub until a hit is found or every request is done
        while not self.found and self.ongoing > 0:
            gevent.sleep(0.05)
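
# Non-blocking use (sketch): with block=False the caller has to yield to the
# gevent hub itself, e.g.
#   b = Brute(url_iter, block=False)
#   while not b.found and b.ongoing:
#       gevent.sleep(0.1)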

class Pp_url:
    """ Iterator over candidate image URLs, scanning the (x, y) grid. """
    def __init__(self, image, x, y, max_x=2000, max_y=2000):
        self.base_url = "http://www.pointerpointer.com/images"
        self.image = image
        self.x = x
        self.y = y
        self.max_x = max_x
        self.max_y = max_y
        print(">>> Looking for image %d <<<" % image)
        print(">>>> starting x=%d y=%d <<<<" % (x, y))
    def __iter__(self):
        return self
    def __next__(self):
        if self.x > self.max_x:
            raise StopIteration
        res = "%s/N%04d_%d_%d.jpg" % (self.base_url, self.image, self.x, self.y)
        self.y += 1
        if self.y > self.max_y:
            self.x += 1
            self.y = 0
        return res
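
# For image 73 starting at (0, 0), the iterator yields, in order:
#   http://www.pointerpointer.com/images/N0073_0_0.jpg
#   http://www.pointerpointer.com/images/N0073_0_1.jpg
#   ... y then x increasing, until (max_x, max_y) is exhausted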

parser = argparse.ArgumentParser(description='pphidden',
                                 epilog="Example: %s 73 0 0" % sys.argv[0])
parser.add_argument('image', action="store", type=int,
                    help="image number")
parser.add_argument('start_x', action="store", type=int,
                    help="Start at coordinate X=")
parser.add_argument('start_y', action="store", type=int,
                    help="Start at coordinate Y=")
parser.add_argument('-v', action="store_true", dest="verbose", default=False,
                    help="verbose")
args = parser.parse_args()                                               

url_iter = Pp_url(args.image, args.start_x, args.start_y)
b = Brute(url_iter, debug=args.verbose)
if b.found:
    print("[*] found: %s" % b.url)
else:
    print("[*] not found")

sys.exit(0 if b.found else 1)
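
# Example invocation (per the argparse epilog above; output abbreviated):
#   $ python pphidden_async.py 73 0 0
#   [-] http://www.pointerpointer.com/images/N0073_0_0.jpg
#   ...
# Exit status is 0 if an existing image URL was found, 1 otherwise.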