Diffstat (limited to 'toys')
-rw-r--r--  toys/pphidden_async.py  |  83
1 file changed, 14 insertions(+), 69 deletions(-)
diff --git a/toys/pphidden_async.py b/toys/pphidden_async.py
index 3f00b0a..229a181 100644
--- a/toys/pphidden_async.py
+++ b/toys/pphidden_async.py
@@ -1,65 +1,9 @@
-import requests
-import grequests
-from collections import deque
-import time
import sys
import argparse
-import warnings
-warnings.warn("XXX WORK IN PROGRESS")
-warnings.warn("XXX don't excpect this to work !")
+import grbrute
# http://www.pointerpointer.com/gridPositions.json
-# Python grequests ressources :
-# https://gist.github.com/ibrahima/3153647 - request_queue.py
-# http://rubydoc.info/github/typhoeus/typhoeus/frames/Typhoeus - Ruby Typhoeus
-
-class SessionQueue:
- def __init__(self):
- self.session = requests.Session()
- self.ongoing = deque()
-
- def add(self, url):
- req = grequests.get(url, session=self.sessions)
- grequests.send(req)
- self.ongoing.append(req)
-
-class Brute:
- """ Find existence of URLs provided by url_iter based on HTTP error code.
- Uses multiple simultaneous connections (HTTP/1.1) and multiple parralel
- requests per connection.
- If you want to integrate"""
- def __init__(self, url_iter, nb_sessions=3, req_per_session=10, sleep=0,
- debug=False, block=True):
- self.url_iter = url_iter
- self.nb_sessions = nb_sessions
- self.req_per_session = req_per_session
- self.sleep = sleep
- self.debug = debug
- self.ips = ips # XXX TODO
- self.sessions = deque()
- self.ongoing = 0
- for i in range(nb_sessions):
- self.sessions.append(SessionQueue())
- self._send()
-
- def _send(self):
- while self.ongoing < self.nb_sessions * self.req_per_session:
- try:
- url = self.url_iter.next()
- except StopIteration, e:
- return
- if len(s.ongoing) < self.req_per_session:
- print "[-] %s" % url
- s.add(url)
- self.ongoing += 1
-
- def _cb_response(self, res):
- print "response: %s" % res
- self.ongoing -= 1
- self._send()
- time.sleep(self.sleep)
-
class Pp_url:
def __init__(self, image, x, y, max_x=2000, max_y=2000):
self.base_url = "http://www.pointerpointer.com/images"
@@ -72,9 +16,9 @@ class Pp_url:
print ">>>> starting x=%d y=%d <<<<" % (x, y)
def __iter__(self):
return self
- def __next__(self):
+ def next(self):
res = "%s/N%04d_%d_%d.jpg" % (self.base_url, self.image, self.x, self.y)
- self.index += 1
+ self.y += 1
if self.y > self.max_y:
self.x += 1
self.y = 0
@@ -82,10 +26,16 @@ class Pp_url:
raise StopIteration
return res
-parser = argparse.ArgumentParser(description='pphidden',
+def cb_response(res):
+ print "[-] %s : %d" % (res.url, res.status_code)
+ if res.status_code != 404:
+ print "[*] found: %s" % res.url
+ sys.exit(0)
+
+parser = argparse.ArgumentParser(description='pphidden_async',
epilog="Example: %s 73 0 0" % sys.argv[0])
parser.add_argument('image', action="store", type=int,
- help="image number")
+ help="Image number")
parser.add_argument('start_x', action="store", type=int,
help="Start at coordinate X=")
parser.add_argument('start_y', action="store", type=int,
@@ -95,11 +45,6 @@ parser.add_argument('-v', action="store_true", dest="verbose", default=False,
args = parser.parse_args()
url_iter = Pp_url(args.image, args.start_x, args.start_y)
-b = Brute(url_iter)
-if b.found:
- print "[*] found: %s" % b.url
-else:
- print "[*] not found"
-
-sys.exit(0 if b.found else 1)
-
+grbrute.Grbrute(url_iter, cb_response, verbose=args.verbose)
+print "[*] not found"
+sys.exit(1)
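
For reference, below is a minimal sketch of how toys/pphidden_async.py reads after this commit, assembled from the "+" and context lines of the diff. The body of Pp_url.__init__, the exact stop condition in next(), the help text for start_y and -v, and grbrute's behaviour of pulling URLs from the iterator and invoking the callback per response all fall outside the visible hunks, so those parts are assumptions, not a record of the committed file.

import sys
import argparse
import grbrute

# http://www.pointerpointer.com/gridPositions.json

class Pp_url:
    def __init__(self, image, x, y, max_x=2000, max_y=2000):
        self.base_url = "http://www.pointerpointer.com/images"
        # XXX assumed: stored state is not visible in the hunks
        self.image, self.x, self.y = image, x, y
        self.max_x, self.max_y = max_x, max_y
        print ">>>> starting x=%d y=%d <<<<" % (x, y)

    def __iter__(self):
        return self

    def next(self):
        res = "%s/N%04d_%d_%d.jpg" % (self.base_url, self.image, self.x, self.y)
        self.y += 1
        if self.y > self.max_y:
            self.x += 1
            self.y = 0
            if self.x > self.max_x:  # XXX assumed stop condition
                raise StopIteration
        return res

def cb_response(res):
    # Any status other than 404 is treated as a hit and ends the search
    print "[-] %s : %d" % (res.url, res.status_code)
    if res.status_code != 404:
        print "[*] found: %s" % res.url
        sys.exit(0)

parser = argparse.ArgumentParser(description='pphidden_async',
    epilog="Example: %s 73 0 0" % sys.argv[0])
parser.add_argument('image', action="store", type=int,
    help="Image number")
parser.add_argument('start_x', action="store", type=int,
    help="Start at coordinate X=")
parser.add_argument('start_y', action="store", type=int,
    help="Start at coordinate Y=")  # XXX assumed wording, outside the hunks
parser.add_argument('-v', action="store_true", dest="verbose", default=False,
    help="verbose")  # XXX assumed wording, line truncated by the hunk header
args = parser.parse_args()

url_iter = Pp_url(args.image, args.start_x, args.start_y)
# XXX assumed: Grbrute drives the iterator, fetches the URLs asynchronously
# and calls cb_response for each reply; only this constructor call is shown
# in the diff.
grbrute.Grbrute(url_iter, cb_response, verbose=args.verbose)
print "[*] not found"
sys.exit(1)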