aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.gitignore2
-rw-r--r--README.rst6
-rw-r--r--github3/__init__.py2
-rw-r--r--github3/api.py323
-rw-r--r--github3/config.py58
-rw-r--r--github3/converters.py100
-rw-r--r--github3/core.py127
-rw-r--r--github3/errors.py37
-rw-r--r--github3/exceptions.py14
-rw-r--r--github3/handlers/__init__.py (renamed from github3/packages/omnijson/packages/__init__.py)0
-rw-r--r--github3/handlers/base.py85
-rw-r--r--github3/handlers/gists.py28
-rw-r--r--github3/handlers/user.py205
-rw-r--r--github3/helpers.py21
-rw-r--r--github3/models.py254
-rw-r--r--github3/models/__init__.py4
-rw-r--r--github3/models/base.py19
-rw-r--r--github3/models/gists.py77
-rw-r--r--github3/models/orgs.py25
-rw-r--r--github3/models/repos.py31
-rw-r--r--github3/models/user.py65
-rw-r--r--github3/packages/omnijson/__init__.py13
-rw-r--r--github3/packages/omnijson/core.py93
-rw-r--r--github3/packages/omnijson/packages/simplejson/__init__.py438
-rw-r--r--github3/packages/omnijson/packages/simplejson/decoder.py421
-rw-r--r--github3/packages/omnijson/packages/simplejson/encoder.py503
-rw-r--r--github3/packages/omnijson/packages/simplejson/ordered_dict.py119
-rw-r--r--github3/packages/omnijson/packages/simplejson/scanner.py70
-rw-r--r--reqs.txt2
-rw-r--r--tests/gist_tests.py102
30 files changed, 1001 insertions, 2243 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..fa80f46
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+*.py?
+*.swp
diff --git a/README.rst b/README.rst
index ab6d9c7..c924c4b 100644
--- a/README.rst
+++ b/README.rst
@@ -1,3 +1,7 @@
+Fork
+======================================
+Refactor and complete api wrapper. Intensive work in progress
+
Github3: Python wrapper for the (new) GitHub API v3
===================================================
@@ -78,4 +82,4 @@ Roadmap
- Sphinx Documetnation
- Examples
- Unittests
-- OAuth Last (how?) \ No newline at end of file
+- OAuth Last (how?)
diff --git a/github3/__init__.py b/github3/__init__.py
index c2fdbad..40a96af 100644
--- a/github3/__init__.py
+++ b/github3/__init__.py
@@ -1,3 +1 @@
# -*- coding: utf-8 -*-
-
-from core import * \ No newline at end of file
diff --git a/github3/api.py b/github3/api.py
index 78f87b3..b7435ff 100644
--- a/github3/api.py
+++ b/github3/api.py
@@ -1,228 +1,129 @@
-# -*- coding: utf-8 -*-
-
-"""
-github3.api
-~~~~~~~~~~~
-
-This module provies the core GitHub3 API interface.
-"""
-
-from urlparse import urlparse, parse_qs
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
import requests
-from decorator import decorator
-
-from .packages import omnijson as json
-from .packages.link_header import parse_link_value
-
-from .models import *
-from .helpers import is_collection, to_python, to_api, get_scope
-from .config import settings
+import json
+from errors import GithubError
-
-
-
-PAGING_SIZE = 100
+RESOURCES_PER_PAGE = 100
class GithubCore(object):
+ """
+ Wrapper to github api requests
- _rate_limit = None
- _rate_limit_remaining = None
+ Methods: get, head, post, patch, put, delete
+ """
+
+ requests_remaining = None
+ base_url = 'https://api.github.com/'
def __init__(self):
+ """
+ Init `requests.session`
+ Init JSON parser
+ """
self.session = requests.session()
- self.session.params = {'per_page': PAGING_SIZE}
-
-
- @staticmethod
- def _resource_serialize(o):
- """Returns JSON serialization of given object."""
- return json.dumps(o)
-
-
- @staticmethod
- def _resource_deserialize(s):
- """Returns dict deserialization of a given JSON string."""
-
- try:
- return json.loads(s)
- except ValueError:
- raise ResponseError('The API Response was not valid.')
-
-
- @staticmethod
- def _generate_url(endpoint):
- """Generates proper endpoint URL."""
-
- if is_collection(endpoint):
- resource = map(str, endpoint)
- resource = '/'.join(endpoint)
+ self.session.params = {'per_page': RESOURCES_PER_PAGE}
+ self._parser = json
+
+ def get(self, request, paginate=False, **kwargs):
+ """
+ GET request
+
+ :param paginate: Boolean to return link header to paginate
+ """
+ response = self._request('GET', request, **kwargs)
+ content = self._parser.loads(response.content)
+ if paginate:
+ return response.headers.get('link'), content
else:
- resource = endpoint
-
- return (settings.base_url + resource)
-
-
- def _requests_post_hook(self, r):
- """Post-processing for HTTP response objects."""
-
- self._rate_limit = int(r.headers.get('x-ratelimit-limit', -1))
- self._rate_limit_remaining = int(r.headers.get('x-ratelimit-remaining', -1))
-
- return r
-
-
- def _http_resource(self, verb, endpoint, params=None, check_status=True, **etc):
-
- url = self._generate_url(endpoint)
- args = (verb, url)
-
- if params:
- kwargs = {'params': params}
- kwargs.update(etc)
+ return content
+
+ def head(self, request, **kwargs):
+ """ HEAD request """
+ return self._request('HEAD', request, **kwargs)
+
+ def post(self, request, data=None, **kwargs):
+ """
+ POST request
+
+ :param data: raw python object to send
+ """
+ kwargs['data'] = self._parser.dumps(data)
+ response = self._request('POST', request, **kwargs)
+ assert response.status_code == 201
+ return self._parser.loads(response.content)
+
+ def patch(self, request, data=None, **kwargs):
+ """
+ PATCH request
+
+ :param data: raw python object to send
+ """
+ kwargs['data'] = self._parser.dumps(data)
+ response = self._request('PATCH', request, **kwargs)
+ assert response.status_code == 200
+ return self._parser.loads(response.content)
+
+ def put(self, request, **kwargs):
+ """ PUT request """
+ response = self._request('PUT', request, **kwargs)
+ assert response.status_code == 204
+ return response
+
+ def delete(self, request, **kwargs):
+ """ DELETE request """
+
+ data = kwargs.get('data')
+ if data:
+ kwargs['data'] = self._parser.dumps(data)
+ response = self._request('DELETE', request, **kwargs)
+ assert response.status_code == 204
+ return response
+
+ def _parse_args(self, request_args):
+ """
+ Argument parser for the `_request` method
+
+ It check keyword args to parse extra request args to params
+ Sample:
+ _parse_args(arg1=1, arg2=2) => params = {'arg1': 1, 'arg2': 2}
+ """
+ request_core = (
+ 'params','data','headers','cookies','files','auth','timeout',
+ 'allow_redirects','proxies','return_response','config')
+ request_params = request_args.get('params')
+ extra_params = {}
+ for k, v in request_args.items():
+ if k in request_core: continue
+ extra_params.update({k: v})
+ del request_args[k]
+ if request_params:
+ request_args['params'].update(extra_params)
else:
- kwargs = etc
-
- r = self.session.request(*args, **kwargs)
- r = self._requests_post_hook(r)
-
- if check_status:
- r.raise_for_status()
-
- return r
-
-
- def _get_resource(self, resource, obj, **kwargs):
-
- r = self._http_resource('GET', resource, params=kwargs)
- item = self._resource_deserialize(r.content)
-
- return obj.new_from_dict(item, gh=self)
-
- def _patch_resource(self, resource, data, **kwargs):
- r = self._http_resource('PATCH', resource, data=data, params=kwargs)
- msg = self._resource_deserialize(r.content)
-
- return msg
-
- def _post_resource(self, resource, obj, data, **kwargs):
- r = self._http_resource('POST', resource, data=data, params=kwargs)
- item = self._resource_deserialize(r.content)
-
- return obj.new_from_dict(item, gh=self)
+ request_args['params'] = extra_params
- @staticmethod
- def _total_pages_from_header(link_header):
+ return request_args
- if link_header is None:
- return link_header
-
- page_info = {}
-
- for link in link_header.split(','):
-
- uri, meta = map(str.strip, link.split(';'))
-
- # Strip <>'s
- uri = uri[1:-1]
-
- # Get query params from header.
- q = parse_qs(urlparse(uri).query)
- meta = meta[5:-1]
-
- page_info[meta] = q
-
- try:
- return int(page_info['last']['page'].pop())
- except KeyError:
- return True
-
- def _get_resources(self, resource, obj, limit=None, **kwargs):
-
- if limit is not None:
- assert limit > 0
-
- moar = True
- is_truncated = (limit > PAGING_SIZE) or (limit is None)
- r_count = 0
- page = 1
-
- while moar:
-
- if not is_truncated:
- kwargs['per_page'] = limit
- moar = False
- else:
- kwargs['page'] = page
- if limit:
- if (limit - r_count) < PAGING_SIZE:
- kwargs['per_page'] = (limit - r_count)
- moar = False
-
- r = self._http_resource('GET', resource, params=kwargs)
- max_page = self._total_pages_from_header(r.headers['link'])
-
- if (max_page is True) or (max_page is None):
- moar = False
-
- d_items = self._resource_deserialize(r.content)
-
- for item in d_items:
- if (r_count < limit) or (limit is None):
- r_count += 1
- yield obj.new_from_dict(item, gh=self)
- else:
- moar = False
-
- page += 1
-
-
- def _to_map(self, obj, iterable):
- """Maps given dict iterable to a given Resource object."""
-
- a = list()
-
- for it in iterable:
- a.append(obj.new_from_dict(it, rdd=self))
-
- return a
-
- def _get_url(self, resource):
-
- if is_collection(resource):
- resource = map(str, resource)
- resource = '/'.join(resource)
-
- return resource
+ def _request(self, verb, request, **kwargs):
+ """
+ Http request wrapper
+ :param verb: Http method
+ :param request : Url query request
+ :param kwargs: Keyword args to request
+ """
+ request = self.base_url + request
+ parsed_args = self._parse_args(kwargs)
+ response = self.session.request(verb, request, **parsed_args)
+ self.requests_remaining = response.headers.get(
+ 'x-ratelimit-remaining',-1)
+ error = GithubError(response)
+ error.process()
+ return response
class Github(GithubCore):
- """docstring for Github"""
-
- def __init__(self):
- super(Github, self).__init__()
- self.is_authenticated = False
-
-
- def get_user(self, username):
- """Get a single user."""
- return self._get_resource(('users', username), User)
-
-
- def get_me(self):
- """Get the authenticated user."""
- return self._get_resource(('user'), CurrentUser)
-
- def get_repo(self, username, reponame):
- """Get the given repo."""
- return self._get_resource(('repos', username, reponame), Repo)
-
- def get_org(self, login):
- """Get organization."""
- return self._get_resource(('orgs', login), Org)
-
-
-class ResponseError(Exception):
- """The API Response was unexpected."""
-
+ pass
diff --git a/github3/config.py b/github3/config.py
deleted file mode 100644
index 9fbf305..0000000
--- a/github3/config.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-github3.config
-~~~~~~~~~~~~~~
-
-This module provides the Github3 settings feature set.
-
-"""
-
-class Settings(object):
- _singleton = {}
-
- # attributes with defaults
- __attrs__ = []
-
- def __init__(self, **kwargs):
- super(Settings, self).__init__()
-
- self.__dict__ = self._singleton
-
-
- def __call__(self, *args, **kwargs):
- # new instance of class to call
- r = self.__class__()
-
- # cache previous settings for __exit__
- r.__cache = self.__dict__.copy()
- map(self.__cache.setdefault, self.__attrs__)
-
- # set new settings
- self.__dict__.update(*args, **kwargs)
-
- return r
-
-
- def __enter__(self):
- pass
-
-
- def __exit__(self, *args):
-
- # restore cached copy
- self.__dict__.update(self.__cache.copy())
- del self.__cache
-
-
- def __getattribute__(self, key):
- if key in object.__getattribute__(self, '__attrs__'):
- try:
- return object.__getattribute__(self, key)
- except AttributeError:
- return None
- return object.__getattribute__(self, key)
-
-settings = Settings()
-settings.verbose = False
-settings.base_url = 'https://api.github.com/' \ No newline at end of file
diff --git a/github3/converters.py b/github3/converters.py
new file mode 100644
index 0000000..1df61a6
--- /dev/null
+++ b/github3/converters.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+from .core import Converter
+
+class Rawlizer(Converter):
+ """ Raw converter """
+
+ def inject(self, fake):
+ pass
+
+ def loads(self, raw_resource):
+ return raw_resource
+
+ def dumps(self):
+ pass
+
+class Json(Converter):
+ """ Json converter """
+
+ def __init__(self):
+ import json
+ self.parser = json
+
+ def inject(self, fake):
+ pass
+
+ def loads(self, raw_resource):
+ return self.parser.dumps(raw_resource)
+
+ def dumps(self):
+ pass
+
+class Modelizer(Converter):
+ """ Own model converter """
+
+ def __init__(self, model=None):
+ if model:
+ self.inject(model)
+
+ def _parse_date(self, string_date):
+ from datetime import datetime
+ try:
+ date = datetime.strptime(string_date, '%Y-%m-%dT%H:%M:%SZ')
+ except TypeError:
+ date = None
+
+ return date
+
+ def inject(self, model):
+ self.model = model
+
+ def _parse_map(self, model, raw_resource):
+ return Modelizer(model).loads(raw_resource)
+
+ def _parse_collection_map(self, model, raw_resources):
+ # Dict of resources (Ex: Gist file)
+ if getattr(raw_resources, 'items', False):
+ dict_map = {}
+ for key, raw_resource in raw_resources.items():
+ dict_map[key] = Modelizer(model).loads(raw_resource)
+ return dict_map
+ # list of resources
+ else:
+ return [Modelizer(model).loads(raw_resource)
+ for raw_resource in raw_resources]
+
+ def loads(self, raw_resource):
+ attrs = {}
+ if not getattr(self, 'model', False):
+ raise NotImplementedError("%s needs model attr" %
+ self.__class__.__name__)
+ idl = self.model.idl()
+ attrs.update(
+ {attr: raw_resource[attr] for attr in idl.get('strs',())
+ if raw_resource.get(attr)})
+ attrs.update(
+ {attr: raw_resource[attr] for attr in idl.get('ints',())
+ if raw_resource.get(attr)})
+ attrs.update(
+ {attr: self._parse_date(raw_resource[attr])
+ for attr in idl.get('dates',()) if raw_resource.get(attr)})
+ attrs.update(
+ {attr: raw_resource[attr] for attr in idl.get('bools',())
+ if raw_resource.get(attr)})
+ attrs.update(
+ {attr: self._parse_map(model, raw_resource[attr])
+ for attr, model in idl.get('maps',{}).items()
+ if raw_resource.get(attr)})
+ attrs.update(
+ {attr: self._parse_collection_map(model, raw_resource[attr])
+ for attr, model in idl.get('collection_maps',{}).items()
+ if raw_resource.get(attr)})
+
+ return self.model(attrs)
+
+ def dumps(self, model):
+ # return JSON
+ pass
diff --git a/github3/core.py b/github3/core.py
index 6221f9e..d7237af 100644
--- a/github3/core.py
+++ b/github3/core.py
@@ -1,57 +1,70 @@
-# -*- coding: utf-8 -*-
-
-"""
-github3.core
-~~~~~~~~~~~~
-
-This module provides the entrance point for the GitHub3 module.
-"""
-
-__version__ = '0.0.0'
-__license__ = 'MIT'
-__author__ = 'Kenneth Reitz'
-
-
-import envoy
-
-from .api import Github, settings
-
-
-
-def no_auth():
- """Returns an un-authenticated Github object."""
-
- gh = Github()
-
- return gh
-
-
-def basic_auth(username, password):
- """Returns an authenticated Github object, via HTTP Basic."""
-
- gh = Github()
- gh.is_authenticated = True
- gh.session.auth = (username, password)
-
- return gh
-
-
-
-# def git_config():
-# """Returns an authenticated Github object, via HTTP Basic.
-
-# GitHub API token is taken from `git config`.
-# """
-
-# username = envoy.run('git config github.user').std_out.strip()
-# token = envoy.run('git config github.token').std_out.strip()
-
-# def enable_auth(*args, **kwargs):
-# kwargs['auth'] = (username, token)
-# return args, kwargs
-
-# gh = Github()
-# gh.is_authenticated = True
-# gh._requests_pre_hook = enable_auth
-
-# return gh \ No newline at end of file
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+
+class Paginate:
+ """ Paginate resource iterator
+
+ :param resource: URL resource
+ :param requester: Bound method to request. See `GithubCore.get`
+ :param kwargs: Args to request (params)
+ """
+
+ def __init__(self, resource, requester, **kwargs):
+ self.resource = resource
+ self.requester = requester
+ self.kwargs = kwargs
+ self.page = 1
+
+ def _last_page(self, link):
+ """ Get and cached last page from link header """
+ if not getattr(self, 'last', False):
+ from github3.packages.link_header import parse_link_value
+ from urlparse import urlparse, parse_qs
+ for link, rels in parse_link_value(link).items():
+ if rels.get('rel') == 'last':
+ query = urlparse(link).query
+ self.last = int(parse_qs(query).get('page').pop())
+
+ return self.last
+
+ # TODO: reset iterators... multiple?
+ def __iter__(self):
+ return self
+
+ def initial(self):
+ """ First request. Force requester to paginate returning link header """
+ link, content = self.requester(self.resource, paginate=True,
+ page=1, **self.kwargs)
+ self.last = self._last_page(link) if link else 1
+ return content
+
+ def next(self):
+ if self.page == 1:
+ content = self.initial()
+ self.page += 1
+ return content
+ else:
+ if self.page > self.last:
+ raise StopIteration
+ else:
+ content = self.requester(self.resource, page=self.page,
+ **self.kwargs)
+ self.page += 1
+ return content
+
+class Converter(object):
+ """ Abstract converter class """
+
+ def loads(self):
+ raise NotImplementedError("%s needs define '%s' method" %
+ (self.__class__.__name__, 'loads'))
+
+ def dumps(self):
+ raise NotImplementedError("%s needs define '%s' method" %
+ (self.__class__.__name__, 'dumps'))
+
+ def inject(self):
+ raise NotImplementedError("%s needs define '%s' method" %
+ (self.__class__.__name__, 'inject'))
diff --git a/github3/errors.py b/github3/errors.py
new file mode 100644
index 0000000..09e616b
--- /dev/null
+++ b/github3/errors.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+
+import json
+import github3.exceptions as exceptions
+
+class GithubError(object):
+ """ Handler for API errors """
+
+ def __init__(self, response):
+ self._parser = json
+ self.status_code = response.status_code
+ try:
+ self.debug = self._parser.loads(response.content)
+ except ValueError:
+ self.debug = {'message': response.content}
+
+ def error_400(self):
+ return exceptions.BadRequest("400 - %s" % self.debug.get('message'))
+
+ def error_404(self):
+ return exceptions.NotFound("404 - %s" % self.debug.get('message'))
+
+ def error_422(self):
+ errors = self.debug.get('errors')
+ if errors:
+ errors = ['{resource}: {code} => {field}'.format(**error)
+ for error in errors]
+ return exceptions.UnprocessableEntity(
+ '422 - %s %s' % (self.debug.get('message'), errors))
+
+ def process(self):
+ raise_error = getattr(self, 'error_%s' % self.status_code, False)
+ if raise_error:
+ raise raise_error()
diff --git a/github3/exceptions.py b/github3/exceptions.py
new file mode 100644
index 0000000..b0894a9
--- /dev/null
+++ b/github3/exceptions.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+
+class BadRequest(Exception):
+ pass
+class UnprocessableEntity(Exception):
+ pass
+class NotFound(Exception):
+ pass
+class AnomUser(Exception):
+ """ Exception for AnomUser handler """
+ pass
diff --git a/github3/packages/omnijson/packages/__init__.py b/github3/handlers/__init__.py
index e69de29..e69de29 100644
--- a/github3/packages/omnijson/packages/__init__.py
+++ b/github3/handlers/__init__.py
diff --git a/github3/handlers/base.py b/github3/handlers/base.py
new file mode 100644
index 0000000..50e2df8
--- /dev/null
+++ b/github3/handlers/base.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+
+from github3.core import Paginate
+from github3.converters import Modelizer
+
+class Handler(object):
+ """ Handler base. Requests to API and modelize responses """
+
+ def __init__(self, gh):
+ self._gh = gh
+ super(Handler, self).__init__()
+
+ def _prefix_resource(self, resource):
+ prefix = getattr(self, 'prefix', '')
+ return '/'.join((prefix, resource)).rstrip('/')
+
+ def _get_converter(self, **kwargs):
+ converter = kwargs.get(
+ 'converter', # 1. in kwargs
+ getattr(self, 'converter', # 2. in handler
+ Modelizer())) # 3. Default
+
+ return converter
+
+ def _put(self, resource, **kwargs):
+ """ Put proxy request"""
+
+ return self._bool(resource, method='put', **kwargs)
+
+ def _delete(self, resource, **kwargs):
+ """ Delete proxy request"""
+
+ return self._bool(resource, method='delete', **kwargs)
+
+ def _bool(self, resource, **kwargs):
+ """ Handler request to boolean response """
+
+ from github3.exceptions import NotFound
+ resource = self._prefix_resource(resource)
+ try:
+ callback = getattr(self._gh, kwargs.get('method',''), self._gh.head)
+ response = callback(resource, **kwargs)
+ except NotFound:
+ return False
+ assert response.status_code == 204
+ return True
+
+ #TODO: if limit is multiple of per_page... it do another request for nothing
+ def _get_resources(self, resource, model=None, limit=None, **kwargs):
+ """ Hander request to multiple resources """
+
+ resource = self._prefix_resource(resource)
+ page_resources = Paginate(resource, self._gh.get, **kwargs)
+ counter = 1
+ for page in page_resources:
+ for raw_resource in page:
+ if limit and counter > limit: break
+ counter += 1
+ converter = self._get_converter(**kwargs)
+ converter.inject(model)
+ yield converter.loads(raw_resource)
+ else:
+ continue
+ break
+
+ def _get_resource(self, resource, model=None, **kwargs):
+ """ Handler request to single resource """
+
+ resource = self._prefix_resource(resource)
+ raw_resource = self._gh.get(resource)
+ converter = self._get_converter(**kwargs)
+ converter.inject(model)
+ return converter.loads(raw_resource)
+
+ def _post_resource(self, resource, data, model=None, **kwargs):
+ """ Handler request to create a resource """
+
+ resource = self._prefix_resource(resource)
+ raw_resource = self._gh.post(resource, data=data)
+ converter = self._get_converter(**kwargs)
+ converter.inject(model)
+ return converter.loads(raw_resource)
diff --git a/github3/handlers/gists.py b/github3/handlers/gists.py
new file mode 100644
index 0000000..15f215c
--- /dev/null
+++ b/github3/handlers/gists.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: Antti Kaihola
+
+from .base import Handler
+from .. import models
+
+
+class Gist(Handler):
+ """ Gist handler """
+
+ prefix = 'gists'
+
+ def __repr__(self):
+ return '<Gist handler>'
+
+ def get(self, gist_id):
+ """ Return gist """
+
+ return self._get_resource(gist_id, model=models.Gist)
+
+ def create_gist(self, description, public=True, files={}):
+ """ Create a gist """
+ data = {'description': description,
+ 'public': public,
+ 'files': files}
+ return self._post_resource('', data=data, model=models.Gist)
diff --git a/github3/handlers/user.py b/github3/handlers/user.py
new file mode 100644
index 0000000..fb893b4
--- /dev/null
+++ b/github3/handlers/user.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+
+from .base import Handler
+import github3.models as models
+from github3.converters import Rawlizer
+
+class User(Handler):
+ """ User handler with public access """
+
+ prefix = 'users'
+
+ def __repr__(self):
+ return '<User handler> %s>' % getattr(self, 'username', 'without user')
+
+ def set_username(self, user):
+ """
+ Set username to query public handler
+
+ :param `user`: User model or username string
+ """
+
+ parse_user = str(getattr(user, 'login', user))
+ self.username = parse_user
+ self.prefix = '/'.join((self.prefix, parse_user))
+
+ def get(self):
+ """ Return user """
+
+ return self._get_resource('', model=models.User)
+
+ def get_followers(self):
+ """ Return user's followers """
+
+ return self._get_resources('followers', model=models.User)
+
+ def get_following(self):
+ """ Return users that follow """
+
+ return self._get_resources('following', model=models.User)
+
+ def get_repos(self):
+ """ Return user's public repositories """
+
+ return self._get_resources('repos', model=models.Repo)
+
+ def get_watched(self):
+ """ Return repositories that user whatch """
+
+ return self._get_resources('watched', model=models.Repo)
+
+ def get_orgs(self):
+ """ Return user's public organizations """
+
+ return self._get_resources('orgs', model=models.Org)
+
+ def get_gists(self):
+ """ Return user's gists """
+
+ return self._get_resources('gists', model=models.Gist)
+
+class AuthUser(User):
+ """ User handler with public and private access """
+
+ prefix = 'user'
+
+ def __repr__(self):
+ return '<AuthUser handler> %s>' % self._gh.session.auth[0]
+
+ def get(self):
+ return self._get_resource('', model=models.AuthUser)
+
+ def get_emails(self):
+ """ Return list of emails """
+
+ # Ignore converter, it must be Rawlizer
+ emails = self._get_resource('emails', converter=Rawlizer())
+ return emails
+
+ def create_emails(self, *args):
+ """
+ Add emails
+
+ :param args: Collection of emails
+ create_emails(*('test1@example.com', 'test2@example.com'))
+ """
+ parsed_emails = map(str, args)
+ all_mails = self._post_resource(
+ 'emails', data=parsed_emails, converter=Rawlizer())
+ return all_mails
+
+ def delete_emails(self, *args):
+ """
+ Delete emails
+
+ :param args: Collection of emails
+ delete_emails(*('test1@example.com', 'test2@example.com'))
+ """
+ parsed_emails = map(str, args)
+ return self._delete('emails', data=parsed_emails)
+
+ def is_following(self, user):
+ """
+ Return true if you are following the user
+
+ :param `user`: User model or username string
+ """
+
+ parse_user = str(getattr(user, 'login', user))
+ return self._bool('following/%s' % parse_user)
+
+ def follow(self, user):
+ """
+ Follow user
+
+ :param `user`: User model or username string
+ """
+
+ parse_user = str(getattr(user, 'login', user))
+ return self._put('following/%s' % parse_user)
+
+ def unfollow(self, user):
+ """
+ Unfollow user
+
+ :param `user`: User model or username string
+ """
+
+ parse_user = str(getattr(user, 'login', user))
+ return self._delete('following/%s' % parse_user)
+
+ def get_keys(self):
+ """ Get public keys """
+
+ return self._get_resources('keys', model=models.Key)
+
+ def get_key(self, key_id):
+ """ Get public key by id """
+
+ return self._get_resource('keys/%s' % key_id, model=models.Key)
+
+ def create_key(self, **kwargs):
+ """
+ Create public key
+
+ :param title
+ :param key: Key string
+ """
+
+ #TODO: render key.pub file
+ key = {
+ 'title': kwargs.get('title',''),
+ 'key': kwargs.get('key','')
+ }
+ return self._post_resource('keys', data=key, model=models.Key)
+
+ def delete_key(self, key_id):
+ """ Delete public key """
+
+ return self._delete('keys/%s' % key_id)
+
+ def get_repos(self, filter='all'):
+ """
+ Return user's public repositories
+
+ param: filter: 'all', 'public', 'private' or 'member'
+ """
+
+ return self._get_resources('repos', model=models.Repo,
+ type=str(filter))
+
+ def is_watching_repo(self, owner, repo):
+ """
+ Return true if you are watching the user repository
+
+ :param owner: username
+ :param repo: repository name
+ is_watching_repo('copitux', 'python-github3')
+ """
+
+ owner = getattr(owner, 'login', owner)
+ repo = getattr(repo, 'name', repo)
+ return self._bool('watched/%s/%s' % (owner, repo))
+
+ def watch_repo(self, owner, repo):
+ """
+ Watch the repository
+
+ :param owner: username
+ :param repo: repository name
+ """
+
+ return self._put('watched/%s/%s' % (owner, repo))
+
+ def unwatch_repo(self, owner, repo):
+ """
+ Unwatch the repository
+
+ :param owner: username
+ :param repo: repository name
+ """
+
+ return self._delete('watched/%s/%s' % (owner, repo))
diff --git a/github3/helpers.py b/github3/helpers.py
index abdeb4a..205e097 100644
--- a/github3/helpers.py
+++ b/github3/helpers.py
@@ -57,6 +57,7 @@ def to_python(obj,
date_keys=None,
int_keys=None,
object_map=None,
+ list_map=None,
bool_keys=None, **kwargs):
"""Extends a given object for API Consumption.
@@ -96,14 +97,32 @@ def to_python(obj,
if object_map:
for (k, v) in object_map.items():
if in_dict.get(k):
+ if v == 'self':
+ v = obj.__class__
d[k] = v.new_from_dict(in_dict.get(k))
+ if list_map:
+ for k, model in list_map.items():
+ nested_map = in_dict.get(k)
+ if nested_map:
+ if getattr(nested_map, 'items', False):
+ map_dict = {}
+ for nested_item, nested_dict in nested_map.items():
+ map_dict[nested_item] = model.new_from_dict(nested_dict)
+ d[k] = map_dict
+ else:
+ map_list = []
+ for item_map in nested_map:
+ map_list.append(model.new_from_dict(item_map))
+ d[k] = map_list
+
obj.__dict__.update(d)
obj.__dict__.update(kwargs)
# Save the dictionary, for write comparisons.
obj._cache = d
obj.__cache = in_dict
+ obj.post_map()
return obj
@@ -166,4 +185,4 @@ def get_scope(f, args=None):
# scrub readability.models namespace
scope = scope.replace('readability.api.', '')
- return scope \ No newline at end of file
+ return scope
diff --git a/github3/models.py b/github3/models.py
deleted file mode 100644
index 8c4f9ba..0000000
--- a/github3/models.py
+++ /dev/null
@@ -1,254 +0,0 @@
-"""
-github3.models
-~~~~~~~~~~~~~~
-
-This module provides the Github3 object model.
-"""
-
-import json
-
-from .helpers import to_python, to_api, key_diff
-
-
-class BaseResource(object):
- """A BaseResource object."""
-
- _strs = []
- _ints = []
- _dates = []
- _bools = []
- _map = {}
- _writeable = []
- _cache = {}
-
-
- def __init__(self):
- self._bootstrap()
- super(BaseResource, self).__init__()
-
-
- def __dir__(self):
- return self.keys()
-
- def _bootstrap(self):
- """Bootstraps the model object based on configured values."""
-
- for attr in self.keys():
- setattr(self, attr, None)
-
- def keys(self):
- return self._strs + self._ints + self._dates + self._bools + self._map.keys()
-
- def dict(self):
- d = dict()
- for k in self.keys():
- d[k] = self.__dict__.get(k)
-
- return d
-
- @classmethod
- def new_from_dict(cls, d, gh=None):
-
- return to_python(
- obj=cls(), in_dict=d,
- str_keys = cls._strs,
- int_keys = cls._ints,
- date_keys = cls._dates,
- bool_keys = cls._bools,
- object_map = cls._map,
- _gh = gh
- )
-
-
- def update(self):
- deploy = key_diff(self._cache, self.dict(), pack=True)
-
- deploy = to_api(deploy, int_keys=self._ints, date_keys=self._dates, bool_keys=self._bools)
- deploy = json.dumps(deploy)
-
- r = self._gh._patch_resource(self.ri, deploy)
- return r
-
-
-class Plan(BaseResource):
- """Github Plan object model."""
-
- _strs = ['name']
- _ints = ['space', 'collaborators', 'private_repos']
-
- def __repr__(self):
- return '<plan {0}>'.format(str(self.name))
-
-
-
-class User(BaseResource):
- """Github User object model."""
-
- _strs = [
- 'login','avatar_url', 'url', 'name', 'company', 'blog', 'location',
- 'email', 'bio', 'html_url']
-
- _ints = ['id', 'public_repos', 'public_gists', 'followers', 'following']
- _dates = ['created_at',]
- _bools = ['hireable', ]
- # _map = {}
- # _writeable = []
-
- @property
- def ri(self):
- return ('users', self.login)
-
- def __repr__(self):
- return '<user {0}>'.format(self.login)
-
- def repos(self, limit=None):
- return self._gh._get_resources(('users', self.login, 'repos'), Repo, limit=limit)
-
- def repo(self, reponame):
- return self._gh._get_resource(('repos', self.login, reponame), Repo)
-
- def orgs(self):
- return self._gh._get_resources(('users', self.login, 'orgs'), Org)
-
- def gists(self):
- return self._gh._get_resources(('users', self.login, 'gists'), Gist)
-
- def create_gist(self, description, public=True, files={}):
- data = {'description': description,
- 'public': public,
- 'files': files}
- deploy = json.dumps(data)
- return self._gh._post_resource(('users', self.login, 'gists'), Gist, deploy)
-
-
-class CurrentUser(User):
- """Github Current User object model."""
-
- _ints = [
- 'id', 'public_repos', 'public_gists', 'followers', 'following',
- 'total_private_repos', 'owned_private_repos', 'private_gists',
- 'disk_usage', 'collaborators']
- _map = {'plan': Plan}
- _writeable = ['name', 'email', 'blog', 'company', 'location', 'hireable', 'bio']
-
- @property
- def ri(self):
- return ('user',)
-
- def __repr__(self):
- return '<current-user {0}>'.format(self.login)
-
- def repos(self, limit=None):
- return self._gh._get_resources(('user', 'repos'), Repo, limit=limit)
-
- def repo(self, reponame):
- return self._gh._get_resource(('repos', self.login, reponame), Repo)
-
- def orgs(self, limit=None):
- return self._gh._get_resources(('user', 'orgs'), Org, limit=limit)
-
- def org(self, orgname):
- return self._gh._get_resource(('orgs', orgname), Org)
-
- def gists(self, limit=None):
- return self._gh._get_resources('gists', Gist, limit=limit)
-
-
-
-class Org(BaseResource):
- """Github Organization object model."""
-
- _strs = [
- 'login', 'url', 'avatar_url', 'name', 'company', 'blog', 'location', 'email'
- 'html_url', 'type', 'billing_email']
- _ints = [
- 'id', 'public_repos', 'public_gists', 'followers', 'following',
- 'total_private_repos', 'owned_private_repos', 'private_gists', 'disk_usage',
- 'collaborators']
- _dates = ['created_at']
- _map = {'plan': Plan}
- _writable = ['billing_email', 'blog', 'company', 'email', 'location', 'name']
-
- @property
- def ri(self):
- return ('orgs', self.login)
-
- def __repr__(self):
- return '<org {0}>'.format(self.login)
-
- def repos(self, limit=None):
- return self._gh._get_resources(('orgs', self.login, 'repos'), Repo, limit=limit)
-
- def members(self, limit=None):
- return self._gh._get_resources(('orgs', self.login, 'members'), User, limit=limit)
-
- def is_member(self, username):
- if isinstance(username, User):
- username = username.login
-
- r = self._gh._http_resource('GET', ('orgs', self.login, 'members', username), check_status=False)
- return (r.status_code == 204)
-
- def publicize_member(self, username):
- if isinstance(username, User):
- username = username.login
-
- r = self._gh._http_resource('PUT', ('orgs', self.login, 'public_members', username), check_status=False, data='')
- return (r.status_code == 204)
-
- def conceal_member(self, username):
- if isinstance(username, User):
- username = username.login
-
- r = self._gh._http_resource('DELETE', ('orgs', self.login, 'public_members', username), check_status=False)
- return (r.status_code == 204)
-
- def remove_member(self, username):
- if isinstance(username, User):
- username = username.login
-
- r = self._gh._http_resource('DELETE', ('orgs', self.login, 'members', username), check_status=False)
- return (r.status_code == 204)
-
- def public_members(self, limit=None):
- return self._gh._get_resources(('orgs', self.login, 'public_members'), User, limit=limit)
-
- def is_public_member(self, username):
- if isinstance(username, User):
- username = username.login
-
- r = self._gh._http_resource('GET', ('orgs', self.login, 'public_members', username), check_status=False)
- return (r.status_code == 204)
-
-
-class Gist(BaseResource):
- _strs = ['url', 'description', 'html_url', 'git_pull_url', 'git_push_url']
- _ints = ['id', 'comments']
- _bools = ['public']
- _dates = ['created_at']
- _map = {'user': User} #TODO: file
-
- @property
- def ri(self):
- return ('users', self.user.login, self.id)
-
- def __repr__(self):
- return '<gist %s/%s>' % (self.user.login, self.description)
-
-class Repo(BaseResource):
- _strs = [
- 'url', 'html_url', 'clone_url', 'git_url', 'ssh_url', 'svn_url',
- 'name', 'description', 'homepage', 'language', 'master_branch']
- _bools = ['private', 'fork']
- _ints = ['forks', 'watchers', 'size',]
- _dates = ['pushed_at', 'created_at']
- _map = {'owner': User}
-
-
- @property
- def ri(self):
- return ('repos', self.owner.login, self.name)
-
- def __repr__(self):
- return '<repo {0}/{1}>'.format(self.owner.login, self.name)
- # owner
diff --git a/github3/models/__init__.py b/github3/models/__init__.py
new file mode 100644
index 0000000..ff0c28a
--- /dev/null
+++ b/github3/models/__init__.py
@@ -0,0 +1,4 @@
+from .user import AuthUser, User, Key
+from .repos import Repo
+from .orgs import Org
+from .gists import Gist
diff --git a/github3/models/base.py b/github3/models/base.py
new file mode 100644
index 0000000..df0c82b
--- /dev/null
+++ b/github3/models/base.py
@@ -0,0 +1,19 @@
+"""
+github3.models
+~~~~~~~~~~~~~~
+
+This package provides the Github3 object model.
+"""
+
+class BaseResource(object):
+ """A BaseResource object."""
+
+ def __init__(self, attrs=None):
+ if attrs:
+ for attr, value in attrs.items():
+ setattr(self, attr, value)
+ super(BaseResource, self).__init__()
+
+ @classmethod
+ def idl(self):
+        raise NotImplementedError('Each model needs to subclass this method')
diff --git a/github3/models/gists.py b/github3/models/gists.py
new file mode 100644
index 0000000..d1b416d
--- /dev/null
+++ b/github3/models/gists.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+
+from .base import BaseResource
+from .user import User
+
+class File(BaseResource):
+ """ File model """
+
+ @classmethod
+ def idl(self):
+ return {
+ 'strs': ['filename', 'raw_url', 'content', 'language', 'type'],
+ 'ints': ['size'],
+ }
+
+ def __repr__(self):
+ return '<File gist> %s' % self.filename
+
+class GistFork(BaseResource):
+ """ GistFork model """
+
+ @classmethod
+ def idl(self):
+ return {
+ 'strs': ['url'],
+ 'dates': ['created_at'],
+ 'maps': {'user': User}
+ }
+
+ def __repr__(self):
+ return '<Gist fork> %s>' % self.user.login
+
+class ChangeStatus(BaseResource):
+ """ ChangeStatus model """
+
+ @classmethod
+ def idl(self):
+ return {
+ 'ints': ['deletions', 'additions', 'total'],
+ }
+
+ def __repr__(self):
+ return '<Gist history> change_status>'
+
+class GistHistory(BaseResource):
+    """ GistHistory model """
+
+ @classmethod
+ def idl(self):
+ return {
+ 'strs': ['url', 'version'],
+ 'maps': {'user': User, 'change_status': ChangeStatus},
+ 'dates': ['committed_at'],
+ }
+
+ def __repr__(self):
+ return '<GistHistory %s/%s>' % (self.user, self.committed_at)
+
+class Gist(BaseResource):
+    """ Gist model """
+
+ @classmethod
+ def idl(self):
+ return {
+ 'strs': ['url', 'description', 'html_url', 'git_pull_url', 'git_push_url'],
+ 'ints': ['id', 'comments'],
+ 'bools': ['public'],
+ 'dates': ['created_at'],
+ 'maps': {'user': User},
+ 'collection_maps': {'files': File, 'forks': GistFork, 'history': GistHistory},
+ }
+
+ def __repr__(self):
+ return '<Gist %s/%s>' % (self.user, self.description)
diff --git a/github3/models/orgs.py b/github3/models/orgs.py
new file mode 100644
index 0000000..5e66c35
--- /dev/null
+++ b/github3/models/orgs.py
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+
+from .base import BaseResource
+from .user import Plan
+
+class Org(BaseResource):
+ """Github Organization object model."""
+
+ @classmethod
+ def idl(self):
+ return {
+ 'strs': ['login', 'url', 'avatar_url', 'name', 'company', 'blog',
+ 'location', 'email', 'html_url', 'type', 'billing_email'],
+ 'ints': ['id', 'public_repos', 'public_gists', 'followers',
+ 'following', 'total_private_repos', 'owned_private_repos',
+ 'private_gists', 'disk_usage', 'collaborators'],
+ 'dates': ['created_at'],
+ 'maps': {'plan': Plan}
+ }
+
+ def __repr__(self):
+ return '<Org %s>' % self.login
diff --git a/github3/models/repos.py b/github3/models/repos.py
new file mode 100644
index 0000000..d1b7b75
--- /dev/null
+++ b/github3/models/repos.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+
+from .base import BaseResource
+from .user import User
+from .orgs import Org
+
+class Repo(BaseResource):
+ """ Repo model """
+
+ @classmethod
+ def idl(self):
+ return {
+ 'strs': [
+ 'url', 'html_url', 'clone_url', 'git_url', 'ssh_url', 'svn_url',
+ 'name', 'description', 'homepage', 'language', 'master_branch'],
+ 'ints': ['forks', 'watchers', 'size', 'open_issues'],
+ 'dates': ['created_at', 'pushed_at'],
+ 'bools': ['private', 'fork', 'has_issues', 'has_wiki', 'has_downloads'],
+ 'maps': {
+ 'owner': User,
+ 'organization': Org,
+ 'parent': self.__class__,
+ 'source': self.__class__,
+ }
+ }
+
+ def __repr__(self):
+ return '<Repo %s/%s>' % (self.owner.login, self.name)
diff --git a/github3/models/user.py b/github3/models/user.py
new file mode 100644
index 0000000..7ec7999
--- /dev/null
+++ b/github3/models/user.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: David Medina
+
+from .base import BaseResource
+
+class Plan(BaseResource):
+ """Github Plan object model."""
+
+ @classmethod
+ def idl(self):
+ return {
+ 'strs': ['name'],
+ 'ints': ['space', 'collaborators', 'private_repos'],
+ }
+
+ def __repr__(self):
+ return '<Plan %s>' % self.name
+
+class Key(BaseResource):
+ """Github Key object model."""
+
+ @classmethod
+ def idl(self):
+ return {
+ 'strs': ['url', 'title', 'key'],
+ 'ints': ['id'],
+ }
+
+ def __repr__(self):
+ return '<Key %s>' % self.title
+
+class User(BaseResource):
+ """Github User object model."""
+
+ @classmethod
+ def idl(self):
+ return {
+ 'strs': ['login','avatar_url', 'url', 'name', 'company', 'blog',
+ 'location', 'email', 'bio', 'html_url', 'type'],
+ 'ints': [
+ 'id', 'public_repos', 'public_gists', 'followers', 'following',
+ 'total_private_repos', 'owned_private_repos', 'private_gists',
+ 'disk_usage', 'collaborators'],
+ 'maps': {'plan': Plan},
+ 'dates': ['created_at',],
+ 'bools': ['hireable', ],
+ }
+
+ def __repr__(self):
+ return '<User %s>' % self.login
+
+ #def handler(self):
+ # return self._gh.user_handler(self.login, force=True)
+
+class AuthUser(User):
+ """Github Authenticated User object model."""
+
+ #def handler(self):
+ # return self._gh.user_handler(self.login, force=True, private=True)
+
+ def __repr__(self):
+ return '<AuthUser %s>' % self.login
+
diff --git a/github3/packages/omnijson/__init__.py b/github3/packages/omnijson/__init__.py
deleted file mode 100644
index c10c328..0000000
--- a/github3/packages/omnijson/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from __future__ import absolute_import
-
-from .core import loads, dumps, JSONError
-
-
-__all__ = ('loads', 'dumps', 'JSONError')
-
-
-__version__ = '0.1.2'
-__author__ = 'Kenneth Reitz'
-__license__ = 'MIT'
diff --git a/github3/packages/omnijson/core.py b/github3/packages/omnijson/core.py
deleted file mode 100644
index 8b49537..0000000
--- a/github3/packages/omnijson/core.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-omijson.core
-~~~~~~~~~~~~
-
-This module provides the core omnijson functionality.
-
-"""
-
-import sys
-
-engine = None
-_engine = None
-
-
-options = [
- ['ujson', 'loads', 'dumps', (ValueError,)],
- ['yajl', 'loads', 'dumps', (TypeError, ValueError)],
- ['jsonlib2', 'read', 'write', (ValueError,)],
- ['jsonlib', 'read', 'write', (ValueError,)],
- ['simplejson', 'loads', 'dumps', (TypeError, ValueError)],
- ['json', 'loads', 'dumps', (TypeError, ValueError)],
- ['simplejson_from_packages', 'loads', 'dumps', (ValueError,)],
-]
-
-
-def _import(engine):
- try:
- if '_from_' in engine:
- engine, package = engine.split('_from_')
- m = __import__(package, globals(), locals(), [engine], -1)
- return getattr(m, engine)
-
- return __import__(engine)
-
- except ImportError:
- return False
-
-
-def loads(s, **kwargs):
- """Loads JSON object."""
-
- try:
- return _engine[0](s)
-
- except:
- # crazy 2/3 exception hack
- # http://www.voidspace.org.uk/python/weblog/arch_d7_2010_03_20.shtml
-
- ExceptionClass, why = sys.exc_info()[:2]
-
- if any([(issubclass(ExceptionClass, e)) for e in _engine[2]]):
- raise JSONError(why)
- else:
- raise why
-
-
-def dumps(o, **kwargs):
- """Dumps JSON object."""
-
- try:
- return _engine[1](o)
-
- except:
- ExceptionClass, why = sys.exc_info()[:2]
-
- if any([(issubclass(ExceptionClass, e)) for e in _engine[2]]):
- raise JSONError(why)
- else:
- raise why
-
-
-class JSONError(ValueError):
- """JSON Failed."""
-
-
-# ------
-# Magic!
-# ------
-
-
-for e in options:
-
- __engine = _import(e[0])
-
- if __engine:
- engine, _engine = e[0], e[1:4]
-
- for i in (0, 1):
- _engine[i] = getattr(__engine, _engine[i])
-
- break
diff --git a/github3/packages/omnijson/packages/simplejson/__init__.py b/github3/packages/omnijson/packages/simplejson/__init__.py
deleted file mode 100644
index 210b957..0000000
--- a/github3/packages/omnijson/packages/simplejson/__init__.py
+++ /dev/null
@@ -1,438 +0,0 @@
-r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
-JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
-interchange format.
-
-:mod:`simplejson` exposes an API familiar to users of the standard library
-:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
-version of the :mod:`json` library contained in Python 2.6, but maintains
-compatibility with Python 2.4 and Python 2.5 and (currently) has
-significant performance advantages, even without using the optional C
-extension for speedups.
-
-Encoding basic Python object hierarchies::
-
- >>> import simplejson as json
- >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
- '["foo", {"bar": ["baz", null, 1.0, 2]}]'
- >>> print json.dumps("\"foo\bar")
- "\"foo\bar"
- >>> print json.dumps(u'\u1234')
- "\u1234"
- >>> print json.dumps('\\')
- "\\"
- >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
- {"a": 0, "b": 0, "c": 0}
- >>> from StringIO import StringIO
- >>> io = StringIO()
- >>> json.dump(['streaming API'], io)
- >>> io.getvalue()
- '["streaming API"]'
-
-Compact encoding::
-
- >>> import simplejson as json
- >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
- '[1,2,3,{"4":5,"6":7}]'
-
-Pretty printing::
-
- >>> import simplejson as json
- >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ')
- >>> print '\n'.join([l.rstrip() for l in s.splitlines()])
- {
- "4": 5,
- "6": 7
- }
-
-Decoding JSON::
-
- >>> import simplejson as json
- >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
- >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
- True
- >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
- True
- >>> from StringIO import StringIO
- >>> io = StringIO('["streaming API"]')
- >>> json.load(io)[0] == 'streaming API'
- True
-
-Specializing JSON object decoding::
-
- >>> import simplejson as json
- >>> def as_complex(dct):
- ... if '__complex__' in dct:
- ... return complex(dct['real'], dct['imag'])
- ... return dct
- ...
- >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
- ... object_hook=as_complex)
- (1+2j)
- >>> from decimal import Decimal
- >>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
- True
-
-Specializing JSON object encoding::
-
- >>> import simplejson as json
- >>> def encode_complex(obj):
- ... if isinstance(obj, complex):
- ... return [obj.real, obj.imag]
- ... raise TypeError(repr(o) + " is not JSON serializable")
- ...
- >>> json.dumps(2 + 1j, default=encode_complex)
- '[2.0, 1.0]'
- >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
- '[2.0, 1.0]'
- >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
- '[2.0, 1.0]'
-
-
-Using simplejson.tool from the shell to validate and pretty-print::
-
- $ echo '{"json":"obj"}' | python -m simplejson.tool
- {
- "json": "obj"
- }
- $ echo '{ 1.2:3.4}' | python -m simplejson.tool
- Expecting property name: line 1 column 2 (char 2)
-"""
-__version__ = '2.1.6'
-__all__ = [
- 'dump', 'dumps', 'load', 'loads',
- 'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
- 'OrderedDict',
-]
-
-__author__ = 'Bob Ippolito <bob@redivi.com>'
-
-from decimal import Decimal
-
-from decoder import JSONDecoder, JSONDecodeError
-from encoder import JSONEncoder
-def _import_OrderedDict():
- import collections
- try:
- return collections.OrderedDict
- except AttributeError:
- import ordered_dict
- return ordered_dict.OrderedDict
-OrderedDict = _import_OrderedDict()
-
-def _import_c_make_encoder():
- try:
- from simplejson._speedups import make_encoder
- return make_encoder
- except ImportError:
- return None
-
-_default_encoder = JSONEncoder(
- skipkeys=False,
- ensure_ascii=True,
- check_circular=True,
- allow_nan=True,
- indent=None,
- separators=None,
- encoding='utf-8',
- default=None,
- use_decimal=False,
-)
-
-def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
- allow_nan=True, cls=None, indent=None, separators=None,
- encoding='utf-8', default=None, use_decimal=False, **kw):
- """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
- ``.write()``-supporting file-like object).
-
- If ``skipkeys`` is true then ``dict`` keys that are not basic types
- (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
- will be skipped instead of raising a ``TypeError``.
-
- If ``ensure_ascii`` is false, then the some chunks written to ``fp``
- may be ``unicode`` instances, subject to normal Python ``str`` to
- ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
- understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
- to cause an error.
-
- If ``check_circular`` is false, then the circular reference check
- for container types will be skipped and a circular reference will
- result in an ``OverflowError`` (or worse).
-
- If ``allow_nan`` is false, then it will be a ``ValueError`` to
- serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
- in strict compliance of the JSON specification, instead of using the
- JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
-
- If *indent* is a string, then JSON array elements and object members
- will be pretty-printed with a newline followed by that string repeated
- for each level of nesting. ``None`` (the default) selects the most compact
- representation without any newlines. For backwards compatibility with
- versions of simplejson earlier than 2.1.0, an integer is also accepted
- and is converted to a string with that many spaces.
-
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
-
- ``encoding`` is the character encoding for str instances, default is UTF-8.
-
- ``default(obj)`` is a function that should return a serializable version
- of obj or raise TypeError. The default simply raises TypeError.
-
- If *use_decimal* is true (default: ``False``) then decimal.Decimal
- will be natively serialized to JSON with full precision.
-
- To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
- ``.default()`` method to serialize additional types), specify it with
- the ``cls`` kwarg.
-
- """
- # cached encoder
- if (not skipkeys and ensure_ascii and
- check_circular and allow_nan and
- cls is None and indent is None and separators is None and
- encoding == 'utf-8' and default is None and not use_decimal
- and not kw):
- iterable = _default_encoder.iterencode(obj)
- else:
- if cls is None:
- cls = JSONEncoder
- iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
- check_circular=check_circular, allow_nan=allow_nan, indent=indent,
- separators=separators, encoding=encoding,
- default=default, use_decimal=use_decimal, **kw).iterencode(obj)
- # could accelerate with writelines in some versions of Python, at
- # a debuggability cost
- for chunk in iterable:
- fp.write(chunk)
-
-
-def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
- allow_nan=True, cls=None, indent=None, separators=None,
- encoding='utf-8', default=None, use_decimal=False, **kw):
- """Serialize ``obj`` to a JSON formatted ``str``.
-
- If ``skipkeys`` is false then ``dict`` keys that are not basic types
- (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
- will be skipped instead of raising a ``TypeError``.
-
- If ``ensure_ascii`` is false, then the return value will be a
- ``unicode`` instance subject to normal Python ``str`` to ``unicode``
- coercion rules instead of being escaped to an ASCII ``str``.
-
- If ``check_circular`` is false, then the circular reference check
- for container types will be skipped and a circular reference will
- result in an ``OverflowError`` (or worse).
-
- If ``allow_nan`` is false, then it will be a ``ValueError`` to
- serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
- strict compliance of the JSON specification, instead of using the
- JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
-
- If ``indent`` is a string, then JSON array elements and object members
- will be pretty-printed with a newline followed by that string repeated
- for each level of nesting. ``None`` (the default) selects the most compact
- representation without any newlines. For backwards compatibility with
- versions of simplejson earlier than 2.1.0, an integer is also accepted
- and is converted to a string with that many spaces.
-
- If ``separators`` is an ``(item_separator, dict_separator)`` tuple
- then it will be used instead of the default ``(', ', ': ')`` separators.
- ``(',', ':')`` is the most compact JSON representation.
-
- ``encoding`` is the character encoding for str instances, default is UTF-8.
-
- ``default(obj)`` is a function that should return a serializable version
- of obj or raise TypeError. The default simply raises TypeError.
-
- If *use_decimal* is true (default: ``False``) then decimal.Decimal
- will be natively serialized to JSON with full precision.
-
- To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
- ``.default()`` method to serialize additional types), specify it with
- the ``cls`` kwarg.
-
- """
- # cached encoder
- if (not skipkeys and ensure_ascii and
- check_circular and allow_nan and
- cls is None and indent is None and separators is None and
- encoding == 'utf-8' and default is None and not use_decimal
- and not kw):
- return _default_encoder.encode(obj)
- if cls is None:
- cls = JSONEncoder
- return cls(
- skipkeys=skipkeys, ensure_ascii=ensure_ascii,
- check_circular=check_circular, allow_nan=allow_nan, indent=indent,
- separators=separators, encoding=encoding, default=default,
- use_decimal=use_decimal, **kw).encode(obj)
-
-
-_default_decoder = JSONDecoder(encoding=None, object_hook=None,
- object_pairs_hook=None)
-
-
-def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, object_pairs_hook=None,
- use_decimal=False, **kw):
- """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
- a JSON document) to a Python object.
-
- *encoding* determines the encoding used to interpret any
- :class:`str` objects decoded by this instance (``'utf-8'`` by
- default). It has no effect when decoding :class:`unicode` objects.
-
- Note that currently only encodings that are a superset of ASCII work,
- strings of other encodings should be passed in as :class:`unicode`.
-
- *object_hook*, if specified, will be called with the result of every
- JSON object decoded and its return value will be used in place of the
- given :class:`dict`. This can be used to provide custom
- deserializations (e.g. to support JSON-RPC class hinting).
-
- *object_pairs_hook* is an optional function that will be called with
- the result of any object literal decode with an ordered list of pairs.
- The return value of *object_pairs_hook* will be used instead of the
- :class:`dict`. This feature can be used to implement custom decoders
- that rely on the order that the key and value pairs are decoded (for
- example, :func:`collections.OrderedDict` will remember the order of
- insertion). If *object_hook* is also defined, the *object_pairs_hook*
- takes priority.
-
- *parse_float*, if specified, will be called with the string of every
- JSON float to be decoded. By default, this is equivalent to
- ``float(num_str)``. This can be used to use another datatype or parser
- for JSON floats (e.g. :class:`decimal.Decimal`).
-
- *parse_int*, if specified, will be called with the string of every
- JSON int to be decoded. By default, this is equivalent to
- ``int(num_str)``. This can be used to use another datatype or parser
- for JSON integers (e.g. :class:`float`).
-
- *parse_constant*, if specified, will be called with one of the
- following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
- can be used to raise an exception if invalid JSON numbers are
- encountered.
-
- If *use_decimal* is true (default: ``False``) then it implies
- parse_float=decimal.Decimal for parity with ``dump``.
-
- To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
- kwarg.
-
- """
- return loads(fp.read(),
- encoding=encoding, cls=cls, object_hook=object_hook,
- parse_float=parse_float, parse_int=parse_int,
- parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
- use_decimal=use_decimal, **kw)
-
-
-def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, object_pairs_hook=None,
- use_decimal=False, **kw):
- """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
- document) to a Python object.
-
- *encoding* determines the encoding used to interpret any
- :class:`str` objects decoded by this instance (``'utf-8'`` by
- default). It has no effect when decoding :class:`unicode` objects.
-
- Note that currently only encodings that are a superset of ASCII work,
- strings of other encodings should be passed in as :class:`unicode`.
-
- *object_hook*, if specified, will be called with the result of every
- JSON object decoded and its return value will be used in place of the
- given :class:`dict`. This can be used to provide custom
- deserializations (e.g. to support JSON-RPC class hinting).
-
- *object_pairs_hook* is an optional function that will be called with
- the result of any object literal decode with an ordered list of pairs.
- The return value of *object_pairs_hook* will be used instead of the
- :class:`dict`. This feature can be used to implement custom decoders
- that rely on the order that the key and value pairs are decoded (for
- example, :func:`collections.OrderedDict` will remember the order of
- insertion). If *object_hook* is also defined, the *object_pairs_hook*
- takes priority.
-
- *parse_float*, if specified, will be called with the string of every
- JSON float to be decoded. By default, this is equivalent to
- ``float(num_str)``. This can be used to use another datatype or parser
- for JSON floats (e.g. :class:`decimal.Decimal`).
-
- *parse_int*, if specified, will be called with the string of every
- JSON int to be decoded. By default, this is equivalent to
- ``int(num_str)``. This can be used to use another datatype or parser
- for JSON integers (e.g. :class:`float`).
-
- *parse_constant*, if specified, will be called with one of the
- following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
- can be used to raise an exception if invalid JSON numbers are
- encountered.
-
- If *use_decimal* is true (default: ``False``) then it implies
- parse_float=decimal.Decimal for parity with ``dump``.
-
- To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
- kwarg.
-
- """
- if (cls is None and encoding is None and object_hook is None and
- parse_int is None and parse_float is None and
- parse_constant is None and object_pairs_hook is None
- and not use_decimal and not kw):
- return _default_decoder.decode(s)
- if cls is None:
- cls = JSONDecoder
- if object_hook is not None:
- kw['object_hook'] = object_hook
- if object_pairs_hook is not None:
- kw['object_pairs_hook'] = object_pairs_hook
- if parse_float is not None:
- kw['parse_float'] = parse_float
- if parse_int is not None:
- kw['parse_int'] = parse_int
- if parse_constant is not None:
- kw['parse_constant'] = parse_constant
- if use_decimal:
- if parse_float is not None:
- raise TypeError("use_decimal=True implies parse_float=Decimal")
- kw['parse_float'] = Decimal
- return cls(encoding=encoding, **kw).decode(s)
-
-
-def _toggle_speedups(enabled):
- import simplejson.decoder as dec
- import simplejson.encoder as enc
- import simplejson.scanner as scan
- c_make_encoder = _import_c_make_encoder()
- if enabled:
- dec.scanstring = dec.c_scanstring or dec.py_scanstring
- enc.c_make_encoder = c_make_encoder
- enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
- enc.py_encode_basestring_ascii)
- scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
- else:
- dec.scanstring = dec.py_scanstring
- enc.c_make_encoder = None
- enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
- scan.make_scanner = scan.py_make_scanner
- dec.make_scanner = scan.make_scanner
- global _default_decoder
- _default_decoder = JSONDecoder(
- encoding=None,
- object_hook=None,
- object_pairs_hook=None,
- )
- global _default_encoder
- _default_encoder = JSONEncoder(
- skipkeys=False,
- ensure_ascii=True,
- check_circular=True,
- allow_nan=True,
- indent=None,
- separators=None,
- encoding='utf-8',
- default=None,
- )
diff --git a/github3/packages/omnijson/packages/simplejson/decoder.py b/github3/packages/omnijson/packages/simplejson/decoder.py
deleted file mode 100644
index 3e36e56..0000000
--- a/github3/packages/omnijson/packages/simplejson/decoder.py
+++ /dev/null
@@ -1,421 +0,0 @@
-"""Implementation of JSONDecoder
-"""
-import re
-import sys
-import struct
-
-from .scanner import make_scanner
-def _import_c_scanstring():
- try:
- from simplejson._speedups import scanstring
- return scanstring
- except ImportError:
- return None
-c_scanstring = _import_c_scanstring()
-
-__all__ = ['JSONDecoder']
-
-FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
-
-def _floatconstants():
- _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
- # The struct module in Python 2.4 would get frexp() out of range here
- # when an endian is specified in the format string. Fixed in Python 2.5+
- if sys.byteorder != 'big':
- _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
- nan, inf = struct.unpack('dd', _BYTES)
- return nan, inf, -inf
-
-NaN, PosInf, NegInf = _floatconstants()
-
-
-class JSONDecodeError(ValueError):
- """Subclass of ValueError with the following additional properties:
-
- msg: The unformatted error message
- doc: The JSON document being parsed
- pos: The start index of doc where parsing failed
- end: The end index of doc where parsing failed (may be None)
- lineno: The line corresponding to pos
- colno: The column corresponding to pos
- endlineno: The line corresponding to end (may be None)
- endcolno: The column corresponding to end (may be None)
-
- """
- def __init__(self, msg, doc, pos, end=None):
- ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
- self.msg = msg
- self.doc = doc
- self.pos = pos
- self.end = end
- self.lineno, self.colno = linecol(doc, pos)
- if end is not None:
- self.endlineno, self.endcolno = linecol(doc, end)
- else:
- self.endlineno, self.endcolno = None, None
-
-
-def linecol(doc, pos):
- lineno = doc.count('\n', 0, pos) + 1
- if lineno == 1:
- colno = pos
- else:
- colno = pos - doc.rindex('\n', 0, pos)
- return lineno, colno
-
-
-def errmsg(msg, doc, pos, end=None):
- # Note that this function is called from _speedups
- lineno, colno = linecol(doc, pos)
- if end is None:
- #fmt = '{0}: line {1} column {2} (char {3})'
- #return fmt.format(msg, lineno, colno, pos)
- fmt = '%s: line %d column %d (char %d)'
- return fmt % (msg, lineno, colno, pos)
- endlineno, endcolno = linecol(doc, end)
- #fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
- #return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
- fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
- return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
-
-
-_CONSTANTS = {
- '-Infinity': NegInf,
- 'Infinity': PosInf,
- 'NaN': NaN,
-}
-
-STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
-BACKSLASH = {
- '"': u'"', '\\': u'\\', '/': u'/',
- 'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
-}
-
-DEFAULT_ENCODING = "utf-8"
-
-def py_scanstring(s, end, encoding=None, strict=True,
- _b=BACKSLASH, _m=STRINGCHUNK.match):
- """Scan the string s for a JSON string. End is the index of the
- character in s after the quote that started the JSON string.
- Unescapes all valid JSON string escape sequences and raises ValueError
- on attempt to decode an invalid string. If strict is False then literal
- control characters are allowed in the string.
-
- Returns a tuple of the decoded string and the index of the character in s
- after the end quote."""
- if encoding is None:
- encoding = DEFAULT_ENCODING
- chunks = []
- _append = chunks.append
- begin = end - 1
- while 1:
- chunk = _m(s, end)
- if chunk is None:
- raise JSONDecodeError(
- "Unterminated string starting at", s, begin)
- end = chunk.end()
- content, terminator = chunk.groups()
- # Content is contains zero or more unescaped string characters
- if content:
- if not isinstance(content, unicode):
- content = unicode(content, encoding)
- _append(content)
- # Terminator is the end of string, a literal control character,
- # or a backslash denoting that an escape sequence follows
- if terminator == '"':
- break
- elif terminator != '\\':
- if strict:
- msg = "Invalid control character %r at" % (terminator,)
- #msg = "Invalid control character {0!r} at".format(terminator)
- raise JSONDecodeError(msg, s, end)
- else:
- _append(terminator)
- continue
- try:
- esc = s[end]
- except IndexError:
- raise JSONDecodeError(
- "Unterminated string starting at", s, begin)
- # If not a unicode escape sequence, must be in the lookup table
- if esc != 'u':
- try:
- char = _b[esc]
- except KeyError:
- msg = "Invalid \\escape: " + repr(esc)
- raise JSONDecodeError(msg, s, end)
- end += 1
- else:
- # Unicode escape sequence
- esc = s[end + 1:end + 5]
- next_end = end + 5
- if len(esc) != 4:
- msg = "Invalid \\uXXXX escape"
- raise JSONDecodeError(msg, s, end)
- uni = int(esc, 16)
- # Check for surrogate pair on UCS-4 systems
- if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
- msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
- if not s[end + 5:end + 7] == '\\u':
- raise JSONDecodeError(msg, s, end)
- esc2 = s[end + 7:end + 11]
- if len(esc2) != 4:
- raise JSONDecodeError(msg, s, end)
- uni2 = int(esc2, 16)
- uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
- next_end += 6
- char = unichr(uni)
- end = next_end
- # Append the unescaped character
- _append(char)
- return u''.join(chunks), end
-
-
-# Use speedup if available
-scanstring = c_scanstring or py_scanstring
-
-WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
-WHITESPACE_STR = ' \t\n\r'
-
-def JSONObject((s, end), encoding, strict, scan_once, object_hook,
- object_pairs_hook, memo=None,
- _w=WHITESPACE.match, _ws=WHITESPACE_STR):
- # Backwards compatibility
- if memo is None:
- memo = {}
- memo_get = memo.setdefault
- pairs = []
- # Use a slice to prevent IndexError from being raised, the following
- # check will raise a more specific ValueError if the string is empty
- nextchar = s[end:end + 1]
- # Normally we expect nextchar == '"'
- if nextchar != '"':
- if nextchar in _ws:
- end = _w(s, end).end()
- nextchar = s[end:end + 1]
- # Trivial empty object
- if nextchar == '}':
- if object_pairs_hook is not None:
- result = object_pairs_hook(pairs)
- return result, end + 1
- pairs = {}
- if object_hook is not None:
- pairs = object_hook(pairs)
- return pairs, end + 1
- elif nextchar != '"':
- raise JSONDecodeError("Expecting property name", s, end)
- end += 1
- while True:
- key, end = scanstring(s, end, encoding, strict)
- key = memo_get(key, key)
-
- # To skip some function call overhead we optimize the fast paths where
- # the JSON key separator is ": " or just ":".
- if s[end:end + 1] != ':':
- end = _w(s, end).end()
- if s[end:end + 1] != ':':
- raise JSONDecodeError("Expecting : delimiter", s, end)
-
- end += 1
-
- try:
- if s[end] in _ws:
- end += 1
- if s[end] in _ws:
- end = _w(s, end + 1).end()
- except IndexError:
- pass
-
- try:
- value, end = scan_once(s, end)
- except StopIteration:
- raise JSONDecodeError("Expecting object", s, end)
- pairs.append((key, value))
-
- try:
- nextchar = s[end]
- if nextchar in _ws:
- end = _w(s, end + 1).end()
- nextchar = s[end]
- except IndexError:
- nextchar = ''
- end += 1
-
- if nextchar == '}':
- break
- elif nextchar != ',':
- raise JSONDecodeError("Expecting , delimiter", s, end - 1)
-
- try:
- nextchar = s[end]
- if nextchar in _ws:
- end += 1
- nextchar = s[end]
- if nextchar in _ws:
- end = _w(s, end + 1).end()
- nextchar = s[end]
- except IndexError:
- nextchar = ''
-
- end += 1
- if nextchar != '"':
- raise JSONDecodeError("Expecting property name", s, end - 1)
-
- if object_pairs_hook is not None:
- result = object_pairs_hook(pairs)
- return result, end
- pairs = dict(pairs)
- if object_hook is not None:
- pairs = object_hook(pairs)
- return pairs, end
-
-def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
- values = []
- nextchar = s[end:end + 1]
- if nextchar in _ws:
- end = _w(s, end + 1).end()
- nextchar = s[end:end + 1]
- # Look-ahead for trivial empty array
- if nextchar == ']':
- return values, end + 1
- _append = values.append
- while True:
- try:
- value, end = scan_once(s, end)
- except StopIteration:
- raise JSONDecodeError("Expecting object", s, end)
- _append(value)
- nextchar = s[end:end + 1]
- if nextchar in _ws:
- end = _w(s, end + 1).end()
- nextchar = s[end:end + 1]
- end += 1
- if nextchar == ']':
- break
- elif nextchar != ',':
- raise JSONDecodeError("Expecting , delimiter", s, end)
-
- try:
- if s[end] in _ws:
- end += 1
- if s[end] in _ws:
- end = _w(s, end + 1).end()
- except IndexError:
- pass
-
- return values, end
-
-class JSONDecoder(object):
- """Simple JSON <http://json.org> decoder
-
- Performs the following translations in decoding by default:
-
- +---------------+-------------------+
- | JSON | Python |
- +===============+===================+
- | object | dict |
- +---------------+-------------------+
- | array | list |
- +---------------+-------------------+
- | string | unicode |
- +---------------+-------------------+
- | number (int) | int, long |
- +---------------+-------------------+
- | number (real) | float |
- +---------------+-------------------+
- | true | True |
- +---------------+-------------------+
- | false | False |
- +---------------+-------------------+
- | null | None |
- +---------------+-------------------+
-
- It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
- their corresponding ``float`` values, which is outside the JSON spec.
-
- """
-
- def __init__(self, encoding=None, object_hook=None, parse_float=None,
- parse_int=None, parse_constant=None, strict=True,
- object_pairs_hook=None):
- """
- *encoding* determines the encoding used to interpret any
- :class:`str` objects decoded by this instance (``'utf-8'`` by
- default). It has no effect when decoding :class:`unicode` objects.
-
- Note that currently only encodings that are a superset of ASCII work,
- strings of other encodings should be passed in as :class:`unicode`.
-
- *object_hook*, if specified, will be called with the result of every
- JSON object decoded and its return value will be used in place of the
- given :class:`dict`. This can be used to provide custom
- deserializations (e.g. to support JSON-RPC class hinting).
-
- *object_pairs_hook* is an optional function that will be called with
- the result of any object literal decode with an ordered list of pairs.
- The return value of *object_pairs_hook* will be used instead of the
- :class:`dict`. This feature can be used to implement custom decoders
- that rely on the order that the key and value pairs are decoded (for
- example, :func:`collections.OrderedDict` will remember the order of
- insertion). If *object_hook* is also defined, the *object_pairs_hook*
- takes priority.
-
- *parse_float*, if specified, will be called with the string of every
- JSON float to be decoded. By default, this is equivalent to
- ``float(num_str)``. This can be used to use another datatype or parser
- for JSON floats (e.g. :class:`decimal.Decimal`).
-
- *parse_int*, if specified, will be called with the string of every
- JSON int to be decoded. By default, this is equivalent to
- ``int(num_str)``. This can be used to use another datatype or parser
- for JSON integers (e.g. :class:`float`).
-
- *parse_constant*, if specified, will be called with one of the
- following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
- can be used to raise an exception if invalid JSON numbers are
- encountered.
-
- *strict* controls the parser's behavior when it encounters an
- invalid control character in a string. The default setting of
- ``True`` means that unescaped control characters are parse errors, if
- ``False`` then control characters will be allowed in strings.
-
- """
- self.encoding = encoding
- self.object_hook = object_hook
- self.object_pairs_hook = object_pairs_hook
- self.parse_float = parse_float or float
- self.parse_int = parse_int or int
- self.parse_constant = parse_constant or _CONSTANTS.__getitem__
- self.strict = strict
- self.parse_object = JSONObject
- self.parse_array = JSONArray
- self.parse_string = scanstring
- self.memo = {}
- self.scan_once = make_scanner(self)
-
- def decode(self, s, _w=WHITESPACE.match):
- """Return the Python representation of ``s`` (a ``str`` or ``unicode``
- instance containing a JSON document)
-
- """
- obj, end = self.raw_decode(s, idx=_w(s, 0).end())
- end = _w(s, end).end()
- if end != len(s):
- raise JSONDecodeError("Extra data", s, end, len(s))
- return obj
-
- def raw_decode(self, s, idx=0):
- """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
- beginning with a JSON document) and return a 2-tuple of the Python
- representation and the index in ``s`` where the document ended.
-
- This can be used to decode a JSON document from a string that may
- have extraneous data at the end.
-
- """
- try:
- obj, end = self.scan_once(s, idx)
- except StopIteration:
- raise JSONDecodeError("No JSON object could be decoded", s, idx)
- return obj, end
diff --git a/github3/packages/omnijson/packages/simplejson/encoder.py b/github3/packages/omnijson/packages/simplejson/encoder.py
deleted file mode 100644
index f1269f3..0000000
--- a/github3/packages/omnijson/packages/simplejson/encoder.py
+++ /dev/null
@@ -1,503 +0,0 @@
-"""Implementation of JSONEncoder
-"""
-import re
-from decimal import Decimal
-
-def _import_speedups():
- try:
- from simplejson import _speedups
- return _speedups.encode_basestring_ascii, _speedups.make_encoder
- except ImportError:
- return None, None
-c_encode_basestring_ascii, c_make_encoder = _import_speedups()
-
-from .decoder import PosInf
-
-ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
-ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
-HAS_UTF8 = re.compile(r'[\x80-\xff]')
-ESCAPE_DCT = {
- '\\': '\\\\',
- '"': '\\"',
- '\b': '\\b',
- '\f': '\\f',
- '\n': '\\n',
- '\r': '\\r',
- '\t': '\\t',
-}
-for i in range(0x20):
- #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
- ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
-
-FLOAT_REPR = repr
-
-def encode_basestring(s):
- """Return a JSON representation of a Python string
-
- """
- if isinstance(s, str) and HAS_UTF8.search(s) is not None:
- s = s.decode('utf-8')
- def replace(match):
- return ESCAPE_DCT[match.group(0)]
- return u'"' + ESCAPE.sub(replace, s) + u'"'
-
-
-def py_encode_basestring_ascii(s):
- """Return an ASCII-only JSON representation of a Python string
-
- """
- if isinstance(s, str) and HAS_UTF8.search(s) is not None:
- s = s.decode('utf-8')
- def replace(match):
- s = match.group(0)
- try:
- return ESCAPE_DCT[s]
- except KeyError:
- n = ord(s)
- if n < 0x10000:
- #return '\\u{0:04x}'.format(n)
- return '\\u%04x' % (n,)
- else:
- # surrogate pair
- n -= 0x10000
- s1 = 0xd800 | ((n >> 10) & 0x3ff)
- s2 = 0xdc00 | (n & 0x3ff)
- #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
- return '\\u%04x\\u%04x' % (s1, s2)
- return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
-
-
-encode_basestring_ascii = (
- c_encode_basestring_ascii or py_encode_basestring_ascii)
-
-class JSONEncoder(object):
- """Extensible JSON <http://json.org> encoder for Python data structures.
-
- Supports the following objects and types by default:
-
- +-------------------+---------------+
- | Python | JSON |
- +===================+===============+
- | dict | object |
- +-------------------+---------------+
- | list, tuple | array |
- +-------------------+---------------+
- | str, unicode | string |
- +-------------------+---------------+
- | int, long, float | number |
- +-------------------+---------------+
- | True | true |
- +-------------------+---------------+
- | False | false |
- +-------------------+---------------+
- | None | null |
- +-------------------+---------------+
-
- To extend this to recognize other objects, subclass and implement a
- ``.default()`` method with another method that returns a serializable
- object for ``o`` if possible, otherwise it should call the superclass
- implementation (to raise ``TypeError``).
-
- """
- item_separator = ', '
- key_separator = ': '
- def __init__(self, skipkeys=False, ensure_ascii=True,
- check_circular=True, allow_nan=True, sort_keys=False,
- indent=None, separators=None, encoding='utf-8', default=None,
- use_decimal=False):
- """Constructor for JSONEncoder, with sensible defaults.
-
- If skipkeys is false, then it is a TypeError to attempt
- encoding of keys that are not str, int, long, float or None. If
- skipkeys is True, such items are simply skipped.
-
- If ensure_ascii is true, the output is guaranteed to be str
- objects with all incoming unicode characters escaped. If
- ensure_ascii is false, the output will be unicode object.
-
- If check_circular is true, then lists, dicts, and custom encoded
- objects will be checked for circular references during encoding to
- prevent an infinite recursion (which would cause an OverflowError).
- Otherwise, no such check takes place.
-
- If allow_nan is true, then NaN, Infinity, and -Infinity will be
- encoded as such. This behavior is not JSON specification compliant,
- but is consistent with most JavaScript based encoders and decoders.
- Otherwise, it will be a ValueError to encode such floats.
-
- If sort_keys is true, then the output of dictionaries will be
- sorted by key; this is useful for regression tests to ensure
- that JSON serializations can be compared on a day-to-day basis.
-
- If indent is a string, then JSON array elements and object members
- will be pretty-printed with a newline followed by that string repeated
- for each level of nesting. ``None`` (the default) selects the most compact
- representation without any newlines. For backwards compatibility with
- versions of simplejson earlier than 2.1.0, an integer is also accepted
- and is converted to a string with that many spaces.
-
- If specified, separators should be a (item_separator, key_separator)
- tuple. The default is (', ', ': '). To get the most compact JSON
- representation you should specify (',', ':') to eliminate whitespace.
-
- If specified, default is a function that gets called for objects
- that can't otherwise be serialized. It should return a JSON encodable
- version of the object or raise a ``TypeError``.
-
- If encoding is not None, then all input strings will be
- transformed into unicode using that encoding prior to JSON-encoding.
- The default is UTF-8.
-
- If use_decimal is true (not the default), ``decimal.Decimal`` will
- be supported directly by the encoder. For the inverse, decode JSON
- with ``parse_float=decimal.Decimal``.
-
- """
-
- self.skipkeys = skipkeys
- self.ensure_ascii = ensure_ascii
- self.check_circular = check_circular
- self.allow_nan = allow_nan
- self.sort_keys = sort_keys
- self.use_decimal = use_decimal
- if isinstance(indent, (int, long)):
- indent = ' ' * indent
- self.indent = indent
- if separators is not None:
- self.item_separator, self.key_separator = separators
- elif indent is not None:
- self.item_separator = ','
- if default is not None:
- self.default = default
- self.encoding = encoding
-
- def default(self, o):
- """Implement this method in a subclass such that it returns
- a serializable object for ``o``, or calls the base implementation
- (to raise a ``TypeError``).
-
- For example, to support arbitrary iterators, you could
- implement default like this::
-
- def default(self, o):
- try:
- iterable = iter(o)
- except TypeError:
- pass
- else:
- return list(iterable)
- return JSONEncoder.default(self, o)
-
- """
- raise TypeError(repr(o) + " is not JSON serializable")
-
- def encode(self, o):
- """Return a JSON string representation of a Python data structure.
-
- >>> from simplejson import JSONEncoder
- >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
- '{"foo": ["bar", "baz"]}'
-
- """
- # This is for extremely simple cases and benchmarks.
- if isinstance(o, basestring):
- if isinstance(o, str):
- _encoding = self.encoding
- if (_encoding is not None
- and not (_encoding == 'utf-8')):
- o = o.decode(_encoding)
- if self.ensure_ascii:
- return encode_basestring_ascii(o)
- else:
- return encode_basestring(o)
- # This doesn't pass the iterator directly to ''.join() because the
- # exceptions aren't as detailed. The list call should be roughly
- # equivalent to the PySequence_Fast that ''.join() would do.
- chunks = self.iterencode(o, _one_shot=True)
- if not isinstance(chunks, (list, tuple)):
- chunks = list(chunks)
- if self.ensure_ascii:
- return ''.join(chunks)
- else:
- return u''.join(chunks)
-
- def iterencode(self, o, _one_shot=False):
- """Encode the given object and yield each string
- representation as available.
-
- For example::
-
- for chunk in JSONEncoder().iterencode(bigobject):
- mysocket.write(chunk)
-
- """
- if self.check_circular:
- markers = {}
- else:
- markers = None
- if self.ensure_ascii:
- _encoder = encode_basestring_ascii
- else:
- _encoder = encode_basestring
- if self.encoding != 'utf-8':
- def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
- if isinstance(o, str):
- o = o.decode(_encoding)
- return _orig_encoder(o)
-
- def floatstr(o, allow_nan=self.allow_nan,
- _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
- # Check for specials. Note that this type of test is processor
- # and/or platform-specific, so do tests which don't depend on
- # the internals.
-
- if o != o:
- text = 'NaN'
- elif o == _inf:
- text = 'Infinity'
- elif o == _neginf:
- text = '-Infinity'
- else:
- return _repr(o)
-
- if not allow_nan:
- raise ValueError(
- "Out of range float values are not JSON compliant: " +
- repr(o))
-
- return text
-
-
- key_memo = {}
- if (_one_shot and c_make_encoder is not None
- and self.indent is None):
- _iterencode = c_make_encoder(
- markers, self.default, _encoder, self.indent,
- self.key_separator, self.item_separator, self.sort_keys,
- self.skipkeys, self.allow_nan, key_memo, self.use_decimal)
- else:
- _iterencode = _make_iterencode(
- markers, self.default, _encoder, self.indent, floatstr,
- self.key_separator, self.item_separator, self.sort_keys,
- self.skipkeys, _one_shot, self.use_decimal)
- try:
- return _iterencode(o, 0)
- finally:
- key_memo.clear()
-
-
-class JSONEncoderForHTML(JSONEncoder):
- """An encoder that produces JSON safe to embed in HTML.
-
- To embed JSON content in, say, a script tag on a web page, the
- characters &, < and > should be escaped. They cannot be escaped
- with the usual entities (e.g. &amp;) because they are not expanded
- within <script> tags.
- """
-
- def encode(self, o):
- # Override JSONEncoder.encode because it has hacks for
- # performance that make things more complicated.
- chunks = self.iterencode(o, True)
- if self.ensure_ascii:
- return ''.join(chunks)
- else:
- return u''.join(chunks)
-
- def iterencode(self, o, _one_shot=False):
- chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot)
- for chunk in chunks:
- chunk = chunk.replace('&', '\\u0026')
- chunk = chunk.replace('<', '\\u003c')
- chunk = chunk.replace('>', '\\u003e')
- yield chunk
-
-
-def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
- _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
- _use_decimal,
- ## HACK: hand-optimized bytecode; turn globals into locals
- False=False,
- True=True,
- ValueError=ValueError,
- basestring=basestring,
- Decimal=Decimal,
- dict=dict,
- float=float,
- id=id,
- int=int,
- isinstance=isinstance,
- list=list,
- long=long,
- str=str,
- tuple=tuple,
- ):
-
- def _iterencode_list(lst, _current_indent_level):
- if not lst:
- yield '[]'
- return
- if markers is not None:
- markerid = id(lst)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = lst
- buf = '['
- if _indent is not None:
- _current_indent_level += 1
- newline_indent = '\n' + (_indent * _current_indent_level)
- separator = _item_separator + newline_indent
- buf += newline_indent
- else:
- newline_indent = None
- separator = _item_separator
- first = True
- for value in lst:
- if first:
- first = False
- else:
- buf = separator
- if isinstance(value, basestring):
- yield buf + _encoder(value)
- elif value is None:
- yield buf + 'null'
- elif value is True:
- yield buf + 'true'
- elif value is False:
- yield buf + 'false'
- elif isinstance(value, (int, long)):
- yield buf + str(value)
- elif isinstance(value, float):
- yield buf + _floatstr(value)
- elif _use_decimal and isinstance(value, Decimal):
- yield buf + str(value)
- else:
- yield buf
- if isinstance(value, (list, tuple)):
- chunks = _iterencode_list(value, _current_indent_level)
- elif isinstance(value, dict):
- chunks = _iterencode_dict(value, _current_indent_level)
- else:
- chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
- if newline_indent is not None:
- _current_indent_level -= 1
- yield '\n' + (_indent * _current_indent_level)
- yield ']'
- if markers is not None:
- del markers[markerid]
-
- def _iterencode_dict(dct, _current_indent_level):
- if not dct:
- yield '{}'
- return
- if markers is not None:
- markerid = id(dct)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = dct
- yield '{'
- if _indent is not None:
- _current_indent_level += 1
- newline_indent = '\n' + (_indent * _current_indent_level)
- item_separator = _item_separator + newline_indent
- yield newline_indent
- else:
- newline_indent = None
- item_separator = _item_separator
- first = True
- if _sort_keys:
- items = dct.items()
- items.sort(key=lambda kv: kv[0])
- else:
- items = dct.iteritems()
- for key, value in items:
- if isinstance(key, basestring):
- pass
- # JavaScript is weakly typed for these, so it makes sense to
- # also allow them. Many encoders seem to do something like this.
- elif isinstance(key, float):
- key = _floatstr(key)
- elif key is True:
- key = 'true'
- elif key is False:
- key = 'false'
- elif key is None:
- key = 'null'
- elif isinstance(key, (int, long)):
- key = str(key)
- elif _skipkeys:
- continue
- else:
- raise TypeError("key " + repr(key) + " is not a string")
- if first:
- first = False
- else:
- yield item_separator
- yield _encoder(key)
- yield _key_separator
- if isinstance(value, basestring):
- yield _encoder(value)
- elif value is None:
- yield 'null'
- elif value is True:
- yield 'true'
- elif value is False:
- yield 'false'
- elif isinstance(value, (int, long)):
- yield str(value)
- elif isinstance(value, float):
- yield _floatstr(value)
- elif _use_decimal and isinstance(value, Decimal):
- yield str(value)
- else:
- if isinstance(value, (list, tuple)):
- chunks = _iterencode_list(value, _current_indent_level)
- elif isinstance(value, dict):
- chunks = _iterencode_dict(value, _current_indent_level)
- else:
- chunks = _iterencode(value, _current_indent_level)
- for chunk in chunks:
- yield chunk
- if newline_indent is not None:
- _current_indent_level -= 1
- yield '\n' + (_indent * _current_indent_level)
- yield '}'
- if markers is not None:
- del markers[markerid]
-
- def _iterencode(o, _current_indent_level):
- if isinstance(o, basestring):
- yield _encoder(o)
- elif o is None:
- yield 'null'
- elif o is True:
- yield 'true'
- elif o is False:
- yield 'false'
- elif isinstance(o, (int, long)):
- yield str(o)
- elif isinstance(o, float):
- yield _floatstr(o)
- elif isinstance(o, (list, tuple)):
- for chunk in _iterencode_list(o, _current_indent_level):
- yield chunk
- elif isinstance(o, dict):
- for chunk in _iterencode_dict(o, _current_indent_level):
- yield chunk
- elif _use_decimal and isinstance(o, Decimal):
- yield str(o)
- else:
- if markers is not None:
- markerid = id(o)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = o
- o = _default(o)
- for chunk in _iterencode(o, _current_indent_level):
- yield chunk
- if markers is not None:
- del markers[markerid]
-
- return _iterencode
diff --git a/github3/packages/omnijson/packages/simplejson/ordered_dict.py b/github3/packages/omnijson/packages/simplejson/ordered_dict.py
deleted file mode 100644
index 87ad888..0000000
--- a/github3/packages/omnijson/packages/simplejson/ordered_dict.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""Drop-in replacement for collections.OrderedDict by Raymond Hettinger
-
-http://code.activestate.com/recipes/576693/
-
-"""
-from UserDict import DictMixin
-
-# Modified from original to support Python 2.4, see
-# http://code.google.com/p/simplejson/issues/detail?id=53
-try:
- all
-except NameError:
- def all(seq):
- for elem in seq:
- if not elem:
- return False
- return True
-
-class OrderedDict(dict, DictMixin):
-
- def __init__(self, *args, **kwds):
- if len(args) > 1:
- raise TypeError('expected at most 1 arguments, got %d' % len(args))
- try:
- self.__end
- except AttributeError:
- self.clear()
- self.update(*args, **kwds)
-
- def clear(self):
- self.__end = end = []
- end += [None, end, end] # sentinel node for doubly linked list
- self.__map = {} # key --> [key, prev, next]
- dict.clear(self)
-
- def __setitem__(self, key, value):
- if key not in self:
- end = self.__end
- curr = end[1]
- curr[2] = end[1] = self.__map[key] = [key, curr, end]
- dict.__setitem__(self, key, value)
-
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- key, prev, next = self.__map.pop(key)
- prev[2] = next
- next[1] = prev
-
- def __iter__(self):
- end = self.__end
- curr = end[2]
- while curr is not end:
- yield curr[0]
- curr = curr[2]
-
- def __reversed__(self):
- end = self.__end
- curr = end[1]
- while curr is not end:
- yield curr[0]
- curr = curr[1]
-
- def popitem(self, last=True):
- if not self:
- raise KeyError('dictionary is empty')
- # Modified from original to support Python 2.4, see
- # http://code.google.com/p/simplejson/issues/detail?id=53
- if last:
- key = reversed(self).next()
- else:
- key = iter(self).next()
- value = self.pop(key)
- return key, value
-
- def __reduce__(self):
- items = [[k, self[k]] for k in self]
- tmp = self.__map, self.__end
- del self.__map, self.__end
- inst_dict = vars(self).copy()
- self.__map, self.__end = tmp
- if inst_dict:
- return (self.__class__, (items,), inst_dict)
- return self.__class__, (items,)
-
- def keys(self):
- return list(self)
-
- setdefault = DictMixin.setdefault
- update = DictMixin.update
- pop = DictMixin.pop
- values = DictMixin.values
- items = DictMixin.items
- iterkeys = DictMixin.iterkeys
- itervalues = DictMixin.itervalues
- iteritems = DictMixin.iteritems
-
- def __repr__(self):
- if not self:
- return '%s()' % (self.__class__.__name__,)
- return '%s(%r)' % (self.__class__.__name__, self.items())
-
- def copy(self):
- return self.__class__(self)
-
- @classmethod
- def fromkeys(cls, iterable, value=None):
- d = cls()
- for key in iterable:
- d[key] = value
- return d
-
- def __eq__(self, other):
- if isinstance(other, OrderedDict):
- return len(self)==len(other) and \
- all(p==q for p, q in zip(self.items(), other.items()))
- return dict.__eq__(self, other)
-
- def __ne__(self, other):
- return not self == other
diff --git a/github3/packages/omnijson/packages/simplejson/scanner.py b/github3/packages/omnijson/packages/simplejson/scanner.py
deleted file mode 100644
index 80b40a9..0000000
--- a/github3/packages/omnijson/packages/simplejson/scanner.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""JSON token scanner
-"""
-import re
-
-__all__ = ['make_scanner']
-
-NUMBER_RE = re.compile(
- r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
- (re.VERBOSE | re.MULTILINE | re.DOTALL))
-
-def py_make_scanner(context):
- parse_object = context.parse_object
- parse_array = context.parse_array
- parse_string = context.parse_string
- match_number = NUMBER_RE.match
- encoding = context.encoding
- strict = context.strict
- parse_float = context.parse_float
- parse_int = context.parse_int
- parse_constant = context.parse_constant
- object_hook = context.object_hook
- object_pairs_hook = context.object_pairs_hook
- memo = context.memo
-
- def _scan_once(string, idx):
- try:
- nextchar = string[idx]
- except IndexError:
- raise StopIteration
-
- if nextchar == '"':
- return parse_string(string, idx + 1, encoding, strict)
- elif nextchar == '{':
- return parse_object((string, idx + 1), encoding, strict,
- _scan_once, object_hook, object_pairs_hook, memo)
- elif nextchar == '[':
- return parse_array((string, idx + 1), _scan_once)
- elif nextchar == 'n' and string[idx:idx + 4] == 'null':
- return None, idx + 4
- elif nextchar == 't' and string[idx:idx + 4] == 'true':
- return True, idx + 4
- elif nextchar == 'f' and string[idx:idx + 5] == 'false':
- return False, idx + 5
-
- m = match_number(string, idx)
- if m is not None:
- integer, frac, exp = m.groups()
- if frac or exp:
- res = parse_float(integer + (frac or '') + (exp or ''))
- else:
- res = parse_int(integer)
- return res, m.end()
- elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
- return parse_constant('NaN'), idx + 3
- elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
- return parse_constant('Infinity'), idx + 8
- elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
- return parse_constant('-Infinity'), idx + 9
- else:
- raise StopIteration
-
- def scan_once(string, idx):
- try:
- return _scan_once(string, idx)
- finally:
- memo.clear()
-
- return scan_once
-
-make_scanner = py_make_scanner
diff --git a/reqs.txt b/reqs.txt
index 35463f5..e97df0b 100644
--- a/reqs.txt
+++ b/reqs.txt
@@ -1,4 +1,2 @@
requests==0.7.4
python-dateutil==2.0
-decorator==3.3.1
-envoy==0.0.2
diff --git a/tests/gist_tests.py b/tests/gist_tests.py
new file mode 100644
index 0000000..7e31066
--- /dev/null
+++ b/tests/gist_tests.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+#
+# author: Antti Kaihola
+
+from datetime import datetime
+import json
+from mock import MagicMock, Mock, patch
+import unittest
+
+import github3.api
+import github3.handlers.gists
+import github3.handlers.user
+import github3.models
+
+
+GIST_RESPONSE = '{"user":{"gravatar_id":"123","url":"https://api.github.com/users/testuser","avatar_url":"https://secure.gravatar.com/avatar/123?d=https://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-1.png","login":"testuser","id":12345},"url":"https://api.github.com/gists/791920","history":[{"version":"deadbeefdeadbeefdeadbeefdeadbeefdeadbeef","url":"https://api.github.com/gists/791920/deadbeefdeadbeefdeadbeefdeadbeefdeadbeef","user":{"gravatar_id":"123","url":"https://api.github.com/users/testuser","avatar_url":"https://secure.gravatar.com/avatar/123?d=https://a248.e.akamai.net/assets.github.com%2Fimages%2Fgravatars%2Fgravatar-1.png","login":"testuser","id":12345},"committed_at":"2011-11-09T08:50:53Z","change_status":{"deletions":0,"additions":1,"total":1}}],"description":"description","created_at":"2011-11-09T08:50:53Z","public":true,"comments":0,"updated_at":"2011-11-09T08:50:53Z","git_pull_url":"git://gist.github.com/791920.git","forks":[],"git_push_url":"git@gist.github.com:791920.git","html_url":"https://gist.github.com/791920","id":"791920","files":{"filename.ext":{"raw_url":"https://gist.github.com/raw/791920/badafadacadafadabadacadafadabadabadacada/filename.ext","type":"text/plain","content":"content","size":7,"language":null,"filename":"filename.ext"}}}'
+
+
+class GistsTestCase(unittest.TestCase):
+ def test_create_gist(self):
+ """The HTTP request for creating a gist is correct"""
+ g = github3.api.Github()
+ g.session.auth = ('testuser', 'password')
+ u = github3.handlers.user.AuthUser(g)
+ gists = github3.handlers.gists.Gist(g)
+ OpenerDirector = MagicMock(name='OpenerDirector')
+ opener = OpenerDirector.return_value
+ response = opener.open.return_value
+ response.read.return_value = GIST_RESPONSE
+ response.code = 201
+
+ with patch('urllib2.OpenerDirector', OpenerDirector):
+
+ gist = gists.create_gist(
+ 'description',
+ files={'filename.ext': {'content': 'content'}})
+
+ request = opener.open.call_args[0][0]
+ self.assertEqual(request.method, 'POST')
+ self.assertEqual(request.get_full_url(),
+ 'https://api.github.com/gists?per_page=100')
+ self.assertEqual(request.headers['Authorization'],
+ 'Basic dGVzdHVzZXI6cGFzc3dvcmQ=')
+ self.assertEqual(json.loads(request.data),
+ {u'description': u'description',
+ u'files': {u'filename.ext': {u'content': u'content'}},
+ u'public': True})
+
+
+class GistHandlerTestCase(unittest.TestCase):
+ def test_response_conversion(self):
+ """A gist response is decoded correctly to a Gist object"""
+ g = github3.api.Github()
+ handler = github3.handlers.gists.Gist(g)
+ converter = handler._get_converter()
+ converter.inject(github3.models.Gist)
+
+ gist = converter.loads(json.loads(GIST_RESPONSE))
+
+ self.assertEqual(
+ {filename: value.__dict__
+ for filename, value in gist.files.iteritems()},
+ {u'filename.ext': {
+ 'content': u'content',
+ 'filename': u'filename.ext',
+ 'raw_url': (u'https://gist.github.com/'
+ u'raw/791920/'
+ u'badafadacadafadabadacadafadabadabadacada/'
+ u'filename.ext'),
+ 'size': 7,
+ 'type': u'text/plain'}})
+ self.assertEqual(gist.description, u'description')
+ self.assertEqual(gist.url, u'https://api.github.com/gists/791920')
+ self.assertEqual(gist.created_at, datetime(2011, 11, 9, 8, 50, 53))
+ self.assertEqual(gist.html_url, u'https://gist.github.com/791920')
+ self.assertEqual(gist.public, True)
+ self.assertEqual(
+ gist.user.__dict__,
+ {'avatar_url': (u'https://secure.gravatar.com/avatar/123'
+ u'?d=https://a248.e.akamai.net/'
+ u'assets.github.com%2Fimages%2Fgravatars'
+ u'%2Fgravatar-1.png'),
+ 'id': 12345,
+ 'login': u'testuser',
+ 'url': u'https://api.github.com/users/testuser'})
+ self.assertEqual(gist.git_pull_url, u'git://gist.github.com/791920.git')
+ self.assertEqual(gist.git_push_url, u'git@gist.github.com:791920.git')
+ self.assertEqual(gist.id, u'791920')
+ self.assertEqual(len(gist.history), 1)
+ h = gist.history[0]
+ self.assertEqual(h.change_status.__dict__, {'additions': 1, 'total': 1})
+ self.assertEqual(h.committed_at, datetime(2011, 11, 9, 8, 50, 53))
+ self.assertEqual(h.url,
+ u'https://api.github.com/gists/791920/'
+ u'deadbeefdeadbeefdeadbeefdeadbeefdeadbeef')
+ self.assertEqual(h.user.__dict__, gist.user.__dict__)
+ self.assertEqual(h.version, u'deadbeefdeadbeefdeadbeefdeadbeefdeadbeef')
+
+
+if __name__ == '__main__':
+ unittest.main()