author     Jason A. Donenfeld <Jason@zx2c4.com>  2010-04-28 05:37:57 -0400
committer  Jason A. Donenfeld <Jason@zx2c4.com>  2010-04-28 05:37:57 -0400
commit     58718aaeb42b7d7fb5330217da552e97a4a9e829 (patch)
tree       73462f3584edbc1f558e6a88d963c17de4034324
parent     Submit moments to api on cron job. (diff)
download   MomentSelector-58718aaeb42b7d7fb5330217da552e97a4a9e829.tar.xz
           MomentSelector-58718aaeb42b7d7fb5330217da552e97a4a9e829.zip
Updated google appengine sdk.
-rw-r--r--  google-appengine/RELEASE_NOTES  36
-rw-r--r--  google-appengine/VERSION  4
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/api_base_pb.py  0
-rwxr-xr-x  google-appengine/google/appengine/api/appinfo.py  21
-rwxr-xr-x  google-appengine/google/appengine/api/blobstore/blobstore.py  181
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py  242
-rwxr-xr-x  google-appengine/google/appengine/api/blobstore/blobstore_stub.py  50
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/capabilities/capability_service_pb.py  0
-rwxr-xr-x  google-appengine/google/appengine/api/croninfo.py  8
-rwxr-xr-x  google-appengine/google/appengine/api/datastore.py  137
-rwxr-xr-x  google-appengine/google/appengine/api/datastore_admin.py  7
-rwxr-xr-x  google-appengine/google/appengine/api/datastore_errors.py  14
-rwxr-xr-x  google-appengine/google/appengine/api/datastore_file_stub.py  44
-rwxr-xr-x  google-appengine/google/appengine/api/datastore_types.py  43
-rwxr-xr-x  google-appengine/google/appengine/api/dosinfo.py  5
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/images/images_service_pb.py  170
-rwxr-xr-x  google-appengine/google/appengine/api/images/images_stub.py  22
-rwxr-xr-x  google-appengine/google/appengine/api/labs/taskqueue/taskqueue.py  254
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py  424
-rwxr-xr-x  google-appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py  213
-rwxr-xr-x  google-appengine/google/appengine/api/mail.py  12
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/mail_service_pb.py  0
-rwxr-xr-x  google-appengine/google/appengine/api/memcache/__init__.py  33
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/memcache/memcache_service_pb.py  0
-rwxr-xr-x  google-appengine/google/appengine/api/namespace_manager/__init__.py  70
-rwxr-xr-x  google-appengine/google/appengine/api/namespace_manager/namespace_manager.py  97
-rwxr-xr-x  google-appengine/google/appengine/api/queueinfo.py  51
-rwxr-xr-x  google-appengine/google/appengine/api/urlfetch_errors.py  3
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/urlfetch_service_pb.py  94
-rwxr-xr-x  google-appengine/google/appengine/api/urlfetch_stub.py  21
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/user_service_pb.py  104
-rwxr-xr-x  google-appengine/google/appengine/api/validation.py  81
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/api/xmpp/xmpp_service_pb.py  0
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/base/capabilities_pb.py  0
-rwxr-xr-x  google-appengine/google/appengine/cron/groctimespecification.py  17
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/datastore/datastore_pb.py  196
-rw-r--r--  google-appengine/google/appengine/datastore/datastore_sqlite_stub.py  1501
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/datastore/entity_pb.py  0
-rw-r--r--  google-appengine/google/appengine/datastore/sortable_pb_encoder.py  282
-rwxr-xr-x  google-appengine/google/appengine/dist/py_imp.py  2
-rwxr-xr-x  google-appengine/google/appengine/ext/admin/__init__.py  52
-rw-r--r--  google-appengine/google/appengine/ext/admin/templates/datastore.html  8
-rw-r--r--  google-appengine/google/appengine/ext/admin/templates/datastore_edit.html  11
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/ext/appstats/datamodel_pb.py  0
-rwxr-xr-x  google-appengine/google/appengine/ext/appstats/sample_appengine_config.py  8
-rwxr-xr-x  google-appengine/google/appengine/ext/appstats/static/appstats_js.js  147
-rw-r--r--  google-appengine/google/appengine/ext/appstats/static/gantt.js  405
-rw-r--r--  google-appengine/google/appengine/ext/appstats/templates/details.html  19
-rwxr-xr-x  google-appengine/google/appengine/ext/blobstore/blobstore.py  58
-rwxr-xr-x  google-appengine/google/appengine/ext/bulkload/__init__.py  422
-rwxr-xr-x  google-appengine/google/appengine/ext/bulkload/bulkload_deprecated.py  359
-rwxr-xr-x  google-appengine/google/appengine/ext/db/__init__.py  160
-rwxr-xr-x  google-appengine/google/appengine/ext/db/stats.py  5
-rwxr-xr-x  google-appengine/google/appengine/ext/gql/__init__.py  19
-rwxr-xr-x  google-appengine/google/appengine/ext/remote_api/handler.py  2
-rwxr-xr-x [-rw-r--r--]  google-appengine/google/appengine/ext/remote_api/remote_api_pb.py  0
-rwxr-xr-x  google-appengine/google/appengine/ext/remote_api/remote_api_stub.py  7
-rwxr-xr-x  google-appengine/google/appengine/ext/webapp/__init__.py  13
-rwxr-xr-x  google-appengine/google/appengine/ext/webapp/blobstore_handlers.py  157
-rwxr-xr-x  google-appengine/google/appengine/tools/adaptive_thread_pool.py  3
-rwxr-xr-x  google-appengine/google/appengine/tools/appcfg.py  171
-rwxr-xr-x  google-appengine/google/appengine/tools/bulkloader.py  1
-rwxr-xr-x  google-appengine/google/appengine/tools/dev_appserver.py  53
-rwxr-xr-x  google-appengine/google/appengine/tools/dev_appserver_main.py  8
-rwxr-xr-x  google-appengine/google/appengine/tools/dev_appserver_upload.py  24
-rwxr-xr-x  google-appengine/google/appengine/tools/remote_api_shell.py  3
-rw-r--r--  google-appengine/lib/ipaddr/ipaddr/__init__.py  1972
-rwxr-xr-x  google-appengine/lib/ipaddr/ipaddr/ipaddr_test.py  923
-rwxr-xr-x  google-appengine/lib/ipaddr/ipaddr/setup.py  1
-rwxr-xr-x  google-appengine/new_project_template/main.py  15
70 files changed, 7011 insertions, 2454 deletions
diff --git a/google-appengine/RELEASE_NOTES b/google-appengine/RELEASE_NOTES
index 185fe23..a10ffc7 100644
--- a/google-appengine/RELEASE_NOTES
+++ b/google-appengine/RELEASE_NOTES
@@ -3,6 +3,42 @@ All rights reserved.
App Engine Python SDK - Release Notes
+Version 1.3.3
+=================================
+- A new experimental feature allows you to set dev_appserver datastore file
+ stub to use sqlite. To enable, set the flag --use_sqlite=true.
+- It is now possible to implement properties on db.Expando.
+- Fixed a datastore issue where setting a query offset to more than the
+  number of results threw an error.
+ http://code.google.com/p/googleappengine/issues/detail?id=2875
+- Fixed an issue that prevented the ByteString type from being viewed in the
+  Development Console datastore viewer.
+ http://code.google.com/p/googleappengine/issues/detail?id=1176
+
+Version 1.3.2
+=================================
+- New API to read the contents of uploaded Blobs (fetch_data)
+ http://code.google.com/p/googleappengine/issues/detail?id=2536
+- URLFetch now supports accessing ports 80-90, 440-450, and 1024-65535
+- Mail API now allows common document formats as attachments
+ http://code.google.com/p/googleappengine/issues/detail?id=494
+- The Task Queue API now supports adding multiple tasks in a single call to
+ Queue.add()
+- Fixed charset handling for inbound emails
+ http://code.google.com/p/googleappengine/issues/detail?id=2326
+- Fixed issue with compositing background colors in dev_appserver
+- New feature in the datastore to specify whether to use strong or eventually
+ consistent reads (the default is strong)
+- New datastore feature allows setting deadlines for operations
+- Increased the maximum Task Queue refill rate from 20/s to 50/s
+- Support for IP blacklisting to prevent denial of service (DoS) attacks
+- Fix an issue with Mac Launcher in Mac OSX 10.5.5
+ http://code.google.com/p/googleappengine/issues/detail?id=778
+- Fix issue with slow updates when there are many skipped files
+ http://code.google.com/p/googleappengine/issues/detail?id=2492
+- Fix issue with cursor not updating when using a GqlQuery
+ http://code.google.com/p/googleappengine/issues/detail?id=2757
+
Version 1.3.1
================================
- Datastore Query Cursors
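
The 1.3.2 note that Queue.add() now accepts multiple tasks can be exercised roughly as below; the queue name, handler URL, and params are illustrative, not part of this diff:

    from google.appengine.api.labs import taskqueue

    queue = taskqueue.Queue('background-work')
    tasks = [taskqueue.Task(url='/process', params={'item': str(i)})
             for i in range(3)]
    queue.add(tasks)  # a single call; Task.was_enqueued reports per-task
                      # success if add() raises partway through
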
diff --git a/google-appengine/VERSION b/google-appengine/VERSION
index 4df3b78..626918d 100644
--- a/google-appengine/VERSION
+++ b/google-appengine/VERSION
@@ -1,3 +1,3 @@
-release: "1.3.1"
-timestamp: 1263355585
+release: "1.3.3"
+timestamp: 1270494723
api_versions: ['1']
diff --git a/google-appengine/google/appengine/api/api_base_pb.py b/google-appengine/google/appengine/api/api_base_pb.py
index aa30190..aa30190 100644..100755
--- a/google-appengine/google/appengine/api/api_base_pb.py
+++ b/google-appengine/google/appengine/api/api_base_pb.py
diff --git a/google-appengine/google/appengine/api/appinfo.py b/google-appengine/google/appengine/api/appinfo.py
index 6ad6fbf..c5ce64c 100755
--- a/google-appengine/google/appengine/api/appinfo.py
+++ b/google-appengine/google/appengine/api/appinfo.py
@@ -37,10 +37,10 @@ from google.appengine.api import yaml_object
_URL_REGEX = r'(?!\^)/|\.|(\(.).*(?!\$).'
_FILES_REGEX = r'(?!\^).*(?!\$).'
-_DELTA_REGEX = r'([1-9][0-9]*)([DdHhMm]|[sS]?)'
+_DELTA_REGEX = r'([0-9]+)([DdHhMm]|[sS]?)'
_EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)
-_SERVICE_RE_STRING = r'(mail|xmpp_message)'
+_SERVICE_RE_STRING = r'(mail|xmpp_message|rest)'
_PAGE_NAME_REGEX = r'^.+$'
@@ -56,7 +56,19 @@ APP_ID_MAX_LEN = 100
MAJOR_VERSION_ID_MAX_LEN = 100
MAX_URL_MAPS = 100
-APPLICATION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % APP_ID_MAX_LEN
+PARTITION_SEPARATOR = '~'
+
+DOMAIN_SEPARATOR = ':'
+
+PARTITION_RE_STRING = (r'[a-z\d\-]{1,%d}\%s' %
+ (APP_ID_MAX_LEN, PARTITION_SEPARATOR))
+DOMAIN_RE_STRING = (r'(?!\-)[a-z\d\-\.]{1,%d}%s' %
+ (APP_ID_MAX_LEN, DOMAIN_SEPARATOR))
+DISPLAY_APP_ID_RE_STRING = (r'(?!-)[a-z\d\-]{1,%d}' % (APP_ID_MAX_LEN))
+APPLICATION_RE_STRING = (r'(?:%s)?(?:%s)?%s' %
+ (PARTITION_RE_STRING,
+ DOMAIN_RE_STRING,
+ DISPLAY_APP_ID_RE_STRING))
VERSION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % MAJOR_VERSION_ID_MAX_LEN
RUNTIME_RE_STRING = r'[a-z]{1,30}'
@@ -114,6 +126,7 @@ SKIP_FILES = 'skip_files'
SERVICES = 'inbound_services'
DERIVED_FILE_TYPE = 'derived_file_type'
JAVA_PRECOMPILED = 'java_precompiled'
+PYTHON_PRECOMPILED = 'python_precompiled'
ADMIN_CONSOLE = 'admin_console'
PAGES = 'pages'
@@ -361,7 +374,7 @@ class AppInfoExternal(validation.Validated):
DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES),
DERIVED_FILE_TYPE: validation.Optional(validation.Repeated(
- validation.Options(JAVA_PRECOMPILED))),
+ validation.Options(JAVA_PRECOMPILED, PYTHON_PRECOMPILED))),
ADMIN_CONSOLE: validation.Optional(AdminConsole),
}
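
For illustration, the widened application id regex now accepts an optional partition prefix and an optional domain component; a sketch reassembling the pieces above (the sample ids are hypothetical):

    import re

    APP_ID_MAX_LEN = 100
    PARTITION_RE = r'[a-z\d\-]{1,%d}~' % APP_ID_MAX_LEN
    DOMAIN_RE = r'(?!\-)[a-z\d\-\.]{1,%d}:' % APP_ID_MAX_LEN
    DISPLAY_RE = r'(?!-)[a-z\d\-]{1,%d}' % APP_ID_MAX_LEN
    APPLICATION_RE = '(?:%s)?(?:%s)?%s' % (PARTITION_RE, DOMAIN_RE, DISPLAY_RE)

    # A bare display id still matches, and so does the new
    # partition~domain:display form.
    for app_id in ('my-app', 's~example.com:my-app'):
        assert re.match('%s$' % APPLICATION_RE, app_id)
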
diff --git a/google-appengine/google/appengine/api/blobstore/blobstore.py b/google-appengine/google/appengine/api/blobstore/blobstore.py
index f22bb78..61d5a4e 100755
--- a/google-appengine/google/appengine/api/blobstore/blobstore.py
+++ b/google-appengine/google/appengine/api/blobstore/blobstore.py
@@ -36,27 +36,36 @@ from google.appengine.api.blobstore import blobstore_service_pb
from google.appengine.runtime import apiproxy_errors
-__all__ = ['BASE_CREATION_HEADER_FORMAT',
- 'BLOB_INFO_KIND',
+__all__ = ['BLOB_INFO_KIND',
'BLOB_KEY_HEADER',
- 'BlobKey',
- 'CreationFormatError',
+ 'BLOB_RANGE_HEADER',
+ 'MAX_BLOB_FETCH_SIZE',
'UPLOAD_INFO_CREATION_HEADER',
+ 'BlobFetchSizeTooLargeError',
+ 'BlobKey',
+ 'BlobNotFoundError',
+ 'DataIndexOutOfRangeError',
'Error',
'InternalError',
'create_upload_url',
'delete',
- 'parse_creation',
+ 'fetch_data',
]
+BlobKey = datastore_types.BlobKey
+
+
BLOB_INFO_KIND = '__BlobInfo__'
BLOB_KEY_HEADER = 'X-AppEngine-BlobKey'
-UPLOAD_INFO_CREATION_HEADER = 'X-AppEngine-Upload-Creation'
+BLOB_RANGE_HEADER = 'X-AppEngine-BlobRange'
+
+MAX_BLOB_FETCH_SIZE = (1 << 20) - (1 << 15)
-BASE_CREATION_HEADER_FORMAT = '%Y-%m-%d %H:%M:%S'
+UPLOAD_INFO_CREATION_HEADER = 'X-AppEngine-Upload-Creation'
+_BASE_CREATION_HEADER_FORMAT = '%Y-%m-%d %H:%M:%S'
class Error(Exception):
"""Base blobstore error type."""
@@ -66,7 +75,19 @@ class InternalError(Error):
"""Raised when an internal error occurs within API."""
-class CreationFormatError(Error):
+class BlobNotFoundError(Error):
+ """Raised when attempting to access blob data for non-existant blob."""
+
+
+class DataIndexOutOfRangeError(Error):
+ """Raised when attempting to access indexes out of range in wrong order."""
+
+
+class BlobFetchSizeTooLargeError(Error):
+ """Raised when attempting to fetch too large a block from a blob."""
+
+
+class _CreationFormatError(Error):
"""Raised when attempting to parse bad creation date format."""
@@ -79,6 +100,12 @@ def _ToBlobstoreError(error):
error_map = {
blobstore_service_pb.BlobstoreServiceError.INTERNAL_ERROR:
InternalError,
+ blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND:
+ BlobNotFoundError,
+ blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE:
+ DataIndexOutOfRangeError,
+ blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE:
+ BlobFetchSizeTooLargeError,
}
if error.application_error in error_map:
@@ -87,6 +114,68 @@ def _ToBlobstoreError(error):
return error
+def _format_creation(stamp):
+ """Format an upload creation timestamp with milliseconds.
+
+ This method is necessary to format a timestamp with microseconds on Python
+ versions before 2.6.
+
+ Cannot simply convert datetime objects to str because the microseconds are
+ stripped from the format when set to 0. The upload creation date format will
+ always have microseconds padded out to 6 places.
+
+ Args:
+ stamp: datetime.datetime object to format.
+
+ Returns:
+ Formatted datetime as Python 2.6 format '%Y-%m-%d %H:%M:%S.%f'.
+ """
+ return '%s.%06d' % (stamp.strftime(_BASE_CREATION_HEADER_FORMAT),
+ stamp.microsecond)
+
+
+def _parse_creation(creation_string, field_name):
+ """Parses upload creation string from header format.
+
+ Parse creation date of the format:
+
+ YYYY-mm-dd HH:MM:SS.ffffff
+
+ Y: Year
+ m: Month (01-12)
+ d: Day (01-31)
+ H: Hour (00-23)
+ M: Minute (00-59)
+ S: Second (00-59)
+ f: Microsecond
+
+ Args:
+ creation_string: String creation date format.
+ field_name: Name of the field the creation string came from; used in
+ error messages.
+
+ Returns:
+ datetime object parsed from creation_string.
+
+ Raises:
+ _CreationFormatError when the creation string is formatted incorrectly.
+ """
+ split_creation_string = creation_string.split('.', 1)
+ if len(split_creation_string) != 2:
+ raise _CreationFormatError(
+ 'Could not parse creation %s in field %s.' % (creation_string,
+ field_name))
+ timestamp_string, microsecond = split_creation_string
+
+ try:
+ timestamp = time.strptime(timestamp_string,
+ _BASE_CREATION_HEADER_FORMAT)
+ microsecond = int(microsecond)
+ except ValueError:
+ raise _CreationFormatError('Could not parse creation %s in field %s.'
+ % (creation_string, field_name))
+
+ return datetime.datetime(*timestamp[:6] + tuple([microsecond]))
+
+
def create_upload_url(success_path,
_make_sync_call=apiproxy_stub_map.MakeSyncCall):
"""Create upload URL for POST form.
@@ -127,47 +216,63 @@ def delete(blob_keys, _make_sync_call=apiproxy_stub_map.MakeSyncCall):
raise _ToBlobstoreError(e)
-def parse_creation(creation_string):
- """Parses creation string from header format.
-
- Parse creation date of the format:
+def fetch_data(blob_key, start_index, end_index,
+ _make_sync_call=apiproxy_stub_map.MakeSyncCall):
+ """Fetch data for blob.
- YYYY-mm-dd HH:MM:SS.ffffff
-
- Y: Year
- m: Month (01-12)
- d: Day (01-31)
- H: Hour (00-24)
- M: Minute (00-59)
- S: Second (00-59)
- f: Microsecond
+ See docstring for ext.blobstore.fetch_data for more details.
Args:
- creation_string: String creation date format.
+ blob_key: BlobKey, str or unicode representation of BlobKey of
+ blob to fetch data from.
+ start_index: Start index of blob data to fetch. May not be negative.
+ end_index: End index (inclusive) of blob data to fetch. Must be
+ >= start_index.
Returns:
- datetime object parsed from creation_string.
+ str containing partial data of blob. See docstring for
+ ext.blobstore.fetch_data for more details.
Raises:
- CreationFormatError when the creation string is formatted incorrectly.
+ See docstring for ext.blobstore.fetch_data for more details.
"""
+ if not isinstance(start_index, (int, long)):
+ raise TypeError('start_index must be integer.')
- def split(string, by, count):
- result = string.split(by, count)
- if len(result) != count + 1:
- raise CreationFormatError(
- 'Could not parse creation %s.' % creation_string)
- return result
+ if not isinstance(end_index, (int, long)):
+ raise TypeError('end_index must be integer.')
- timestamp_string, microsecond = split(creation_string, '.', 1)
+ if isinstance(blob_key, BlobKey):
+ blob_key = str(blob_key).decode('utf-8')
+ elif isinstance(blob_key, str):
+ blob_key = blob_key.decode('utf-8')
+ elif not isinstance(blob_key, unicode):
+ raise TypeError('Blob-key must be str, unicode or BlobKey: %s' % blob_key)
- try:
- timestamp = time.strptime(timestamp_string, BASE_CREATION_HEADER_FORMAT)
- microsecond = int(microsecond)
- except ValueError:
- raise CreationFormatError('Could not parse creation %s.' % creation_string)
+ if start_index < 0:
+ raise DataIndexOutOfRangeError(
+ 'May not fetch blob at negative index.')
- return datetime.datetime(*timestamp[:6] + tuple([microsecond]))
+ if end_index < start_index:
+ raise DataIndexOutOfRangeError(
+ 'Start index %d > end index %d' % (start_index, end_index))
+ fetch_size = end_index - start_index + 1
-BlobKey = datastore_types.BlobKey
+ if fetch_size > MAX_BLOB_FETCH_SIZE:
+ raise BlobFetchSizeTooLargeError(
+ 'Blob fetch size is too large: %d' % fetch_size)
+
+ request = blobstore_service_pb.FetchDataRequest()
+ response = blobstore_service_pb.FetchDataResponse()
+
+ request.set_blob_key(blob_key)
+ request.set_start_index(start_index)
+ request.set_end_index(end_index)
+
+ try:
+ _make_sync_call('blobstore', 'FetchData', request, response)
+ except apiproxy_errors.ApplicationError, e:
+ raise _ToBlobstoreError(e)
+
+ return response.data()
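
A minimal usage sketch of the new fetch_data() call above; the blob key here is hypothetical:

    from google.appengine.api import blobstore

    # Read the first 1000 bytes of a previously uploaded blob. Both indexes
    # are inclusive (fetch_size = end_index - start_index + 1 above), and a
    # single fetch may not exceed MAX_BLOB_FETCH_SIZE bytes.
    blob_key = blobstore.BlobKey('hypothetical-blob-key')
    data = blobstore.fetch_data(blob_key, 0, 999)
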
diff --git a/google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py b/google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py
index 6efecbb..a0f8bc4 100644..100755
--- a/google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py
+++ b/google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py
@@ -29,12 +29,18 @@ class BlobstoreServiceError(ProtocolBuffer.ProtocolMessage):
INTERNAL_ERROR = 1
URL_TOO_LONG = 2
PERMISSION_DENIED = 3
+ BLOB_NOT_FOUND = 4
+ DATA_INDEX_OUT_OF_RANGE = 5
+ BLOB_FETCH_SIZE_TOO_LARGE = 6
_ErrorCode_NAMES = {
0: "OK",
1: "INTERNAL_ERROR",
2: "URL_TOO_LONG",
3: "PERMISSION_DENIED",
+ 4: "BLOB_NOT_FOUND",
+ 5: "DATA_INDEX_OUT_OF_RANGE",
+ 6: "BLOB_FETCH_SIZE_TOO_LARGE",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -350,6 +356,240 @@ class DeleteBlobRequest(ProtocolBuffer.ProtocolMessage):
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+class FetchDataRequest(ProtocolBuffer.ProtocolMessage):
+ has_blob_key_ = 0
+ blob_key_ = ""
+ has_start_index_ = 0
+ start_index_ = 0
+ has_end_index_ = 0
+ end_index_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def blob_key(self): return self.blob_key_
+
+ def set_blob_key(self, x):
+ self.has_blob_key_ = 1
+ self.blob_key_ = x
+
+ def clear_blob_key(self):
+ if self.has_blob_key_:
+ self.has_blob_key_ = 0
+ self.blob_key_ = ""
+
+ def has_blob_key(self): return self.has_blob_key_
+
+ def start_index(self): return self.start_index_
+
+ def set_start_index(self, x):
+ self.has_start_index_ = 1
+ self.start_index_ = x
+
+ def clear_start_index(self):
+ if self.has_start_index_:
+ self.has_start_index_ = 0
+ self.start_index_ = 0
+
+ def has_start_index(self): return self.has_start_index_
+
+ def end_index(self): return self.end_index_
+
+ def set_end_index(self, x):
+ self.has_end_index_ = 1
+ self.end_index_ = x
+
+ def clear_end_index(self):
+ if self.has_end_index_:
+ self.has_end_index_ = 0
+ self.end_index_ = 0
+
+ def has_end_index(self): return self.has_end_index_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_blob_key()): self.set_blob_key(x.blob_key())
+ if (x.has_start_index()): self.set_start_index(x.start_index())
+ if (x.has_end_index()): self.set_end_index(x.end_index())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_blob_key_ != x.has_blob_key_: return 0
+ if self.has_blob_key_ and self.blob_key_ != x.blob_key_: return 0
+ if self.has_start_index_ != x.has_start_index_: return 0
+ if self.has_start_index_ and self.start_index_ != x.start_index_: return 0
+ if self.has_end_index_ != x.has_end_index_: return 0
+ if self.has_end_index_ and self.end_index_ != x.end_index_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_blob_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: blob_key not set.')
+ if (not self.has_start_index_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: start_index not set.')
+ if (not self.has_end_index_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: end_index not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.blob_key_))
+ n += self.lengthVarInt64(self.start_index_)
+ n += self.lengthVarInt64(self.end_index_)
+ return n + 3
+
+ def Clear(self):
+ self.clear_blob_key()
+ self.clear_start_index()
+ self.clear_end_index()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.blob_key_)
+ out.putVarInt32(16)
+ out.putVarInt64(self.start_index_)
+ out.putVarInt32(24)
+ out.putVarInt64(self.end_index_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_blob_key(d.getPrefixedString())
+ continue
+ if tt == 16:
+ self.set_start_index(d.getVarInt64())
+ continue
+ if tt == 24:
+ self.set_end_index(d.getVarInt64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_blob_key_: res+=prefix+("blob_key: %s\n" % self.DebugFormatString(self.blob_key_))
+ if self.has_start_index_: res+=prefix+("start_index: %s\n" % self.DebugFormatInt64(self.start_index_))
+ if self.has_end_index_: res+=prefix+("end_index: %s\n" % self.DebugFormatInt64(self.end_index_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kblob_key = 1
+ kstart_index = 2
+ kend_index = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "blob_key",
+ 2: "start_index",
+ 3: "end_index",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class FetchDataResponse(ProtocolBuffer.ProtocolMessage):
+ has_data_ = 0
+ data_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def data(self): return self.data_
+
+ def set_data(self, x):
+ self.has_data_ = 1
+ self.data_ = x
+
+ def clear_data(self):
+ if self.has_data_:
+ self.has_data_ = 0
+ self.data_ = ""
+
+ def has_data(self): return self.has_data_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_data()): self.set_data(x.data())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_data_ != x.has_data_: return 0
+ if self.has_data_ and self.data_ != x.data_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_data_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: data not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.data_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_data()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8002)
+ out.putPrefixedString(self.data_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8002:
+ self.set_data(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_data_: res+=prefix+("data: %s\n" % self.DebugFormatString(self.data_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kdata = 1000
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1000: "data",
+ }, 1000)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1000: ProtocolBuffer.Encoder.STRING,
+ }, 1000, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
class DecodeBlobKeyRequest(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
@@ -529,4 +769,4 @@ class DecodeBlobKeyResponse(ProtocolBuffer.ProtocolMessage):
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
-__all__ = ['BlobstoreServiceError','CreateUploadURLRequest','CreateUploadURLResponse','DeleteBlobRequest','DecodeBlobKeyRequest','DecodeBlobKeyResponse']
+__all__ = ['BlobstoreServiceError','CreateUploadURLRequest','CreateUploadURLResponse','DeleteBlobRequest','FetchDataRequest','FetchDataResponse','DecodeBlobKeyRequest','DecodeBlobKeyResponse']
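
The generated message classes above can be driven directly; a sketch with a hypothetical key, assuming the standard ProtocolMessage serialization methods:

    from google.appengine.api.blobstore import blobstore_service_pb

    request = blobstore_service_pb.FetchDataRequest()
    request.set_blob_key('hypothetical-blob-key')
    request.set_start_index(0)
    request.set_end_index(1023)
    assert request.IsInitialized()  # all three required fields are set
    wire_bytes = request.Encode()   # wire format, ready for the API proxy
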
diff --git a/google-appengine/google/appengine/api/blobstore/blobstore_stub.py b/google-appengine/google/appengine/api/blobstore/blobstore_stub.py
index 3150bd6..3855341 100755
--- a/google-appengine/google/appengine/api/blobstore/blobstore_stub.py
+++ b/google-appengine/google/appengine/api/blobstore/blobstore_stub.py
@@ -34,6 +34,7 @@ from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
+from google.appengine.api import blobstore
from google.appengine.api.blobstore import blobstore_service_pb
from google.appengine.runtime import apiproxy_errors
@@ -232,7 +233,54 @@ class BlobstoreServiceStub(apiproxy_stub.APIProxyStub):
response: Not used but should be a VoidProto.
"""
for blob_key in request.blob_key_list():
- key = datastore_types.Key.from_path('__BlobInfo__', str(blob_key))
+ key = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND,
+ str(blob_key))
datastore.Delete(key)
self.__storage.DeleteBlob(blob_key)
+
+ def _Dynamic_FetchData(self, request, response):
+ """Fetch a blob fragment from a blob by its blob-key.
+
+ Fetches a blob fragment using its blob-key. Both the start and end
+ indexes are inclusive. Valid requests for information outside of
+ the range of the blob return a partial string or empty string if entirely
+ out of range.
+
+ Args:
+ request: A fully initialized FetchDataRequest instance.
+ response: A FetchDataResponse instance.
+
+ Raises:
+ ApplicationError when application has the following errors:
+ DATA_INDEX_OUT_OF_RANGE: Index is negative or start > end.
+ BLOB_FETCH_SIZE_TOO_LARGE: Requested blob fragment is larger than
+ MAX_BLOB_FETCH_SIZE.
+ BLOB_NOT_FOUND: If invalid blob-key is provided or is not found.
+ """
+ start_index = request.start_index()
+ if start_index < 0:
+ raise apiproxy_errors.ApplicationError(
+ blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE)
+
+ end_index = request.end_index()
+ if end_index < start_index:
+ raise apiproxy_errors.ApplicationError(
+ blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE)
+
+ fetch_size = end_index - start_index + 1
+ if fetch_size > blobstore.MAX_BLOB_FETCH_SIZE:
+ raise apiproxy_errors.ApplicationError(
+ blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE)
+
+ blob_key = request.blob_key()
+ blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND, blob_key)
+ try:
+ datastore.Get(blob_info_key)
+ except datastore_errors.EntityNotFoundError, err:
+ raise apiproxy_errors.ApplicationError(
+ blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND)
+
+ blob_file = self.__storage.OpenBlob(blob_key)
+ blob_file.seek(start_index)
+ response.set_data(blob_file.read(fetch_size))
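
The stub's range handling, restated as a plain-Python sketch: indexes are inclusive, and reads that run past the end of the blob return whatever data exists:

    blob_data = 'abcdef'                      # stand-in for stored blob bytes
    start_index, end_index = 2, 10            # end_index beyond the last byte
    fetch_size = end_index - start_index + 1  # same formula as the stub
    print(blob_data[start_index:start_index + fetch_size])  # 'cdef' (partial)
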
diff --git a/google-appengine/google/appengine/api/capabilities/capability_service_pb.py b/google-appengine/google/appengine/api/capabilities/capability_service_pb.py
index 9f9ba29..9f9ba29 100644..100755
--- a/google-appengine/google/appengine/api/capabilities/capability_service_pb.py
+++ b/google-appengine/google/appengine/api/capabilities/capability_service_pb.py
diff --git a/google-appengine/google/appengine/api/croninfo.py b/google-appengine/google/appengine/api/croninfo.py
index 0eab26e..6967d06 100755
--- a/google-appengine/google/appengine/api/croninfo.py
+++ b/google-appengine/google/appengine/api/croninfo.py
@@ -33,6 +33,7 @@ except ImportError:
pytz = None
from google.appengine.cron import groc
+from google.appengine.cron import groctimespecification
from google.appengine.api import validation
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_listener
@@ -46,15 +47,14 @@ _DESCRIPTION_REGEX = r'^.{0,499}$'
class GrocValidator(validation.Validator):
"""Checks that a schedule is in valid groc format."""
- def Validate(self, value):
+ def Validate(self, value, key=None):
"""Validates a schedule."""
if value is None:
raise validation.MissingAttribute('schedule must be specified')
if not isinstance(value, basestring):
raise TypeError('schedule must be a string, not \'%r\''%type(value))
- schedule = groc.CreateParser(value)
try:
- schedule.timespec()
+ groctimespecification.GrocTimeSpecification(value)
except groc.GrocException, e:
raise validation.ValidationError('schedule \'%s\' failed to parse: %s'%(
value, e.args[0]))
@@ -64,7 +64,7 @@ class GrocValidator(validation.Validator):
class TimezoneValidator(validation.Validator):
"""Checks that a timezone can be correctly parsed and is known."""
- def Validate(self, value):
+ def Validate(self, value, key=None):
"""Validates a timezone."""
if value is None:
return
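
GrocTimeSpecification(), which the validator above now calls, can also compute upcoming run times; a sketch, assuming the SDK's GetMatches() helper:

    import datetime

    from google.appengine.cron import groctimespecification

    spec = groctimespecification.GrocTimeSpecification('every 12 hours')
    # GetMatches(start, n) returns the next n scheduled datetimes.
    print(spec.GetMatches(datetime.datetime.utcnow(), 2))
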
diff --git a/google-appengine/google/appengine/api/datastore.py b/google-appengine/google/appengine/api/datastore.py
index bcd4aea..0d2e83e 100755
--- a/google-appengine/google/appengine/api/datastore.py
+++ b/google-appengine/google/appengine/api/datastore.py
@@ -81,6 +81,15 @@ _txes = {}
_ALLOWED_API_KWARGS = frozenset(['rpc'])
+_ALLOWED_FAILOVER_READ_METHODS = set(
+ ('Get', 'RunQuery', 'RunCompiledQuery', 'Count', 'Next'))
+
+ARBITRARY_FAILOVER_MS = -1
+
+STRONG_CONSISTENCY = 0
+EVENTUAL_CONSISTENCY = 1
+
+
def NormalizeAndTypeCheck(arg, types):
"""Normalizes and type checks the given argument.
@@ -178,18 +187,21 @@ def _MakeSyncCall(service, call, request, response, rpc=None):
return response
-def CreateRPC(service='datastore_v3', deadline=None, callback=None):
+def CreateRPC(service='datastore_v3', deadline=None, callback=None,
+ read_policy=STRONG_CONSISTENCY):
"""Create an rpc for use in configuring datastore calls.
Args:
deadline: float, deadline for calls in seconds.
callback: callable, a callback triggered when this rpc completes,
accepts one argument: the returned rpc.
+ read_policy: flag, set to EVENTUAL_CONSISTENCY to enable eventually
+ consistent reads
Returns:
A datastore.DatastoreRPC instance.
"""
- return DatastoreRPC(service, deadline, callback)
+ return DatastoreRPC(service, deadline, callback, read_policy)
class DatastoreRPC(apiproxy_stub_map.UserRPC):
@@ -203,6 +215,20 @@ class DatastoreRPC(apiproxy_stub_map.UserRPC):
deadline, on API calls. It will be used to make the actual call.
"""
+ def __init__(self, service='datastore_v3', deadline=None, callback=None,
+ read_policy=STRONG_CONSISTENCY):
+ super(DatastoreRPC, self).__init__(service, deadline, callback)
+ self.read_policy = read_policy
+
+ def make_call(self, call, request, response):
+ if self.read_policy == EVENTUAL_CONSISTENCY:
+ if call not in _ALLOWED_FAILOVER_READ_METHODS:
+ raise datastore_errors.BadRequestError(
+ 'read_policy is only supported on read operations.')
+ if call != 'Next':
+ request.set_failover_ms(ARBITRARY_FAILOVER_MS)
+ super(DatastoreRPC, self).make_call(call, request, response)
+
def clone(self):
"""Make a shallow copy of this instance.
@@ -211,7 +237,8 @@ class DatastoreRPC(apiproxy_stub_map.UserRPC):
developer's easy control.
"""
assert self.state == apiproxy_rpc.RPC.IDLE
- return self.__class__(self.service, self.deadline, self.callback)
+ return self.__class__(
+ self.service, self.deadline, self.callback, self.read_policy)
def Put(entities, **kwargs):
@@ -368,7 +395,7 @@ class Entity(dict):
provides dictionary-style access to properties.
"""
def __init__(self, kind, parent=None, _app=None, name=None, id=None,
- unindexed_properties=[], _namespace=None):
+ unindexed_properties=[], namespace=None, **kwds):
"""Constructor. Takes the kind and transaction root, which cannot be
changed after the entity is constructed, and an optional parent. Raises
BadArgumentError or BadKeyError if kind is invalid or parent is not an
@@ -386,12 +413,23 @@ class Entity(dict):
# if provided, a sequence of property names that should not be indexed
# by the built-in single property indices.
unindexed_properties: list or tuple of strings
+ namespace: string
+ # if provided, overrides the default namespace_manager setting.
"""
ref = entity_pb.Reference()
_app = datastore_types.ResolveAppId(_app)
- _namespace = datastore_types.ResolveNamespace(_namespace)
ref.set_app(_app)
- datastore_types.SetNamespace(ref, _namespace)
+
+ _namespace = kwds.pop('_namespace', None)
+ if kwds:
+ raise datastore_errors.BadArgumentError(
+ 'Excess keyword arguments ' + repr(kwds))
+
+ if namespace is None:
+ namespace = _namespace
+ elif _namespace is not None:
+ raise datastore_errors.BadArgumentError(
+ "Must not set both _namespace and namespace parameters.")
datastore_types.ValidateString(kind, 'kind',
datastore_errors.BadArgumentError)
@@ -401,12 +439,17 @@ class Entity(dict):
raise datastore_errors.BadArgumentError(
" %s doesn't match parent's app %s" %
(_app, parent.app()))
- if _namespace != parent.namespace():
+ if namespace is None:
+ namespace = parent.namespace()
+ elif namespace != parent.namespace():
raise datastore_errors.BadArgumentError(
" %s doesn't match parent's namespace %s" %
- (_namespace, parent.namespace()))
+ (namespace, parent.namespace()))
ref.CopyFrom(parent._Key__reference)
+ namespace = datastore_types.ResolveNamespace(namespace)
+ datastore_types.SetNamespace(ref, namespace)
+
last_path = ref.mutable_path().add_element()
last_path.set_type(kind.encode('utf-8'))
@@ -699,12 +742,13 @@ class Entity(dict):
unindexed_properties = [p.name() for p in pb.raw_property_list()]
- namespace = pb.key().name_space()
- if not namespace:
- namespace = None
+ if pb.key().has_name_space():
+ namespace = pb.key().name_space()
+ else:
+ namespace = ''
e = Entity(unicode(last_path.type().decode('utf-8')),
unindexed_properties=unindexed_properties,
- _app=pb.key().app(), _namespace=namespace)
+ _app=pb.key().app(), namespace=namespace)
ref = e.__key._Key__reference
ref.CopyFrom(pb.key())
@@ -716,8 +760,8 @@ class Entity(dict):
value = datastore_types.FromPropertyPb(prop)
except (AssertionError, AttributeError, TypeError, ValueError), e:
raise datastore_errors.Error(
- 'Property %s is corrupt in the datastore. %s: %s' %
- (e.__class__, prop.name(), e))
+ 'Property %s is corrupt in the datastore:\n%s' %
+ (prop.name(), traceback.format_exc()))
multiple = prop.multiple()
if multiple:
@@ -727,7 +771,7 @@ class Entity(dict):
cur_value = temporary_values.get(name)
if cur_value is None:
temporary_values[name] = value
- elif not multiple:
+ elif not multiple or not isinstance(cur_value, list):
raise datastore_errors.Error(
'Property %s is corrupt in the datastore; it has multiple '
'values, but is not marked as multiply valued.' % name)
@@ -858,7 +902,7 @@ class Query(dict):
__inequality_count = 0
def __init__(self, kind=None, filters={}, _app=None, keys_only=False,
- compile=True, cursor=None, _namespace=None):
+ compile=True, cursor=None, namespace=None, **kwds):
"""Constructor.
Raises BadArgumentError if kind is not a string. Raises BadValueError or
@@ -870,7 +914,20 @@ class Query(dict):
kind: string
filters: dict
keys_only: boolean
+ namespace: string
"""
+
+ _namespace = kwds.pop('_namespace', None)
+ if kwds:
+ raise datastore_errors.BadArgumentError(
+ 'Excess keyword arguments ' + repr(kwds))
+
+ if namespace is None:
+ namespace = _namespace
+ elif _namespace is not None:
+ raise datastore_errors.BadArgumentError(
+ "Must not set both _namespace and namespace parameters.")
+
if kind is not None:
datastore_types.ValidateString(kind, 'kind',
datastore_errors.BadArgumentError)
@@ -881,7 +938,7 @@ class Query(dict):
self.update(filters)
self.__app = datastore_types.ResolveAppId(_app)
- self.__namespace = datastore_types.ResolveNamespace(_namespace)
+ self.__namespace = datastore_types.ResolveNamespace(namespace)
self.__keys_only = keys_only
self.__compile = compile
self.__cursor = cursor
@@ -2172,9 +2229,8 @@ def RunInTransactionCustomRetries(retries, function, *args, **kwargs):
_MakeSyncCall('datastore_v3', 'Rollback',
tx.handle, api_base_pb.VoidProto())
except:
- exc_info = sys.exc_info()
logging.info('Exception sending Rollback:\n' +
- ''.join(traceback.format_exception(*exc_info)))
+ traceback.format_exc())
type, value, trace = original_exception
if type is datastore_errors.Rollback:
@@ -2410,16 +2466,35 @@ def _ToDatastoreError(err):
Returns:
a subclass of datastore_errors.Error
"""
- errors = {
- datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError,
- datastore_pb.Error.CONCURRENT_TRANSACTION:
- datastore_errors.TransactionFailedError,
- datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError,
- datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError,
- datastore_pb.Error.TIMEOUT: datastore_errors.Timeout,
- }
-
- if err.application_error in errors:
- return errors[err.application_error](err.error_detail)
+ return _DatastoreExceptionFromErrorCodeAndDetail(err.application_error,
+ err.error_detail)
+
+
+def _DatastoreExceptionFromErrorCodeAndDetail(error, detail):
+ """Converts a datastore_pb.Error into a datastore_errors.Error.
+
+ Args:
+ error: A member of the datastore_pb.Error enumeration.
+ detail: A string providing extra details about the error.
+
+ Returns:
+ A subclass of datastore_errors.Error.
+ """
+ exception_class = {
+ datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError,
+ datastore_pb.Error.CONCURRENT_TRANSACTION:
+ datastore_errors.TransactionFailedError,
+ datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError,
+ datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError,
+ datastore_pb.Error.TIMEOUT: datastore_errors.Timeout,
+ datastore_pb.Error.BIGTABLE_ERROR: datastore_errors.Timeout,
+ datastore_pb.Error.COMMITTED_BUT_STILL_APPLYING:
+ datastore_errors.CommittedButStillApplying,
+ datastore_pb.Error.CAPABILITY_DISABLED:
+ apiproxy_errors.CapabilityDisabledError,
+ }.get(error, datastore_errors.Error)
+
+ if detail is None:
+ return exception_class()
else:
- return datastore_errors.Error(err.error_detail)
+ return exception_class(detail)
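
Two of the changes above lend themselves to short usage sketches. First, the new read_policy argument; the key is assumed to refer to an existing entity:

    from google.appengine.api import datastore, datastore_errors

    # Eventually consistent reads are only legal on read operations;
    # DatastoreRPC.make_call() above rejects anything else.
    rpc = datastore.CreateRPC(deadline=5,
                              read_policy=datastore.EVENTUAL_CONSISTENCY)
    entity = datastore.Get(key, rpc=rpc)  # key: an existing datastore.Key

Second, the error mapping now produces CommittedButStillApplying, which subclasses Timeout, so existing handlers that catch Timeout keep working:

    try:
        datastore.Put(entity)  # entity: a datastore.Entity instance
    except datastore_errors.CommittedButStillApplying:
        # The write committed; index rows may finish applying shortly after.
        pass
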
diff --git a/google-appengine/google/appengine/api/datastore_admin.py b/google-appengine/google/appengine/api/datastore_admin.py
index 4b26081..d6a80d4 100755
--- a/google-appengine/google/appengine/api/datastore_admin.py
+++ b/google-appengine/google/appengine/api/datastore_admin.py
@@ -29,7 +29,8 @@ from google.appengine.datastore import datastore_index
from google.appengine.datastore import datastore_pb
from google.appengine.runtime import apiproxy_errors
-def GetSchema(_app=None, properties=True, start_kind=None, end_kind=None):
+def GetSchema(_app=None, namespace=None, properties=True, start_kind=None,
+ end_kind=None):
"""Infers an app's schema from the entities in the datastore.
Note that the PropertyValue PBs in the returned EntityProtos are empty
@@ -42,12 +43,16 @@ def GetSchema(_app=None, properties=True, start_kind=None, end_kind=None):
properties: boolean, whether to include property names and types
start_kind, end_kind: optional range endpoints for the kinds to return,
compared lexicographically
+ namespace: string, specified namespace of schema to be fetched
Returns:
list of entity_pb.EntityProto, with kind and property names and types
"""
req = datastore_pb.GetSchemaRequest()
req.set_app(datastore_types.ResolveAppId(_app))
+ namespace = datastore_types.ResolveNamespace(namespace)
+ if namespace:
+ req.set_name_space(namespace)
req.set_properties(properties)
if start_kind is not None:
req.set_start_kind(start_kind)
diff --git a/google-appengine/google/appengine/api/datastore_errors.py b/google-appengine/google/appengine/api/datastore_errors.py
index ff53ba2..f1acdf3 100755
--- a/google-appengine/google/appengine/api/datastore_errors.py
+++ b/google-appengine/google/appengine/api/datastore_errors.py
@@ -99,7 +99,15 @@ class NeedIndexError(Error):
"""
class Timeout(Error):
- """The datastore operation timed out. This can happen when you attempt to
- put, get, or delete too many entities or an entity with too many properties,
- or if the datastore is overloaded or having trouble.
+ """The datastore operation timed out, or the data was temporarily
+ unavailable. This can happen when you attempt to put, get, or delete too
+ many entities or an entity with too many properties, or if the datastore is
+ overloaded or having trouble.
+ """
+
+class CommittedButStillApplying(Timeout):
+ """The write or transaction was committed, but some entities or index rows
+ may not have been fully updated. Those updates should automatically be
+ applied soon. You can roll them forward immediately by reading one of the
+ entities inside a transaction.
"""
diff --git a/google-appengine/google/appengine/api/datastore_file_stub.py b/google-appengine/google/appengine/api/datastore_file_stub.py
index 611624b..f0a809b 100755
--- a/google-appengine/google/appengine/api/datastore_file_stub.py
+++ b/google-appengine/google/appengine/api/datastore_file_stub.py
@@ -162,7 +162,7 @@ class _Cursor(object):
offset += query.offset()
if offset > 0:
- self.__last_result = results[offset - 1]
+ self.__last_result = results[min(len(results), offset) - 1]
else:
self.__last_result = cursor_entity
@@ -208,7 +208,7 @@ class _Cursor(object):
while lo < hi:
mid = (lo + hi) // 2
if compare(results[mid], cursor_entity) < 0:
- lo = mid + 1
+ lo = mid + 1
else:
hi = mid
else:
@@ -313,8 +313,6 @@ class _Cursor(object):
self.__last_result.ToPb().Encode()))
position.set_start_key(str(start_key))
position.set_start_inclusive(False)
- elif self.__query.has_compiled_cursor:
- compiled_cursor.CopyFrom(self.__query.compiled_cursor())
def PopulateQueryResult(self, result, count, compile=False):
"""Populates a QueryResult with this cursor and the given number of results.
@@ -342,7 +340,8 @@ class _Cursor(object):
result.set_more_results(self.__offset < self.count)
if compile:
- self._EncodeCompiledCursor(self.__query, result.mutable_compiled_cursor())
+ self._EncodeCompiledCursor(
+ self.__query, result.mutable_compiled_cursor())
class DatastoreFileStub(apiproxy_stub.APIProxyStub):
@@ -1063,18 +1062,30 @@ class DatastoreFileStub(apiproxy_stub.APIProxyStub):
self.__tx_snapshot = dict(snapshot)
self.__tx_actions = []
- def _Dynamic_AddAction(self, request, void):
- self.__ValidateTransaction(request.transaction())
+ def _Dynamic_AddActions(self, request, _):
+ """Associates the creation of one or more tasks with a transaction.
+
+ Args:
+ request: A taskqueue_service_pb.TaskQueueBulkAddRequest containing the
+ tasks that should be created when the transaction is committed.
+ """
+
- if len(self.__tx_actions) >= _MAX_ACTIONS_PER_TXN:
+ if ((len(self.__tx_actions) + request.add_request_size()) >
+ _MAX_ACTIONS_PER_TXN):
raise apiproxy_errors.ApplicationError(
datastore_pb.Error.BAD_REQUEST,
'Too many messages, maximum allowed %s' % _MAX_ACTIONS_PER_TXN)
- clone = taskqueue_service_pb.TaskQueueAddRequest()
- clone.CopyFrom(request)
- clone.clear_transaction()
- self.__tx_actions.append(clone)
+ new_actions = []
+ for add_request in request.add_request_list():
+ self.__ValidateTransaction(add_request.transaction())
+ clone = taskqueue_service_pb.TaskQueueAddRequest()
+ clone.CopyFrom(add_request)
+ clone.clear_transaction()
+ new_actions.append(clone)
+
+ self.__tx_actions.extend(new_actions)
def _Dynamic_Commit(self, transaction, transaction_response):
self.__ValidateTransaction(transaction)
@@ -1108,15 +1119,18 @@ class DatastoreFileStub(apiproxy_stub.APIProxyStub):
app_str = req.app()
self.__ValidateAppId(app_str)
+ namespace_str = req.name_space()
+ app_namespace_str = datastore_types.EncodeAppIdNamespace(app_str,
+ namespace_str)
kinds = []
- for app, kind in self.__entities:
- if (app != app_str or
+ for app_namespace, kind in self.__entities:
+ if (app_namespace != app_namespace_str or
(req.has_start_kind() and kind < req.start_kind()) or
(req.has_end_kind() and kind > req.end_kind())):
continue
- app_kind = (app, kind)
+ app_kind = (app_namespace_str, kind)
if app_kind in self.__schema_cache:
kinds.append(self.__schema_cache[app_kind])
continue
diff --git a/google-appengine/google/appengine/api/datastore_types.py b/google-appengine/google/appengine/api/datastore_types.py
index c7a7c20..9c80104 100755
--- a/google-appengine/google/appengine/api/datastore_types.py
+++ b/google-appengine/google/appengine/api/datastore_types.py
@@ -175,10 +175,11 @@ def ResolveNamespace(namespace):
Raises:
BadArgumentError if the value is not a string.
"""
- if not namespace:
- namespace = namespace_manager.get_namespace();
- ValidateString(
- namespace, 'namespace', datastore_errors.BadArgumentError, empty_ok=True)
+ if namespace is None:
+ namespace = namespace_manager.get_namespace()
+ else:
+ namespace_manager.validate_namespace(
+ namespace, datastore_errors.BadArgumentError)
return namespace
@@ -215,9 +216,9 @@ def PartitionString(value, separator):
value: String to be partitioned
separator: Separator string
"""
- index = value.find(separator);
+ index = value.find(separator)
if index == -1:
- return (value, '', value[0:0]);
+ return (value, '', value[0:0])
else:
return (value[0:index], separator, value[index+len(separator):len(value)])
@@ -312,12 +313,9 @@ class Key(object):
Args:
kind: the entity kind (a str or unicode instance)
id_or_name: the id (an int or long) or name (a str or unicode instance)
-
- Additional positional arguments are allowed and should be
- alternating kind and id/name.
-
- Keyword args:
parent: optional parent Key; default None.
+ namespace: optional namespace to use otherwise namespace_manager's
+ default namespace is used.
Returns:
A new Key instance whose .kind() and .id() or .name() methods return
@@ -329,7 +327,8 @@ class Key(object):
"""
parent = kwds.pop('parent', None)
app_id = ResolveAppId(kwds.pop('_app', None))
- namespace = ResolveNamespace(kwds.pop('namespace', None))
+
+ namespace = kwds.pop('namespace', None)
if kwds:
raise datastore_errors.BadArgumentError(
@@ -345,14 +344,18 @@ class Key(object):
raise datastore_errors.BadArgumentError(
'Expected None or a Key as parent; received %r (a %s).' %
(parent, typename(parent)))
+ if namespace is None:
+ namespace = parent.namespace()
if not parent.has_id_or_name():
raise datastore_errors.BadKeyError(
'The parent Key is incomplete.')
if app_id != parent.app() or namespace != parent.namespace():
raise datastore_errors.BadArgumentError(
- 'The app/namespace arguments (%r) should match ' +
- 'parent.app/namespace() (%s)' %
- ((app_id, namespace), (parent.app(), parent.namespace())))
+ 'The app/namespace arguments (%s/%s) should match '
+ 'parent.app/namespace() (%s/%s)' %
+ (app_id, namespace, parent.app(), parent.namespace()))
+
+ namespace = ResolveNamespace(namespace)
key = Key()
ref = key.__reference
@@ -394,11 +397,11 @@ class Key(object):
return None
def namespace(self):
- """Returns this entity's app id, a string."""
+ """Returns this entity's namespace, a string."""
if self.__reference.has_name_space():
return self.__reference.name_space().decode('utf-8')
else:
- return None
+ return ''
def kind(self):
"""Returns this entity's kind, as a string."""
@@ -581,7 +584,7 @@ class Key(object):
args.append('_app=%r' % self.__reference.app().decode('utf-8'))
if self.__reference.has_name_space():
- args.append('_namespace=%r' %
+ args.append('namespace=%r' %
self.__reference.name_space().decode('utf-8'))
return u'datastore_types.Key.from_path(%s)' % ', '.join(args)
@@ -602,10 +605,10 @@ class Key(object):
if not isinstance(other, Key):
return -2
- self_args = [self.__reference.app()]
+ self_args = [self.__reference.app(), self.__reference.name_space()]
self_args += self.to_path(_default_id=0)
- other_args = [other.__reference.app()]
+ other_args = [other.__reference.app(), other.__reference.name_space()]
other_args += other.to_path(_default_id=0)
for self_component, other_component in zip(self_args, other_args):
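
A sketch of namespace-aware key construction under the new from_path() signature; the kind and name are illustrative:

    from google.appengine.api import datastore_types

    key = datastore_types.Key.from_path('Employee', 'asalieri',
                                        namespace='hr')
    print(key.namespace())  # 'hr'; an unset namespace now reads as ''
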
diff --git a/google-appengine/google/appengine/api/dosinfo.py b/google-appengine/google/appengine/api/dosinfo.py
index 6fefeb7..c1c01ae 100755
--- a/google-appengine/google/appengine/api/dosinfo.py
+++ b/google-appengine/google/appengine/api/dosinfo.py
@@ -21,6 +21,7 @@ Library for parsing dos.yaml files and working with these in memory.
"""
+
import google
import ipaddr
@@ -39,12 +40,12 @@ SUBNET = 'subnet'
class SubnetValidator(validation.Validator):
"""Checks that a subnet can be parsed and is a valid IPv4 or IPv6 subnet."""
- def Validate(self, value):
+ def Validate(self, value, key=None):
"""Validates a subnet."""
if value is None:
raise validation.MissingAttribute('subnet must be specified')
try:
- ipaddr.IP(value)
+ ipaddr.IPNetwork(value)
except ValueError:
raise validation.ValidationError('%s is not a valid IPv4 or IPv6 subnet' %
value)
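
The validator now parses subnets with ipaddr.IPNetwork(); a sketch assuming the bundled ipaddr 2.x API:

    import ipaddr  # shipped with the SDK under lib/ipaddr

    net = ipaddr.IPNetwork('192.0.2.0/24')
    print(ipaddr.IPAddress('192.0.2.17') in net)  # True
    ipaddr.IPNetwork('not-a-subnet')              # raises ValueError
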
diff --git a/google-appengine/google/appengine/api/images/images_service_pb.py b/google-appengine/google/appengine/api/images/images_service_pb.py
index f5ecb31..62bae52 100644..100755
--- a/google-appengine/google/appengine/api/images/images_service_pb.py
+++ b/google-appengine/google/appengine/api/images/images_service_pb.py
@@ -2016,5 +2016,173 @@ class ImagesHistogramResponse(ProtocolBuffer.ProtocolMessage):
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+class ImagesGetUrlBaseRequest(ProtocolBuffer.ProtocolMessage):
+ has_blob_key_ = 0
+ blob_key_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def blob_key(self): return self.blob_key_
+
+ def set_blob_key(self, x):
+ self.has_blob_key_ = 1
+ self.blob_key_ = x
+
+ def clear_blob_key(self):
+ if self.has_blob_key_:
+ self.has_blob_key_ = 0
+ self.blob_key_ = ""
+
+ def has_blob_key(self): return self.has_blob_key_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_blob_key()): self.set_blob_key(x.blob_key())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_blob_key_ != x.has_blob_key_: return 0
+ if self.has_blob_key_ and self.blob_key_ != x.blob_key_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_blob_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: blob_key not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.blob_key_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_blob_key()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.blob_key_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_blob_key(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_blob_key_: res+=prefix+("blob_key: %s\n" % self.DebugFormatString(self.blob_key_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kblob_key = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "blob_key",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesGetUrlBaseResponse(ProtocolBuffer.ProtocolMessage):
+ has_url_ = 0
+ url_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def url(self): return self.url_
+
+ def set_url(self, x):
+ self.has_url_ = 1
+ self.url_ = x
+
+ def clear_url(self):
+ if self.has_url_:
+ self.has_url_ = 0
+ self.url_ = ""
+
+ def has_url(self): return self.has_url_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_url()): self.set_url(x.url())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_url_ != x.has_url_: return 0
+ if self.has_url_ and self.url_ != x.url_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.url_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_url()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.url_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_url(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kurl = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "url",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
-__all__ = ['ImagesServiceError','ImagesServiceTransform','Transform','ImageData','OutputSettings','ImagesTransformRequest','ImagesTransformResponse','CompositeImageOptions','ImagesCanvas','ImagesCompositeRequest','ImagesCompositeResponse','ImagesHistogramRequest','ImagesHistogram','ImagesHistogramResponse']
+__all__ = ['ImagesServiceError','ImagesServiceTransform','Transform','ImageData','OutputSettings','ImagesTransformRequest','ImagesTransformResponse','CompositeImageOptions','ImagesCanvas','ImagesCompositeRequest','ImagesCompositeResponse','ImagesHistogramRequest','ImagesHistogram','ImagesHistogramResponse','ImagesGetUrlBaseRequest','ImagesGetUrlBaseResponse']
diff --git a/google-appengine/google/appengine/api/images/images_stub.py b/google-appengine/google/appengine/api/images/images_stub.py
index a9b0b4d..be7ceab 100755
--- a/google-appengine/google/appengine/api/images/images_stub.py
+++ b/google-appengine/google/appengine/api/images/images_stub.py
@@ -57,6 +57,27 @@ def _ArgbToRgbaTuple(argb):
(unsigned_argb >> 24) & 0xFF)
+def _BackendPremultiplication(color):
+ """Apply premultiplication and unpremultiplication to match production.
+
+ Args:
+ color: color tuple as returned by _ArgbToRgbaTuple.
+
+ Returns:
+ RGBA tuple.
+ """
+ alpha = color[3]
+ rgb = color[0:3]
+ multiplied = [(x * (alpha + 1)) >> 8 for x in rgb]
+ if alpha:
+ alpha_inverse = 0xffffff / alpha
+ unmultiplied = [(x * alpha_inverse) >> 16 for x in multiplied]
+ else:
+ unmultiplied = [0] * 3
+
+ return tuple(unmultiplied + [alpha])
+
+
class ImagesServiceStub(apiproxy_stub.APIProxyStub):
"""Stub version of images API to be used with the dev_appserver."""
@@ -82,6 +103,7 @@ class ImagesServiceStub(apiproxy_stub.APIProxyStub):
width = request.canvas().width()
height = request.canvas().height()
color = _ArgbToRgbaTuple(request.canvas().color())
+ color = _BackendPremultiplication(color)
canvas = Image.new("RGBA", (width, height), color)
sources = []
if (not request.canvas().width() or request.canvas().width() > 4000 or
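
A worked instance of the premultiplication round trip above, for a half-transparent red; Python 2 integer division is assumed, as in the stub:

    alpha = 128
    rgb = (255, 0, 0)
    multiplied = [(x * (alpha + 1)) >> 8 for x in rgb]  # [128, 0, 0]
    alpha_inverse = 0xffffff / alpha                    # 131071
    unmultiplied = [(x * alpha_inverse) >> 16
                    for x in multiplied]                # [255, 0, 0]
    print(tuple(unmultiplied + [alpha]))                # (255, 0, 0, 128)
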
diff --git a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue.py b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue.py
index 213e3f8..fc2774e 100755
--- a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue.py
+++ b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue.py
@@ -30,6 +30,7 @@ base path. A default queue is also provided for simple usage.
import datetime
+import os
import re
import time
import urllib
@@ -109,10 +110,22 @@ class PermissionDeniedError(Error):
"""The requested operation is not allowed for this app."""
+class DuplicateTaskNameError(Error):
+ """The add arguments contain tasks with identical names."""
+
+
+class TooManyTasksError(Error):
+ """Too many tasks were present in a single function call."""
+
+
class DatastoreError(Error):
"""There was a datastore error while accessing the queue."""
+class BadTransactionState(Error):
+ """The state of the current transaction does not permit this operation."""
+
+
MAX_QUEUE_NAME_LENGTH = 100
MAX_TASK_NAME_LENGTH = 500
@@ -145,6 +158,42 @@ _QUEUE_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_QUEUE_NAME_LENGTH
_QUEUE_NAME_RE = re.compile(_QUEUE_NAME_PATTERN)
+_ERROR_MAPPING = {
+ taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE: UnknownQueueError,
+ taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR:
+ TransientError,
+ taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR: InternalError,
+ taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE:
+ TaskTooLargeError,
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_TASK_NAME:
+ InvalidTaskNameError,
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME:
+ InvalidQueueNameError,
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_URL: InvalidUrlError,
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_RATE:
+ InvalidQueueError,
+ taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED:
+ PermissionDeniedError,
+ taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS:
+ TaskAlreadyExistsError,
+ taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK:
+ TombstonedTaskError,
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA: InvalidTaskError,
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST: Error,
+ taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK: Error,
+ taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE: Error,
+ taskqueue_service_pb.TaskQueueServiceError.DUPLICATE_TASK_NAME:
+ DuplicateTaskNameError,
+
+ taskqueue_service_pb.TaskQueueServiceError.TOO_MANY_TASKS:
+ TooManyTasksError,
+
+}
+
+_PRESERVE_ENVIRONMENT_HEADERS = (
+ ('X-AppEngine-Default-Namespace', 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'),
+ ('X-AppEngine-Current-Namespace', 'HTTP_X_APPENGINE_CURRENT_NAMESPACE'))
+
class _UTCTimeZone(datetime.tzinfo):
"""UTC timezone."""
@@ -284,6 +333,11 @@ class Task(object):
self.__payload = None
params = kwargs.get('params', {})
+ for header_name, environ_name in _PRESERVE_ENVIRONMENT_HEADERS:
+ value = os.environ.get(environ_name)
+ if value is not None:
+ self.__headers.setdefault(header_name, value)
+
if query and params:
raise InvalidTaskError('Query string and parameters both present; '
'only one of these may be supplied')
@@ -509,65 +563,140 @@ class Queue(object):
self.__url = '%s/%s' % (_DEFAULT_QUEUE_PATH, self.__name)
def add(self, task, transactional=False):
- """Adds a Task to this Queue.
+ """Adds a Task or list of Tasks to this Queue.
+
+ If a list containing more than one Task is given, a raised exception does
+ not guarantee that no tasks were added to the queue (unless transactional
+ is set to True). To determine which tasks were successfully added when an
+ exception is raised, check the Task.was_enqueued property.
Args:
- task: The Task to add.
- transactional: If false adds the task to a queue irrespectively to the
- enclosing transaction success or failure. (optional)
+ task: A Task instance or a list of Task instances that will be added to
+ the queue.
+ transactional: If False, adds the Task(s) to the queue irrespective of the
+ enclosing transaction's success or failure. If True, an exception is
+ raised when called outside of a transaction. (optional)
Returns:
- The Task that was supplied to this method.
+ The Task or list of Tasks that was supplied to this method.
Raises:
- BadTaskStateError if the Task has already been added to a queue.
+ BadTaskStateError: if the Task(s) have already been added to a queue.
+ BadTransactionState: if the transactional argument is True but this call
+ is being made outside of the context of a transaction.
Error-subclass on application errors.
"""
+ try:
+ tasks = list(iter(task))
+ except TypeError:
+ tasks = [task]
+ multiple = False
+ else:
+ multiple = True
+
+ self.__AddTasks(tasks, transactional)
+
+ if multiple:
+ return tasks
+ else:
+ assert len(tasks) == 1
+ return tasks[0]
+
+ def __AddTasks(self, tasks, transactional):
+ """Internal implementation of .add() where tasks must be a list."""
+
+ request = taskqueue_service_pb.TaskQueueBulkAddRequest()
+ response = taskqueue_service_pb.TaskQueueBulkAddResponse()
+
+ task_names = set()
+ for task in tasks:
+ if task.name:
+ if task.name in task_names:
+ raise DuplicateTaskNameError(
+ 'The task name %r is used more than once in the request' %
+ task.name)
+ task_names.add(task.name)
+
+ self.__FillAddRequest(task, request.add_add_request(), transactional)
+
+ try:
+ apiproxy_stub_map.MakeSyncCall('taskqueue', 'BulkAdd', request, response)
+ except apiproxy_errors.ApplicationError, e:
+ raise self.__TranslateError(e.application_error, e.error_detail)
+
+ assert response.taskresult_size() == len(tasks), (
+ 'expected %d results from BulkAdd(), got %d' % (
+ len(tasks), response.taskresult_size()))
+
+ exception = None
+ for task, task_result in zip(tasks, response.taskresult_list()):
+ if task_result.result() == taskqueue_service_pb.TaskQueueServiceError.OK:
+ if task_result.has_chosen_task_name():
+ task._Task__name = task_result.chosen_task_name()
+ task._Task__enqueued = True
+ elif (task_result.result() ==
+ taskqueue_service_pb.TaskQueueServiceError.SKIPPED):
+ pass
+ elif exception is None:
+ exception = self.__TranslateError(task_result.result())
+
+ if exception is not None:
+ raise exception
+
+ return tasks
+
+ def __FillAddRequest(self, task, task_request, transactional):
+ """Populates a TaskQueueAddRequest with the data from a Task instance.
+
+ Args:
+ task: The Task instance to use as a source for the data to be added to
+ task_request.
+ task_request: The taskqueue_service_pb.TaskQueueAddRequest to populate.
+ transactional: If True, populates the task_request.transaction message
+ with information from the enclosing transaction (if any).
+
+ Raises:
+ BadTaskStateError: If the task was already added to a Queue.
+ BadTransactionState: If the transactional argument is True and there is no
+ enclosing transaction.
+ InvalidTaskNameError: If the transactional argument is True and the task
+ is named.
+ """
if task.was_enqueued:
raise BadTaskStateError('Task has already been enqueued')
- request = taskqueue_service_pb.TaskQueueAddRequest()
- response = taskqueue_service_pb.TaskQueueAddResponse()
-
adjusted_url = task.url
if task.on_queue_url:
adjusted_url = self.__url + task.url
- request.set_queue_name(self.__name)
- request.set_eta_usec(int(time.mktime(task.eta.utctimetuple())) * 10**6)
- request.set_method(_METHOD_MAP.get(task.method))
- request.set_url(adjusted_url)
+ task_request.set_queue_name(self.__name)
+ task_request.set_eta_usec(
+ int(time.mktime(task.eta.utctimetuple())) * 10**6)
+ task_request.set_method(_METHOD_MAP.get(task.method))
+ task_request.set_url(adjusted_url)
if task.name:
- request.set_task_name(task.name)
+ task_request.set_task_name(task.name)
else:
- request.set_task_name('')
+ task_request.set_task_name('')
if task.payload:
- request.set_body(task.payload)
+ task_request.set_body(task.payload)
for key, value in _flatten_params(task.headers):
- header = request.add_header()
+ header = task_request.add_header()
header.set_key(key)
header.set_value(value)
if transactional:
from google.appengine.api import datastore
- datastore._MaybeSetupTransaction(request, [])
-
- if request.has_transaction() and task.name:
- raise InvalidTaskNameError('Task bound to a transaction cannot be named.')
-
- call_tuple = ('taskqueue', 'Add', request, response)
- try:
- apiproxy_stub_map.MakeSyncCall(*call_tuple)
- except apiproxy_errors.ApplicationError, e:
- self.__TranslateError(e)
+ if not datastore._MaybeSetupTransaction(task_request, []):
+ raise BadTransactionState(
+ 'Transactional adds are not allowed outside of transactions')
- if response.has_chosen_task_name():
- task._Task__name = response.chosen_task_name()
- task._Task__enqueued = True
- return task
+ if task_request.has_transaction() and task.name:
+ raise InvalidTaskNameError(
+ 'Task bound to a transaction cannot be named.')
@property
def name(self):
@@ -575,70 +704,37 @@ class Queue(object):
return self.__name
@staticmethod
- def __TranslateError(error):
+ def __TranslateError(error, detail=''):
"""Translates a TaskQueueServiceError into an exception.
Args:
error: Value from TaskQueueServiceError enum.
+ detail: A human-readable description of the error.
- Raises:
+ Returns:
The corresponding Exception sub-class for that error code.
"""
- if (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE):
- raise UnknownQueueError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR):
- raise TransientError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR):
- raise InternalError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE):
- raise TaskTooLargeError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.INVALID_TASK_NAME):
- raise InvalidTaskNameError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME):
- raise InvalidQueueNameError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.INVALID_URL):
- raise InvalidUrlError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_RATE):
- raise InvalidQueueError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED):
- raise PermissionDeniedError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS):
- raise TaskAlreadyExistsError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK):
- raise TombstonedTaskError(error.error_detail)
- elif (error.application_error ==
- taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA):
- raise InvalidTaskError(error.error_detail)
- elif ((error.application_error >=
- taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR) and
- isinstance(error.application_error, int)):
+ if (error >= taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR
+ and isinstance(error, int)):
from google.appengine.api import datastore
- error.application_error = (error.application_error -
- taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR)
- datastore_exception = datastore._ToDatastoreError(error)
+ datastore_exception = datastore._DatastoreExceptionFromErrorCodeAndDetail(
+ error - taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR,
+ detail)
class JointException(datastore_exception.__class__, DatastoreError):
"""There was a datastore error while accessing the queue."""
__msg = (u'taskqueue.DatastoreError caused by: %s %s' %
- (datastore_exception.__class__, error.error_detail))
+ (datastore_exception.__class__, detail))
def __str__(self):
return JointException.__msg
- raise JointException
+ return JointException()
else:
- raise Error('Application error %s: %s' %
- (error.application_error, error.error_detail))
+ exception_class = _ERROR_MAPPING.get(error, None)
+ if exception_class:
+ return exception_class(detail)
+ else:
+ return Error('Application error %s: %s' % (error, detail))
def add(*args, **kwargs):
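
A usage sketch of the bulk-add semantics documented above (the queue name and URL are illustrative and assume a matching queue.yaml entry):

    from google.appengine.api.labs import taskqueue

    tasks = [taskqueue.Task(url='/work', params={'shard': str(i)})
             for i in xrange(3)]
    try:
        taskqueue.Queue('background').add(tasks)
    except taskqueue.Error:
        # A non-transactional bulk add may have enqueued some of the tasks
        # before failing; was_enqueued reports which ones made it.
        enqueued = [t for t in tasks if t.was_enqueued]
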
diff --git a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
index 4fe5c89..af28df3 100644..100755
--- a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
+++ b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
@@ -23,6 +23,7 @@ __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.datastore.datastore_v3_pb import *
+from google.net.proto.message_set import MessageSet
class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
OK = 0
@@ -41,6 +42,10 @@ class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
INVALID_REQUEST = 13
UNKNOWN_TASK = 14
TOMBSTONED_QUEUE = 15
+ DUPLICATE_TASK_NAME = 16
+ SKIPPED = 17
+ TOO_MANY_TASKS = 18
+ INVALID_PAYLOAD = 19
DATASTORE_ERROR = 10000
_ErrorCode_NAMES = {
@@ -60,6 +65,10 @@ class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
13: "INVALID_REQUEST",
14: "UNKNOWN_TASK",
15: "TOMBSTONED_QUEUE",
+ 16: "DUPLICATE_TASK_NAME",
+ 17: "SKIPPED",
+ 18: "TOO_MANY_TASKS",
+ 19: "INVALID_PAYLOAD",
10000: "DATASTORE_ERROR",
}
@@ -352,6 +361,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
crontimetable_ = None
has_description_ = 0
description_ = ""
+ has_payload_ = 0
+ payload_ = None
def __init__(self, contents=None):
self.header_ = []
@@ -514,6 +525,24 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
def has_description(self): return self.has_description_
+ def payload(self):
+ if self.payload_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.payload_ is None: self.payload_ = MessageSet()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.payload_
+
+ def mutable_payload(self): self.has_payload_ = 1; return self.payload()
+
+ def clear_payload(self):
+ if self.has_payload_:
+ self.has_payload_ = 0;
+ if self.payload_ is not None: self.payload_.Clear()
+
+ def has_payload(self): return self.has_payload_
+
def MergeFrom(self, x):
assert x is not self
@@ -528,6 +557,7 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
if (x.has_app_id()): self.set_app_id(x.app_id())
if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable())
if (x.has_description()): self.set_description(x.description())
+ if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload())
def Equals(self, x):
if x is self: return 1
@@ -554,6 +584,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
if self.has_crontimetable_ and self.crontimetable_ != x.crontimetable_: return 0
if self.has_description_ != x.has_description_: return 0
if self.has_description_ and self.description_ != x.description_: return 0
+ if self.has_payload_ != x.has_payload_: return 0
+ if self.has_payload_ and self.payload_ != x.payload_: return 0
return 1
def IsInitialized(self, debug_strs=None):
@@ -570,14 +602,11 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
- if (not self.has_url_):
- initialized = 0
- if debug_strs is not None:
- debug_strs.append('Required field: url not set.')
for p in self.header_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0
+ if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
@@ -586,7 +615,7 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
- n += self.lengthString(len(self.url_))
+ if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
n += 2 * len(self.header_)
for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
@@ -594,7 +623,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize()
if (self.has_description_): n += 1 + self.lengthString(len(self.description_))
- return n + 4
+ if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize())
+ return n + 3
def Clear(self):
self.clear_queue_name()
@@ -608,6 +638,7 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
self.clear_app_id()
self.clear_crontimetable()
self.clear_description()
+ self.clear_payload()
def OutputUnchecked(self, out):
out.putVarInt32(10)
@@ -616,8 +647,9 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
- out.putVarInt32(34)
- out.putPrefixedString(self.url_)
+ if (self.has_url_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.url_)
if (self.has_method_):
out.putVarInt32(40)
out.putVarInt32(self.method_)
@@ -642,6 +674,10 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
if (self.has_description_):
out.putVarInt32(122)
out.putPrefixedString(self.description_)
+ if (self.has_payload_):
+ out.putVarInt32(130)
+ out.putVarInt32(self.payload_.ByteSize())
+ self.payload_.OutputUnchecked(out)
def TryMerge(self, d):
while d.avail() > 0:
@@ -682,6 +718,12 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
if tt == 122:
self.set_description(d.getPrefixedString())
continue
+ if tt == 130:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_payload().TryMerge(tmp)
+ continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
@@ -712,6 +754,10 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
res+=self.crontimetable_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
+ if self.has_payload_:
+ res+=prefix+"payload <\n"
+ res+=self.payload_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
return res
@@ -733,6 +779,7 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
kCronTimetableschedule = 13
kCronTimetabletimezone = 14
kdescription = 15
+ kpayload = 16
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
@@ -751,7 +798,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
13: "schedule",
14: "timezone",
15: "description",
- }, 15)
+ 16: "payload",
+ }, 16)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
@@ -770,7 +818,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
13: ProtocolBuffer.Encoder.STRING,
14: ProtocolBuffer.Encoder.STRING,
15: ProtocolBuffer.Encoder.STRING,
- }, 15, ProtocolBuffer.Encoder.MAX_TYPE)
+ 16: ProtocolBuffer.Encoder.STRING,
+ }, 16, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
@@ -855,6 +904,299 @@ class TaskQueueAddResponse(ProtocolBuffer.ProtocolMessage):
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
+class TaskQueueBulkAddRequest(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.add_request_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def add_request_size(self): return len(self.add_request_)
+ def add_request_list(self): return self.add_request_
+
+ def add_request(self, i):
+ return self.add_request_[i]
+
+ def mutable_add_request(self, i):
+ return self.add_request_[i]
+
+ def add_add_request(self):
+ x = TaskQueueAddRequest()
+ self.add_request_.append(x)
+ return x
+
+ def clear_add_request(self):
+ self.add_request_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.add_request_size()): self.add_add_request().CopyFrom(x.add_request(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.add_request_) != len(x.add_request_): return 0
+ for e1, e2 in zip(self.add_request_, x.add_request_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.add_request_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.add_request_)
+ for i in xrange(len(self.add_request_)): n += self.lengthString(self.add_request_[i].ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_add_request()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.add_request_)):
+ out.putVarInt32(10)
+ out.putVarInt32(self.add_request_[i].ByteSize())
+ self.add_request_[i].OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_add_request().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.add_request_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("add_request%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kadd_request = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "add_request",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueBulkAddResponse_TaskResult(ProtocolBuffer.ProtocolMessage):
+ has_result_ = 0
+ result_ = 0
+ has_chosen_task_name_ = 0
+ chosen_task_name_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def result(self): return self.result_
+
+ def set_result(self, x):
+ self.has_result_ = 1
+ self.result_ = x
+
+ def clear_result(self):
+ if self.has_result_:
+ self.has_result_ = 0
+ self.result_ = 0
+
+ def has_result(self): return self.has_result_
+
+ def chosen_task_name(self): return self.chosen_task_name_
+
+ def set_chosen_task_name(self, x):
+ self.has_chosen_task_name_ = 1
+ self.chosen_task_name_ = x
+
+ def clear_chosen_task_name(self):
+ if self.has_chosen_task_name_:
+ self.has_chosen_task_name_ = 0
+ self.chosen_task_name_ = ""
+
+ def has_chosen_task_name(self): return self.has_chosen_task_name_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_result()): self.set_result(x.result())
+ if (x.has_chosen_task_name()): self.set_chosen_task_name(x.chosen_task_name())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_result_ != x.has_result_: return 0
+ if self.has_result_ and self.result_ != x.result_: return 0
+ if self.has_chosen_task_name_ != x.has_chosen_task_name_: return 0
+ if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_result_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: result not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.result_)
+ if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_result()
+ self.clear_chosen_task_name()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(16)
+ out.putVarInt32(self.result_)
+ if (self.has_chosen_task_name_):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.chosen_task_name_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 16:
+ self.set_result(d.getVarInt32())
+ continue
+ if tt == 26:
+ self.set_chosen_task_name(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_result_: res+=prefix+("result: %s\n" % self.DebugFormatInt32(self.result_))
+ if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
+ return res
+
+class TaskQueueBulkAddResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.taskresult_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def taskresult_size(self): return len(self.taskresult_)
+ def taskresult_list(self): return self.taskresult_
+
+ def taskresult(self, i):
+ return self.taskresult_[i]
+
+ def mutable_taskresult(self, i):
+ return self.taskresult_[i]
+
+ def add_taskresult(self):
+ x = TaskQueueBulkAddResponse_TaskResult()
+ self.taskresult_.append(x)
+ return x
+
+ def clear_taskresult(self):
+ self.taskresult_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.taskresult_size()): self.add_taskresult().CopyFrom(x.taskresult(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.taskresult_) != len(x.taskresult_): return 0
+ for e1, e2 in zip(self.taskresult_, x.taskresult_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.taskresult_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.taskresult_)
+ for i in xrange(len(self.taskresult_)): n += self.taskresult_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_taskresult()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.taskresult_)):
+ out.putVarInt32(11)
+ self.taskresult_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_taskresult().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.taskresult_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("TaskResult%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kTaskResultGroup = 1
+ kTaskResultresult = 2
+ kTaskResultchosen_task_name = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "TaskResult",
+ 2: "result",
+ 3: "chosen_task_name",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
class TaskQueueDeleteRequest(ProtocolBuffer.ProtocolMessage):
has_queue_name_ = 0
queue_name_ = ""
@@ -3222,6 +3564,8 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
runlog_ = None
has_description_ = 0
description_ = ""
+ has_payload_ = 0
+ payload_ = None
def __init__(self, contents=None):
self.header_ = []
@@ -3397,6 +3741,24 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
def has_description(self): return self.has_description_
+ def payload(self):
+ if self.payload_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.payload_ is None: self.payload_ = MessageSet()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.payload_
+
+ def mutable_payload(self): self.has_payload_ = 1; return self.payload()
+
+ def clear_payload(self):
+ if self.has_payload_:
+ self.has_payload_ = 0;
+ if self.payload_ is not None: self.payload_.Clear()
+
+ def has_payload(self): return self.has_payload_
+
def MergeFrom(self, x):
assert x is not self
@@ -3412,6 +3774,7 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable())
if (x.has_runlog()): self.mutable_runlog().MergeFrom(x.runlog())
if (x.has_description()): self.set_description(x.description())
+ if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload())
def Equals(self, x):
if x is self: return 1
@@ -3440,6 +3803,8 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
if self.has_runlog_ and self.runlog_ != x.runlog_: return 0
if self.has_description_ != x.has_description_: return 0
if self.has_description_ and self.description_ != x.description_: return 0
+ if self.has_payload_ != x.has_payload_: return 0
+ if self.has_payload_ and self.payload_ != x.payload_: return 0
return 1
def IsInitialized(self, debug_strs=None):
@@ -3452,10 +3817,6 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: eta_usec not set.')
- if (not self.has_url_):
- initialized = 0
- if debug_strs is not None:
- debug_strs.append('Required field: url not set.')
if (not self.has_method_):
initialized = 0
if debug_strs is not None:
@@ -3468,13 +3829,14 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
debug_strs.append('Required field: creation_time_usec not set.')
if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0
if (self.has_runlog_ and not self.runlog_.IsInitialized(debug_strs)): initialized = 0
+ if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.task_name_))
n += self.lengthVarInt64(self.eta_usec_)
- n += self.lengthString(len(self.url_))
+ if (self.has_url_): n += 1 + self.lengthString(len(self.url_))
n += self.lengthVarInt64(self.method_)
if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_)
n += 2 * len(self.header_)
@@ -3485,7 +3847,8 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize()
if (self.has_runlog_): n += 4 + self.runlog_.ByteSize()
if (self.has_description_): n += 2 + self.lengthString(len(self.description_))
- return n + 5
+ if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize())
+ return n + 4
def Clear(self):
self.clear_task_name()
@@ -3500,14 +3863,16 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
self.clear_crontimetable()
self.clear_runlog()
self.clear_description()
+ self.clear_payload()
def OutputUnchecked(self, out):
out.putVarInt32(18)
out.putPrefixedString(self.task_name_)
out.putVarInt32(24)
out.putVarInt64(self.eta_usec_)
- out.putVarInt32(34)
- out.putPrefixedString(self.url_)
+ if (self.has_url_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.url_)
out.putVarInt32(40)
out.putVarInt32(self.method_)
if (self.has_retry_count_):
@@ -3536,6 +3901,10 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
if (self.has_description_):
out.putVarInt32(170)
out.putPrefixedString(self.description_)
+ if (self.has_payload_):
+ out.putVarInt32(178)
+ out.putVarInt32(self.payload_.ByteSize())
+ self.payload_.OutputUnchecked(out)
def TryMerge(self, d):
while 1:
@@ -3577,6 +3946,12 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
if tt == 170:
self.set_description(d.getPrefixedString())
continue
+ if tt == 178:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_payload().TryMerge(tmp)
+ continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
@@ -3608,6 +3983,10 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage):
res+=self.runlog_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_))
+ if self.has_payload_:
+ res+=prefix+"payload <\n"
+ res+=self.payload_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
return res
class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage):
@@ -3712,6 +4091,7 @@ class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage):
kTaskRunLogelapsed_usec = 19
kTaskRunLogresponse_code = 20
kTaskdescription = 21
+ kTaskpayload = 22
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
@@ -3736,7 +4116,8 @@ class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage):
19: "elapsed_usec",
20: "response_code",
21: "description",
- }, 21)
+ 22: "payload",
+ }, 22)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
@@ -3761,9 +4142,10 @@ class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage):
19: ProtocolBuffer.Encoder.NUMERIC,
20: ProtocolBuffer.Encoder.NUMERIC,
21: ProtocolBuffer.Encoder.STRING,
- }, 21, ProtocolBuffer.Encoder.MAX_TYPE)
+ 22: ProtocolBuffer.Encoder.STRING,
+ }, 22, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
-__all__ = ['TaskQueueServiceError','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddRequest_CronTimetable','TaskQueueAddResponse','TaskQueueDeleteRequest','TaskQueueDeleteResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueScannerQueueInfo','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats','TaskQueuePurgeQueueRequest','TaskQueuePurgeQueueResponse','TaskQueueDeleteQueueRequest','TaskQueueDeleteQueueResponse','TaskQueueQueryTasksRequest','TaskQueueQueryTasksResponse','TaskQueueQueryTasksResponse_TaskHeader','TaskQueueQueryTasksResponse_TaskCronTimetable','TaskQueueQueryTasksResponse_TaskRunLog','TaskQueueQueryTasksResponse_Task']
+__all__ = ['TaskQueueServiceError','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddRequest_CronTimetable','TaskQueueAddResponse','TaskQueueBulkAddRequest','TaskQueueBulkAddResponse','TaskQueueBulkAddResponse_TaskResult','TaskQueueDeleteRequest','TaskQueueDeleteResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueScannerQueueInfo','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats','TaskQueuePurgeQueueRequest','TaskQueuePurgeQueueResponse','TaskQueueDeleteQueueRequest','TaskQueueDeleteQueueResponse','TaskQueueQueryTasksRequest','TaskQueueQueryTasksResponse','TaskQueueQueryTasksResponse_TaskHeader','TaskQueueQueryTasksResponse_TaskCronTimetable','TaskQueueQueryTasksResponse_TaskRunLog','TaskQueueQueryTasksResponse_Task']
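
TaskQueueBulkAddResponse encodes its repeated TaskResult sub-message as a protocol buffer group: OutputUnchecked brackets each element between tags 11 and 12, and the sub-message's TryMerge loops until it sees the end-group tag. The constants follow from the same (field_number << 3) | wire_type arithmetic, with wire types 3 and 4 delimiting groups (sketch, not SDK code):

    def make_tag(field_number, wire_type):
        return (field_number << 3) | wire_type

    assert make_tag(1, 3) == 11  # STARTGROUP: opens each TaskResult
    assert make_tag(1, 4) == 12  # ENDGROUP: TryMerge breaks on this tag
    assert make_tag(2, 0) == 16  # varint tag for the result field
    assert make_tag(3, 2) == 26  # string tag for chosen_task_name
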
diff --git a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py
index c9de267..1ab31f8 100755
--- a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py
+++ b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py
@@ -221,6 +221,17 @@ class _DummyTaskStore(object):
task.set_method(
taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.GET)
task.set_retry_count(max(0, random.randint(-10, 5)))
+ if random.random() < 0.3:
+ random_headers = [('nexus', 'one'),
+ ('foo', 'bar'),
+ ('content-type', 'text/plain'),
+ ('from', 'user@email.com')]
+ for _ in xrange(random.randint(1, 4)):
+ elem = random.randint(0, len(random_headers)-1)
+ key, value = random_headers.pop(elem)
+ header_proto = task.add_header()
+ header_proto.set_key(key)
+ header_proto.set_value(value)
return task
for _ in range(num_tasks):
@@ -312,55 +323,164 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub):
self._app_queues = {}
- def _Dynamic_Add(self, request, response):
- """Local implementation of the Add RPC in TaskQueueService.
+ def _ChooseTaskName(self):
+ """Returns a string containing a unique task name."""
+ self._next_task_id += 1
+ return 'task%d' % (self._next_task_id - 1)
- Must adhere to the '_Dynamic_' naming convention for stubbing to work.
- See taskqueue_service.proto for a full description of the RPC.
+ def _VerifyTaskQueueAddRequest(self, request):
+ """Checks that a TaskQueueAddRequest is valid.
+
+ Checks that a TaskQueueAddRequest specifies a valid eta and a valid queue.
Args:
- request: A taskqueue_service_pb.TaskQueueAddRequest.
- response: A taskqueue_service_pb.TaskQueueAddResponse.
+ request: The taskqueue_service_pb.TaskQueueAddRequest to validate.
+
+ Returns:
+ A taskqueue_service_pb.TaskQueueServiceError indicating any problems with
+ the request or taskqueue_service_pb.TaskQueueServiceError.OK if it is
+ valid.
"""
if request.eta_usec() < 0:
- raise apiproxy_errors.ApplicationError(
- taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+ return taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA
eta = datetime.datetime.utcfromtimestamp(request.eta_usec() / 1e6)
max_eta = (datetime.datetime.utcnow() +
datetime.timedelta(days=MAX_ETA_DELTA_DAYS))
if eta > max_eta:
- raise apiproxy_errors.ApplicationError(
- taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+ return taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA
+
+ return taskqueue_service_pb.TaskQueueServiceError.OK
+
+ def _Dynamic_Add(self, request, response):
+ bulk_request = taskqueue_service_pb.TaskQueueBulkAddRequest()
+ bulk_response = taskqueue_service_pb.TaskQueueBulkAddResponse()
- if not self._IsValidQueue(request.queue_name()):
+ bulk_request.add_add_request().CopyFrom(request)
+ self._Dynamic_BulkAdd(bulk_request, bulk_response)
+
+ assert bulk_response.taskresult_size() == 1
+ result = bulk_response.taskresult(0).result()
+
+ if result != taskqueue_service_pb.TaskQueueServiceError.OK:
+ raise apiproxy_errors.ApplicationError(result)
+ elif bulk_response.taskresult(0).has_chosen_task_name():
+ response.set_chosen_task_name(
+ bulk_response.taskresult(0).chosen_task_name())
+
+ def _Dynamic_BulkAdd(self, request, response):
+ """Add many tasks to a queue using a single request.
+
+ Args:
+ request: The taskqueue_service_pb.TaskQueueBulkAddRequest. See
+ taskqueue_service.proto.
+ response: The taskqueue_service_pb.TaskQueueBulkAddResponse. See
+ taskqueue_service.proto.
+ """
+
+ assert request.add_request_size(), 'taskqueue should prevent empty requests'
+
+ if not self._IsValidQueue(request.add_request(0).queue_name()):
raise apiproxy_errors.ApplicationError(
taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
- if not request.task_name():
- request.set_task_name('task%d' % self._next_task_id)
- response.set_chosen_task_name(request.task_name())
- self._next_task_id += 1
+ error_found = False
+ task_results_with_chosen_names = []
+
+ for add_request in request.add_request_list():
+ task_result = response.add_taskresult()
+ error = self._VerifyTaskQueueAddRequest(add_request)
+ if error == taskqueue_service_pb.TaskQueueServiceError.OK:
+ if not add_request.task_name():
+ chosen_name = self._ChooseTaskName()
+ add_request.set_task_name(chosen_name)
+ task_results_with_chosen_names.append(task_result)
+ task_result.set_result(
+ taskqueue_service_pb.TaskQueueServiceError.SKIPPED)
+ else:
+ error_found = True
+ task_result.set_result(error)
+
+ if error_found:
+ return
+
+ if request.add_request(0).has_transaction():
+ self._TransactionalBulkAdd(request)
+ elif request.add_request(0).has_app_id():
+ self._DummyTaskStoreBulkAdd(request, response)
+ else:
+ self._NonTransactionalBulkAdd(request, response)
+
+ for add_request, task_result in zip(request.add_request_list(),
+ response.taskresult_list()):
+ if (task_result.result() ==
+ taskqueue_service_pb.TaskQueueServiceError.SKIPPED):
+ task_result.set_result(taskqueue_service_pb.TaskQueueServiceError.OK)
+ if task_result in task_results_with_chosen_names:
+ task_result.set_chosen_task_name(add_request.task_name())
- if request.has_transaction():
+ def _TransactionalBulkAdd(self, request):
+ """Uses datastore.AddActions to associate tasks with a transaction.
+
+ Args:
+ request: The taskqueue_service_pb.TaskQueueBulkAddRequest containing the
+ tasks to add. N.B. all tasks in the request have been validated and
+ assigned unique names.
+ """
+ try:
+ apiproxy_stub_map.MakeSyncCall(
+ 'datastore_v3', 'AddActions', request, api_base_pb.VoidProto())
+ except apiproxy_errors.ApplicationError, e:
+ raise apiproxy_errors.ApplicationError(
+ e.application_error +
+ taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR,
+ e.error_detail)
+
+ def _DummyTaskStoreBulkAdd(self, request, response):
+ """Adds tasks to the appropriate DummyTaskStore.
+
+ Args:
+ request: The taskqueue_service_pb.TaskQueueBulkAddRequest containing the
+ tasks to add. N.B. all tasks in the request have been validated and
+ those with empty names have been assigned unique names.
+ response: The taskqueue_service_pb.TaskQueueBulkAddResponse to populate
+ with the results. N.B. the chosen_task_name field in the response will
+ not be filled-in.
+ """
+ store = self.GetDummyTaskStore(request.add_request(0).app_id(),
+ request.add_request(0).queue_name())
+ for add_request, task_result in zip(request.add_request_list(),
+ response.taskresult_list()):
try:
- apiproxy_stub_map.MakeSyncCall(
- 'datastore_v3', 'AddAction', request, api_base_pb.VoidProto())
+ store.Add(add_request)
except apiproxy_errors.ApplicationError, e:
- e.application_error = (e.application_error +
- taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR)
- raise e
- elif request.has_app_id():
- store = self.GetDummyTaskStore(request.app_id(), request.queue_name())
- store.Add(request)
- else:
- tasks = self._taskqueues.setdefault(request.queue_name(), [])
- for task in tasks:
- if task.task_name() == request.task_name():
- raise apiproxy_errors.ApplicationError(
- taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)
- tasks.append(request)
- tasks.sort(_CompareTasksByEta)
+ task_result.set_result(e.application_error)
+ else:
+ task_result.set_result(taskqueue_service_pb.TaskQueueServiceError.OK)
+
+ def _NonTransactionalBulkAdd(self, request, response):
+ """Adds tasks to the appropriate list in in self._taskqueues.
+
+ Args:
+ request: The taskqueue_service_pb.TaskQueueBulkAddRequest containing the
+ tasks to add. N.B. all tasks in the request have been validated and
+ those with empty names have been assigned unique names.
+ response: The taskqueue_service_pb.TaskQueueBulkAddResponse to populate
+ with the results. N.B. the chosen_task_name field in the response will
+ not be filled-in.
+ """
+ existing_tasks = self._taskqueues.setdefault(
+ request.add_request(0).queue_name(), [])
+ existing_task_names = set(task.task_name() for task in existing_tasks)
+
+ for add_request, task_result in zip(request.add_request_list(),
+ response.taskresult_list()):
+ if add_request.task_name() in existing_task_names:
+ task_result.set_result(
+ taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)
+ else:
+ existing_tasks.append(add_request)
+ existing_tasks.sort(_CompareTasksByEta)
def _IsValidQueue(self, queue_name):
"""Determines whether a queue is valid, i.e. tasks can be added to it.
@@ -536,6 +656,10 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub):
Not used.
"""
queues = self._app_queues.setdefault(request.app_id(), {})
+ if request.queue_name() in queues and queues[request.queue_name()] is None:
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE)
+
defensive_copy = taskqueue_service_pb.TaskQueueUpdateQueueRequest()
defensive_copy.CopyFrom(request)
queues[request.queue_name()] = defensive_copy
@@ -551,7 +675,13 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub):
response: A taskqueue_service_pb.TaskQueueFetchQueuesResponse.
"""
queues = self._app_queues.get(request.app_id(), {})
- for unused_key, queue in sorted(queues.items()[:request.max_rows()]):
+ for unused_key, queue in sorted(queues.items()):
+ if request.max_rows() == response.queue_size():
+ break
+
+ if queue is None:
+ continue
+
response_queue = response.add_queue()
response_queue.set_queue_name(queue.queue_name())
response_queue.set_bucket_refill_per_second(
@@ -668,7 +798,11 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub):
if request.queue_name() not in queues:
raise apiproxy_errors.ApplicationError(
taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
- del queues[request.queue_name()]
+ elif queues[request.queue_name()] is None:
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE)
+
+ queues[request.queue_name()] = None
def _Dynamic_PurgeQueue(self, request, response):
"""Local purge implementation of TaskQueueService.PurgeQueue.
@@ -682,10 +816,13 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub):
taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME)
queues = self._app_queues.get(request.app_id(), {})
- if (request.queue_name() != DEFAULT_QUEUE_NAME and
- request.queue_name() not in queues):
- raise apiproxy_errors.ApplicationError(
- taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
+ if request.queue_name() != DEFAULT_QUEUE_NAME:
+ if request.queue_name() not in queues:
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
+ elif queues[request.queue_name()] is None:
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE)
store = self.GetDummyTaskStore(request.app_id(), request.queue_name())
for task in store.Lookup(store.Count()):
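
The stub now tombstones deleted queues rather than forgetting them: _Dynamic_DeleteQueue stores None under the queue name, and later UpdateQueue, DeleteQueue, and PurgeQueue calls turn that None into TOMBSTONED_QUEUE. A condensed sketch of the bookkeeping (a plain dict stands in for the stub's per-app queue map):

    def queue_state(queues, name):
        # Condensed from the stub logic above: a missing entry is an
        # unknown queue, an entry of None is a tombstone.
        if name not in queues:
            return 'UNKNOWN_QUEUE'
        elif queues[name] is None:
            return 'TOMBSTONED_QUEUE'
        return 'OK'

    queues = {'background': object()}  # created via UpdateQueue
    queues['background'] = None        # DeleteQueue leaves a tombstone
    assert queue_state(queues, 'background') == 'TOMBSTONED_QUEUE'
    assert queue_state(queues, 'missing') == 'UNKNOWN_QUEUE'
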
diff --git a/google-appengine/google/appengine/api/mail.py b/google-appengine/google/appengine/api/mail.py
index 2895bdc..10ee784 100755
--- a/google-appengine/google/appengine/api/mail.py
+++ b/google-appengine/google/appengine/api/mail.py
@@ -64,6 +64,7 @@ EXTENSION_MIME_MAP = {
'bmp': 'image/x-ms-bmp',
'css': 'text/css',
'csv': 'text/csv',
+ 'doc': 'application/msword',
'diff': 'text/plain',
'flac': 'audio/flac',
'gif': 'image/gif',
@@ -81,16 +82,23 @@ EXTENSION_MIME_MAP = {
'mpe': 'video/mpeg',
'mpeg': 'video/mpeg',
'mpg': 'video/mpeg',
+ 'odp': 'application/vnd.oasis.opendocument.presentation',
+ 'ods': 'application/vnd.oasis.opendocument.spreadsheet',
+ 'odt': 'application/vnd.oasis.opendocument.text',
'oga': 'audio/ogg',
'ogg': 'audio/ogg',
'ogv': 'video/ogg',
'pdf': 'application/pdf',
'png': 'image/png',
'pot': 'text/plain',
+ 'pps': 'application/vnd.ms-powerpoint',
+ 'ppt': 'application/vnd.ms-powerpoint',
'qt': 'video/quicktime',
'rmi': 'audio/mid',
'rss': 'text/rss+xml',
'snd': 'audio/basic',
+ 'sxc': 'application/vnd.sun.xml.calc',
+ 'sxw': 'application/vnd.sun.xml.writer',
'text': 'text/plain',
'tif': 'image/tiff',
'tiff': 'image/tiff',
@@ -98,6 +106,7 @@ EXTENSION_MIME_MAP = {
'vcf': 'text/directory',
'wav': 'audio/x-wav',
'wbmp': 'image/vnd.wap.wbmp',
+ 'xls': 'application/vnd.ms-excel',
}
EXTENSION_WHITELIST = frozenset(EXTENSION_MIME_MAP.iterkeys())
@@ -831,7 +840,8 @@ class _EmailMessageBase(object):
filename = mime_message.get_param('name')
payload = EncodedPayload(payload,
- mime_message.get_charset(),
+ (mime_message.get_content_charset() or
+ mime_message.get_charset()),
mime_message['content-transfer-encoding'])
if filename:
diff --git a/google-appengine/google/appengine/api/mail_service_pb.py b/google-appengine/google/appengine/api/mail_service_pb.py
index 1b608ea..1b608ea 100644..100755
--- a/google-appengine/google/appengine/api/mail_service_pb.py
+++ b/google-appengine/google/appengine/api/mail_service_pb.py
diff --git a/google-appengine/google/appengine/api/memcache/__init__.py b/google-appengine/google/appengine/api/memcache/__init__.py
index 4ae08fa..6a9f92a 100755
--- a/google-appengine/google/appengine/api/memcache/__init__.py
+++ b/google-appengine/google/appengine/api/memcache/__init__.py
@@ -85,6 +85,23 @@ TYPE_BOOL = 5
CAPABILITY = capabilities.CapabilitySet('memcache')
+def _add_name_space(message, namespace=None):
+ """Populate the name_space field in a messagecol buffer.
+
+ Args:
+ message: A protocol buffer supporting the set_name_space() operation.
+ namespace: The name of the namespace part. If None, use the
+ default namespace. The empty namespace (i.e. '') will clear
+ the name_space field.
+ """
+ if namespace is None:
+ namespace = namespace_manager.get_namespace()
+ if not namespace:
+ message.clear_name_space()
+ else:
+ message.set_name_space(namespace)
+
+
def _key_string(key, key_prefix='', server_to_user_dict=None):
"""Utility function to handle different ways of requesting keys.
@@ -405,7 +422,7 @@ class Client(object):
"""
request = MemcacheGetRequest()
request.add_key(_key_string(key))
- namespace_manager._add_name_space(request, namespace)
+ _add_name_space(request, namespace)
response = MemcacheGetResponse()
try:
self._make_sync_call('memcache', 'Get', request, response)
@@ -441,7 +458,7 @@ class Client(object):
the keys in the returned dictionary.
"""
request = MemcacheGetRequest()
- namespace_manager._add_name_space(request, namespace)
+ _add_name_space(request, namespace)
response = MemcacheGetResponse()
user_key = {}
for key in keys:
@@ -486,7 +503,7 @@ class Client(object):
raise ValueError('Delete timeout must be non-negative.')
request = MemcacheDeleteRequest()
- namespace_manager._add_name_space(request, namespace)
+ _add_name_space(request, namespace)
response = MemcacheDeleteResponse()
delete_item = request.add_item()
@@ -530,7 +547,7 @@ class Client(object):
raise ValueError('Delete timeout must not be negative.')
request = MemcacheDeleteRequest()
- namespace_manager._add_name_space(request, namespace)
+ _add_name_space(request, namespace)
response = MemcacheDeleteResponse()
for key in keys:
@@ -641,7 +658,7 @@ class Client(object):
item.set_flags(flags)
item.set_set_policy(policy)
item.set_expiration_time(int(math.ceil(time)))
- namespace_manager._add_name_space(request, namespace)
+ _add_name_space(request, namespace)
response = MemcacheSetResponse()
try:
self._make_sync_call('memcache', 'Set', request, response)
@@ -682,6 +699,7 @@ class Client(object):
raise ValueError('Expiration must not be negative.')
request = MemcacheSetRequest()
+ _add_name_space(request, namespace)
user_key = {}
server_keys = []
for key, value in mapping.iteritems():
@@ -695,7 +713,6 @@ class Client(object):
item.set_flags(flags)
item.set_set_policy(policy)
item.set_expiration_time(int(math.ceil(time)))
- namespace_manager._add_name_space(request, namespace)
response = MemcacheSetResponse()
try:
@@ -902,7 +919,7 @@ class Client(object):
pass
request = MemcacheIncrementRequest()
- namespace_manager._add_name_space(request, namespace)
+ _add_name_space(request, namespace)
response = MemcacheIncrementResponse()
request.set_key(_key_string(key))
request.set_delta(delta)
@@ -950,7 +967,7 @@ class Client(object):
request = MemcacheBatchIncrementRequest()
response = MemcacheBatchIncrementResponse()
- namespace_manager._add_name_space(request, namespace)
+ _add_name_space(request, namespace)
for key, delta in mapping.iteritems():
if not isinstance(delta, (int, long)):
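
The memcache client now resolves namespaces itself through the module-level _add_name_space above: passing namespace=None inherits the namespace manager's current default, while an explicit empty string clears the field. A condensed sketch of the three cases (FakeMessage and the 'work' default are stand-ins for a real request proto and namespace_manager.get_namespace()):

    class FakeMessage(object):
        """Minimal stand-in for a memcache request proto."""
        name_space = None
        def set_name_space(self, ns): self.name_space = ns
        def clear_name_space(self): self.name_space = None

    def add_name_space(message, namespace=None, current_default='work'):
        # Condensed from _add_name_space above.
        if namespace is None:
            namespace = current_default
        if not namespace:
            message.clear_name_space()  # '' explicitly clears the field
        else:
            message.set_name_space(namespace)

    m = FakeMessage()
    add_name_space(m)                # None -> inherit the current default
    assert m.name_space == 'work'
    add_name_space(m, namespace='')  # '' -> explicitly unnamespaced
    assert m.name_space is None
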
diff --git a/google-appengine/google/appengine/api/memcache/memcache_service_pb.py b/google-appengine/google/appengine/api/memcache/memcache_service_pb.py
index baf82c6..baf82c6 100644..100755
--- a/google-appengine/google/appengine/api/memcache/memcache_service_pb.py
+++ b/google-appengine/google/appengine/api/memcache/memcache_service_pb.py
diff --git a/google-appengine/google/appengine/api/namespace_manager/__init__.py b/google-appengine/google/appengine/api/namespace_manager/__init__.py
index 3d00b7b..fe3e795 100755
--- a/google-appengine/google/appengine/api/namespace_manager/__init__.py
+++ b/google-appengine/google/appengine/api/namespace_manager/__init__.py
@@ -15,73 +15,7 @@
# limitations under the License.
#
-"""Control the namespacing system used by various APIs.
+"""Namespace Manager Module."""
-Each API call can specify an alternate namespace, but the functions
-here can be used to change the default namespace. The default is set
-before user code begins executing.
-"""
-
-
-import os
-
-ENV_DEFAULT_NAMESPACE = 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'
-ENV_CURRENT_NAMESPACE = '__INTERNAL_CURRENT_NAMESPACE'
-
-
-def set_namespace(namespace):
- """Set the default namespace to use for future calls, for this request only.
-
- Args:
- namespace: A string naming the new namespace to use. None
- string specifies the root namespace for this app.
- """
- if namespace:
- os.environ[ENV_CURRENT_NAMESPACE] = namespace
- else:
- os.environ.pop(ENV_CURRENT_NAMESPACE, None)
-
-def set_request_namespace(namespace):
- """Deprecated. Use set_namespace(namespace)."""
- return set_namespace(namespace)
-
-def get_namespace():
- """Get the name of the current default namespace.
-
- None indicates that the root namespace is the default.
- """
- return os.getenv(ENV_CURRENT_NAMESPACE, None)
-
-def get_request_namespace():
- """Deprecated. Use get_namespace()."""
- return get_namespace()
-
-def _enable_request_namespace():
- """Automatically enable namespace to default for domain.
-
- Calling this function will automatically default the namespace to the
- chosen Google Apps domain for the current request.
- """
- if ENV_CURRENT_NAMESPACE not in os.environ:
- if ENV_DEFAULT_NAMESPACE in os.environ:
- os.environ[ENV_CURRENT_NAMESPACE] = os.environ[ENV_DEFAULT_NAMESPACE]
- else:
- os.environ.pop(ENV_CURRENT_NAMESPACE, None)
-
-
-def _add_name_space(request, namespace=None):
- """Add a name_space field to a request.
-
- Args:
- request: A protocol buffer supporting the set_name_space() operation.
- namespace: The name of the namespace part. If None, use the
- default namespace.
- """
- _ns = namespace
- if not _ns:
- _ns = get_namespace()
- if not _ns:
- request.clear_name_space()
- else:
- request.set_name_space(_ns)
+from namespace_manager import *
diff --git a/google-appengine/google/appengine/api/namespace_manager/namespace_manager.py b/google-appengine/google/appengine/api/namespace_manager/namespace_manager.py
new file mode 100755
index 0000000..cb9fe54
--- /dev/null
+++ b/google-appengine/google/appengine/api/namespace_manager/namespace_manager.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Control the namespacing system used by various APIs.
+
+A namespace may be specified in various API calls exemplified
+by the datastore and memcache interfaces. The default can be
+specified using this module.
+"""
+
+
+
+import os
+import re
+
+__all__ = ['BadValueError',
+ 'set_namespace',
+ 'get_namespace',
+ 'enable_request_namespace',
+ 'validate_namespace',
+ ]
+
+
+_ENV_DEFAULT_NAMESPACE = 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'
+_ENV_CURRENT_NAMESPACE = 'HTTP_X_APPENGINE_CURRENT_NAMESPACE'
+
+_NAMESPACE_MAX_LENGTH = 100
+_NAMESPACE_PATTERN = r'^[0-9A-Za-z._-]{0,%s}$' % _NAMESPACE_MAX_LENGTH
+_NAMESPACE_RE = re.compile(_NAMESPACE_PATTERN)
+
+def set_namespace(namespace):
+ """Set the default namespace for the current HTTP request.
+
+ Args:
+ namespace: A string naming the new namespace to use. A value of None
+ will unset the default namespace value.
+ """
+ if namespace is None:
+ os.environ.pop(_ENV_CURRENT_NAMESPACE, None)
+ else:
+ validate_namespace(namespace)
+ os.environ[_ENV_CURRENT_NAMESPACE] = namespace
+
+
+def get_namespace():
+ """Get the the current default namespace or ('') namespace if unset."""
+ return os.environ.get(_ENV_CURRENT_NAMESPACE, '')
+
+
+def enable_request_namespace():
+ """Set the default namespace to the Google Apps domain referring this request.
+
+ Calling this function sets the default namespace to the Google Apps
+ domain that was used to create the URL for this request. It applies only
+ to the current request, and only if the current default namespace is
+ unset.
+ """
+ if _ENV_CURRENT_NAMESPACE not in os.environ:
+ if _ENV_DEFAULT_NAMESPACE in os.environ:
+ os.environ[_ENV_CURRENT_NAMESPACE] = os.environ[_ENV_DEFAULT_NAMESPACE]
+
+
+class BadValueError(Exception):
+ """Raised by ValidateNamespaceString."""
+
+
+def validate_namespace(value, exception=BadValueError):
+ """Raises an exception if value is not a valid Namespace string.
+
+  A namespace string must be a string instance and may contain only
+  letters (upper or lower case), digits, and the characters '.', '_'
+  and '-', up to a maximum length of 100 characters:
+  ([0-9A-Za-z._-]{0,100})
+
+ Args:
+ value: the value to validate.
+ exception: exception type to raise.
+ """
+ if not isinstance(value, basestring):
+    raise exception('value should be a string; received %r (a %s).' %
+ (value, type(value)))
+ if not _NAMESPACE_RE.match(value):
+ raise exception('value does not match pattern "%s"' % _NAMESPACE_PATTERN)
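The module above is small enough to exercise end to end. A minimal sketch of the intended usage (the HTTP_X_APPENGINE_* environment variables are normally populated by the runtime; setting one by hand here is purely illustrative):

    import os
    from google.appengine.api import namespace_manager

    namespace_manager.set_namespace('gtld')       # validated against [0-9A-Za-z._-]{0,100}
    assert namespace_manager.get_namespace() == 'gtld'

    namespace_manager.set_namespace(None)         # unset the default namespace
    assert namespace_manager.get_namespace() == ''

    # enable_request_namespace() copies the default-namespace header into the
    # current namespace, but only when no namespace has been set yet:
    os.environ['HTTP_X_APPENGINE_DEFAULT_NAMESPACE'] = 'example.com'
    namespace_manager.enable_request_namespace()
    assert namespace_manager.get_namespace() == 'example.com'

    try:
      namespace_manager.set_namespace('no spaces!')
    except namespace_manager.BadValueError:
      pass                                        # rejected by validate_namespace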
diff --git a/google-appengine/google/appengine/api/queueinfo.py b/google-appengine/google/appengine/api/queueinfo.py
index bdaa358..b8d0dc5 100755
--- a/google-appengine/google/appengine/api/queueinfo.py
+++ b/google-appengine/google/appengine/api/queueinfo.py
@@ -41,6 +41,19 @@ queue:
If this queue had been idle for a while before some jobs were submitted to it,
the first 10 jobs submitted would be run immediately, then subsequent ones
would be run once every 40s or so. The limit of 2000 per day would still apply.
+
+An app's queues are also subject to storage quota limits for their stored tasks,
+i.e. those tasks that have been added to queues but not yet executed. This quota
+is part of the app's total storage quota (including datastore and blobstore
+quota). An app may override the default portion of this quota available for
+taskqueue storage (100M) with a top-level field "total_storage_limit":
+
+total_storage_limit: 1.2G
+
+If no suffix is specified, the number is interpreted as bytes. Supported
+suffixes are B (bytes), K (kilobytes), M (megabytes), G (gigabytes) and
+T (terabytes). If total_storage_limit exceeds the total storage quota
+available to an app, it is clamped.
"""
@@ -52,12 +65,16 @@ from google.appengine.api import yaml_object
_NAME_REGEX = r'^[A-Za-z0-9-]{0,499}$'
_RATE_REGEX = r'^(0|[0-9]+(\.[0-9]*)?/[smhd])'
+_TOTAL_STORAGE_LIMIT_REGEX = r'^([0-9]+(\.[0-9]*)?[BKMGT]?)'
QUEUE = 'queue'
NAME = 'name'
RATE = 'rate'
BUCKET_SIZE = 'bucket_size'
+TOTAL_STORAGE_LIMIT = 'total_storage_limit'
+
+BYTE_SUFFIXES = 'BKMGT'
class MalformedQueueConfiguration(Exception):
@@ -76,7 +93,8 @@ class QueueEntry(validation.Validated):
class QueueInfoExternal(validation.Validated):
"""QueueInfoExternal describes all queue entries for an application."""
ATTRIBUTES = {
- QUEUE: validation.Optional(validation.Repeated(QueueEntry))
+ TOTAL_STORAGE_LIMIT: validation.Optional(_TOTAL_STORAGE_LIMIT_REGEX),
+ QUEUE: validation.Optional(validation.Repeated(QueueEntry)),
}
@@ -141,3 +159,34 @@ def ParseRate(rate):
return number/(60 * 60)
if unit == 'd':
return number/(24 * 60 * 60)
+
+def ParseTotalStorageLimit(limit):
+ """Parses a string representing the storage bytes limit.
+
+ Optional limit suffixes are:
+ B (bytes), K (kilobytes), M (megabytes), G (gigabytes), T (terabytes)
+
+ Args:
+ limit: The storage bytes limit string.
+
+ Returns:
+ An int representing the storage limit in bytes.
+
+ Raises:
+    MalformedQueueConfiguration: if the limit argument isn't a valid Python
+    float followed by an optional suffix.
+ """
+ try:
+ if limit[-1] in BYTE_SUFFIXES:
+ number = float(limit[0:-1])
+ for c in BYTE_SUFFIXES:
+ if limit[-1] != c:
+ number = number * 1024
+ else:
+ return int(number)
+ else:
+ return int(limit)
+ except ValueError:
+ raise MalformedQueueConfiguration('Total Storage Limit "%s" is invalid.' %
+ limit)
+
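A worked example of the suffix loop in ParseTotalStorageLimit: for '1.2G' the loop multiplies by 1024 once for each suffix that precedes 'G' in BYTE_SUFFIXES ('B', 'K', 'M'), yielding 1.2 * 1024**3 bytes. A quick sketch:

    from google.appengine.api import queueinfo

    assert queueinfo.ParseTotalStorageLimit('300') == 300       # no suffix: bytes
    assert queueinfo.ParseTotalStorageLimit('120B') == 120
    assert queueinfo.ParseTotalStorageLimit('1.2G') == int(1.2 * 1024 ** 3)

    try:
      queueinfo.ParseTotalStorageLimit('12Q')                   # unknown suffix
    except queueinfo.MalformedQueueConfiguration:
      pass                                                      # int('12Q') raises ValueError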
diff --git a/google-appengine/google/appengine/api/urlfetch_errors.py b/google-appengine/google/appengine/api/urlfetch_errors.py
index e71ca5d..13bfac0 100755
--- a/google-appengine/google/appengine/api/urlfetch_errors.py
+++ b/google-appengine/google/appengine/api/urlfetch_errors.py
@@ -55,6 +55,3 @@ class ResponseTooLargeError(Error):
class InvalidMethodError(Error):
"""Raised when an invalid value for 'method' is provided"""
-
-class InvalidMethodError(Error):
- """Raised when an invalid value for 'method' is provided"""
diff --git a/google-appengine/google/appengine/api/urlfetch_service_pb.py b/google-appengine/google/appengine/api/urlfetch_service_pb.py
index 0254f09..34b6d5c 100644..100755
--- a/google-appengine/google/appengine/api/urlfetch_service_pb.py
+++ b/google-appengine/google/appengine/api/urlfetch_service_pb.py
@@ -578,6 +578,12 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
externalbytesreceived_ = 0
has_finalurl_ = 0
finalurl_ = ""
+ has_apicpumilliseconds_ = 0
+ apicpumilliseconds_ = 0
+ has_apibytessent_ = 0
+ apibytessent_ = 0
+ has_apibytesreceived_ = 0
+ apibytesreceived_ = 0
def __init__(self, contents=None):
self.header_ = []
@@ -677,6 +683,45 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
def has_finalurl(self): return self.has_finalurl_
+ def apicpumilliseconds(self): return self.apicpumilliseconds_
+
+ def set_apicpumilliseconds(self, x):
+ self.has_apicpumilliseconds_ = 1
+ self.apicpumilliseconds_ = x
+
+ def clear_apicpumilliseconds(self):
+ if self.has_apicpumilliseconds_:
+ self.has_apicpumilliseconds_ = 0
+ self.apicpumilliseconds_ = 0
+
+ def has_apicpumilliseconds(self): return self.has_apicpumilliseconds_
+
+ def apibytessent(self): return self.apibytessent_
+
+ def set_apibytessent(self, x):
+ self.has_apibytessent_ = 1
+ self.apibytessent_ = x
+
+ def clear_apibytessent(self):
+ if self.has_apibytessent_:
+ self.has_apibytessent_ = 0
+ self.apibytessent_ = 0
+
+ def has_apibytessent(self): return self.has_apibytessent_
+
+ def apibytesreceived(self): return self.apibytesreceived_
+
+ def set_apibytesreceived(self, x):
+ self.has_apibytesreceived_ = 1
+ self.apibytesreceived_ = x
+
+ def clear_apibytesreceived(self):
+ if self.has_apibytesreceived_:
+ self.has_apibytesreceived_ = 0
+ self.apibytesreceived_ = 0
+
+ def has_apibytesreceived(self): return self.has_apibytesreceived_
+
def MergeFrom(self, x):
assert x is not self
@@ -687,6 +732,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
if (x.has_externalbytessent()): self.set_externalbytessent(x.externalbytessent())
if (x.has_externalbytesreceived()): self.set_externalbytesreceived(x.externalbytesreceived())
if (x.has_finalurl()): self.set_finalurl(x.finalurl())
+ if (x.has_apicpumilliseconds()): self.set_apicpumilliseconds(x.apicpumilliseconds())
+ if (x.has_apibytessent()): self.set_apibytessent(x.apibytessent())
+ if (x.has_apibytesreceived()): self.set_apibytesreceived(x.apibytesreceived())
def Equals(self, x):
if x is self: return 1
@@ -705,6 +753,12 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
if self.has_externalbytesreceived_ and self.externalbytesreceived_ != x.externalbytesreceived_: return 0
if self.has_finalurl_ != x.has_finalurl_: return 0
if self.has_finalurl_ and self.finalurl_ != x.finalurl_: return 0
+ if self.has_apicpumilliseconds_ != x.has_apicpumilliseconds_: return 0
+ if self.has_apicpumilliseconds_ and self.apicpumilliseconds_ != x.apicpumilliseconds_: return 0
+ if self.has_apibytessent_ != x.has_apibytessent_: return 0
+ if self.has_apibytessent_ and self.apibytessent_ != x.apibytessent_: return 0
+ if self.has_apibytesreceived_ != x.has_apibytesreceived_: return 0
+ if self.has_apibytesreceived_ and self.apibytesreceived_ != x.apibytesreceived_: return 0
return 1
def IsInitialized(self, debug_strs=None):
@@ -727,6 +781,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
if (self.has_externalbytessent_): n += 1 + self.lengthVarInt64(self.externalbytessent_)
if (self.has_externalbytesreceived_): n += 1 + self.lengthVarInt64(self.externalbytesreceived_)
if (self.has_finalurl_): n += 1 + self.lengthString(len(self.finalurl_))
+ if (self.has_apicpumilliseconds_): n += 1 + self.lengthVarInt64(self.apicpumilliseconds_)
+ if (self.has_apibytessent_): n += 1 + self.lengthVarInt64(self.apibytessent_)
+ if (self.has_apibytesreceived_): n += 1 + self.lengthVarInt64(self.apibytesreceived_)
return n + 1
def Clear(self):
@@ -737,6 +794,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
self.clear_externalbytessent()
self.clear_externalbytesreceived()
self.clear_finalurl()
+ self.clear_apicpumilliseconds()
+ self.clear_apibytessent()
+ self.clear_apibytesreceived()
def OutputUnchecked(self, out):
if (self.has_content_):
@@ -760,6 +820,15 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
if (self.has_finalurl_):
out.putVarInt32(74)
out.putPrefixedString(self.finalurl_)
+ if (self.has_apicpumilliseconds_):
+ out.putVarInt32(80)
+ out.putVarInt64(self.apicpumilliseconds_)
+ if (self.has_apibytessent_):
+ out.putVarInt32(88)
+ out.putVarInt64(self.apibytessent_)
+ if (self.has_apibytesreceived_):
+ out.putVarInt32(96)
+ out.putVarInt64(self.apibytesreceived_)
def TryMerge(self, d):
while d.avail() > 0:
@@ -785,6 +854,15 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
if tt == 74:
self.set_finalurl(d.getPrefixedString())
continue
+ if tt == 80:
+ self.set_apicpumilliseconds(d.getVarInt64())
+ continue
+ if tt == 88:
+ self.set_apibytessent(d.getVarInt64())
+ continue
+ if tt == 96:
+ self.set_apibytesreceived(d.getVarInt64())
+ continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
@@ -805,6 +883,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
if self.has_externalbytessent_: res+=prefix+("ExternalBytesSent: %s\n" % self.DebugFormatInt64(self.externalbytessent_))
if self.has_externalbytesreceived_: res+=prefix+("ExternalBytesReceived: %s\n" % self.DebugFormatInt64(self.externalbytesreceived_))
if self.has_finalurl_: res+=prefix+("FinalUrl: %s\n" % self.DebugFormatString(self.finalurl_))
+ if self.has_apicpumilliseconds_: res+=prefix+("ApiCpuMilliseconds: %s\n" % self.DebugFormatInt64(self.apicpumilliseconds_))
+ if self.has_apibytessent_: res+=prefix+("ApiBytesSent: %s\n" % self.DebugFormatInt64(self.apibytessent_))
+ if self.has_apibytesreceived_: res+=prefix+("ApiBytesReceived: %s\n" % self.DebugFormatInt64(self.apibytesreceived_))
return res
@@ -820,6 +901,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
kExternalBytesSent = 7
kExternalBytesReceived = 8
kFinalUrl = 9
+ kApiCpuMilliseconds = 10
+ kApiBytesSent = 11
+ kApiBytesReceived = 12
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
@@ -832,7 +916,10 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
7: "ExternalBytesSent",
8: "ExternalBytesReceived",
9: "FinalUrl",
- }, 9)
+ 10: "ApiCpuMilliseconds",
+ 11: "ApiBytesSent",
+ 12: "ApiBytesReceived",
+ }, 12)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
@@ -845,7 +932,10 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.STRING,
- }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
+ 10: ProtocolBuffer.Encoder.NUMERIC,
+ 11: ProtocolBuffer.Encoder.NUMERIC,
+ 12: ProtocolBuffer.Encoder.NUMERIC,
+ }, 12, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
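The three API-accounting fields added to URLFetchResponse follow the same generated has_/set_/clear_ accessor pattern as the existing fields; a brief sketch:

    from google.appengine.api import urlfetch_service_pb

    resp = urlfetch_service_pb.URLFetchResponse()
    assert not resp.has_apicpumilliseconds()

    resp.set_apicpumilliseconds(42)
    resp.set_apibytessent(1024)
    resp.set_apibytesreceived(4096)
    assert resp.has_apicpumilliseconds() and resp.apicpumilliseconds() == 42

    resp.clear_apibytesreceived()    # clears the has_ flag and resets the value
    assert not resp.has_apibytesreceived() and resp.apibytesreceived() == 0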
diff --git a/google-appengine/google/appengine/api/urlfetch_stub.py b/google-appengine/google/appengine/api/urlfetch_stub.py
index 0684c7c..a7775f2 100755
--- a/google-appengine/google/appengine/api/urlfetch_stub.py
+++ b/google-appengine/google/appengine/api/urlfetch_stub.py
@@ -45,10 +45,6 @@ REDIRECT_STATUSES = frozenset([
httplib.TEMPORARY_REDIRECT,
])
-PORTS_ALLOWED_IN_PRODUCTION = (
- None, '80', '443', '4443', '8080', '8081', '8082', '8083', '8084', '8085',
- '8086', '8087', '8088', '8089', '8188', '8444', '8990')
-
_API_CALL_DEADLINE = 5.0
@@ -60,6 +56,21 @@ _UNTRUSTED_REQUEST_HEADERS = frozenset([
'x-forwarded-for',
])
+
+def _IsAllowedPort(port):
+ if port is None:
+ return True
+ try:
+ port = int(port)
+  except ValueError:
+ return False
+ if ((port >= 80 and port <= 90) or
+ (port >= 440 and port <= 450) or
+ port >= 1024):
+ return True
+ return False
+
+
class URLFetchServiceStub(apiproxy_stub.APIProxyStub):
"""Stub version of the urlfetch API to be used with apiproxy_stub_map."""
@@ -151,7 +162,7 @@ class URLFetchServiceStub(apiproxy_stub.APIProxyStub):
port = urllib.splitport(urllib.splituser(host)[1])[1]
- if port not in PORTS_ALLOWED_IN_PRODUCTION:
+ if not _IsAllowedPort(port):
logging.warning(
'urlfetch received %s ; port %s is not allowed in production!' %
(url, port))
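Replacing the fixed whitelist with _IsAllowedPort admits whole ranges rather than an enumerated set; a few spot checks of the new rules (the function is module-private, so calling it directly is only for illustration):

    from google.appengine.api import urlfetch_stub

    assert urlfetch_stub._IsAllowedPort(None)       # no explicit port in the URL
    assert urlfetch_stub._IsAllowedPort('80')       # 80..90
    assert urlfetch_stub._IsAllowedPort('443')      # 440..450
    assert urlfetch_stub._IsAllowedPort(8443)       # anything >= 1024
    assert not urlfetch_stub._IsAllowedPort('22')   # below 80: still blocked
    assert not urlfetch_stub._IsAllowedPort('abc')  # non-numeric: blocked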
diff --git a/google-appengine/google/appengine/api/user_service_pb.py b/google-appengine/google/appengine/api/user_service_pb.py
index 2037cdd..12c7dfe 100644..100755
--- a/google-appengine/google/appengine/api/user_service_pb.py
+++ b/google-appengine/google/appengine/api/user_service_pb.py
@@ -552,6 +552,10 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage):
email_ = ""
has_user_id_ = 0
user_id_ = ""
+ has_auth_domain_ = 0
+ auth_domain_ = ""
+ has_user_organization_ = 0
+ user_organization_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
@@ -582,11 +586,39 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage):
def has_user_id(self): return self.has_user_id_
+ def auth_domain(self): return self.auth_domain_
+
+ def set_auth_domain(self, x):
+ self.has_auth_domain_ = 1
+ self.auth_domain_ = x
+
+ def clear_auth_domain(self):
+ if self.has_auth_domain_:
+ self.has_auth_domain_ = 0
+ self.auth_domain_ = ""
+
+ def has_auth_domain(self): return self.has_auth_domain_
+
+ def user_organization(self): return self.user_organization_
+
+ def set_user_organization(self, x):
+ self.has_user_organization_ = 1
+ self.user_organization_ = x
+
+ def clear_user_organization(self):
+ if self.has_user_organization_:
+ self.has_user_organization_ = 0
+ self.user_organization_ = ""
+
+ def has_user_organization(self): return self.has_user_organization_
+
def MergeFrom(self, x):
assert x is not self
if (x.has_email()): self.set_email(x.email())
if (x.has_user_id()): self.set_user_id(x.user_id())
+ if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+ if (x.has_user_organization()): self.set_user_organization(x.user_organization())
def Equals(self, x):
if x is self: return 1
@@ -594,29 +626,52 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage):
if self.has_email_ and self.email_ != x.email_: return 0
if self.has_user_id_ != x.has_user_id_: return 0
if self.has_user_id_ and self.user_id_ != x.user_id_: return 0
+ if self.has_auth_domain_ != x.has_auth_domain_: return 0
+ if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+ if self.has_user_organization_ != x.has_user_organization_: return 0
+ if self.has_user_organization_ and self.user_organization_ != x.user_organization_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
+ if (not self.has_email_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: email not set.')
+ if (not self.has_user_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: user_id not set.')
+ if (not self.has_auth_domain_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: auth_domain not set.')
return initialized
def ByteSize(self):
n = 0
- if (self.has_email_): n += 1 + self.lengthString(len(self.email_))
- if (self.has_user_id_): n += 1 + self.lengthString(len(self.user_id_))
- return n + 0
+ n += self.lengthString(len(self.email_))
+ n += self.lengthString(len(self.user_id_))
+ n += self.lengthString(len(self.auth_domain_))
+ if (self.has_user_organization_): n += 1 + self.lengthString(len(self.user_organization_))
+ return n + 3
def Clear(self):
self.clear_email()
self.clear_user_id()
+ self.clear_auth_domain()
+ self.clear_user_organization()
def OutputUnchecked(self, out):
- if (self.has_email_):
- out.putVarInt32(10)
- out.putPrefixedString(self.email_)
- if (self.has_user_id_):
- out.putVarInt32(18)
- out.putPrefixedString(self.user_id_)
+ out.putVarInt32(10)
+ out.putPrefixedString(self.email_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.user_id_)
+ out.putVarInt32(26)
+ out.putPrefixedString(self.auth_domain_)
+ if (self.has_user_organization_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.user_organization_)
def TryMerge(self, d):
while d.avail() > 0:
@@ -627,6 +682,12 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage):
if tt == 18:
self.set_user_id(d.getPrefixedString())
continue
+ if tt == 26:
+ self.set_auth_domain(d.getPrefixedString())
+ continue
+ if tt == 34:
+ self.set_user_organization(d.getPrefixedString())
+ continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
@@ -635,6 +696,8 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage):
res=""
if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
if self.has_user_id_: res+=prefix+("user_id: %s\n" % self.DebugFormatString(self.user_id_))
+ if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+ if self.has_user_organization_: res+=prefix+("user_organization: %s\n" % self.DebugFormatString(self.user_organization_))
return res
@@ -643,18 +706,24 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage):
kemail = 1
kuser_id = 2
+ kauth_domain = 3
+ kuser_organization = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "email",
2: "user_id",
- }, 2)
+ 3: "auth_domain",
+ 4: "user_organization",
+ }, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
- }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.STRING,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
@@ -745,20 +814,23 @@ class CheckOAuthSignatureResponse(ProtocolBuffer.ProtocolMessage):
def IsInitialized(self, debug_strs=None):
initialized = 1
+ if (not self.has_oauth_consumer_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: oauth_consumer_key not set.')
return initialized
def ByteSize(self):
n = 0
- if (self.has_oauth_consumer_key_): n += 1 + self.lengthString(len(self.oauth_consumer_key_))
- return n + 0
+ n += self.lengthString(len(self.oauth_consumer_key_))
+ return n + 1
def Clear(self):
self.clear_oauth_consumer_key()
def OutputUnchecked(self, out):
- if (self.has_oauth_consumer_key_):
- out.putVarInt32(10)
- out.putPrefixedString(self.oauth_consumer_key_)
+ out.putVarInt32(10)
+ out.putPrefixedString(self.oauth_consumer_key_)
def TryMerge(self, d):
while d.avail() > 0:
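With email, user_id and auth_domain now required on GetOAuthUserResponse (and oauth_consumer_key on CheckOAuthSignatureResponse), IsInitialized() reports any that are unset; a sketch with made-up values:

    from google.appengine.api import user_service_pb

    resp = user_service_pb.GetOAuthUserResponse()
    problems = []
    assert not resp.IsInitialized(problems)
    # problems now names the three missing required fields.

    resp.set_email('user@example.com')
    resp.set_user_id('10000000000000000001')
    resp.set_auth_domain('gmail.com')
    assert resp.IsInitialized()    # user_organization remains optional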
diff --git a/google-appengine/google/appengine/api/validation.py b/google-appengine/google/appengine/api/validation.py
index 00833e6..db969a7 100755
--- a/google-appengine/google/appengine/api/validation.py
+++ b/google-appengine/google/appengine/api/validation.py
@@ -218,7 +218,7 @@ class Validated(object):
"""
if key in self.ATTRIBUTES:
- value = self.GetAttribute(key)(value)
+ value = self.GetAttribute(key)(value, key)
object.__setattr__(self, key, value)
else:
raise ValidationError('Class \'%s\' does not have attribute \'%s\''
@@ -358,15 +358,16 @@ class Validator(object):
"""
self.default = default
- def __call__(self, value):
+ def __call__(self, value, key='???'):
"""Main interface to validator is call mechanism."""
- return self.Validate(value)
+ return self.Validate(value, key)
- def Validate(self, value):
+ def Validate(self, value, key='???'):
"""Override this method to customize sub-class behavior.
Args:
value: Value to validate.
+ key: Name of the field being validated.
Returns:
Value if value is valid, or a valid representation of value.
@@ -425,11 +426,12 @@ class Type(Validator):
self.expected_type = expected_type
self.convert = convert
- def Validate(self, value):
+ def Validate(self, value, key):
"""Validate that value is correct type.
Args:
value: Value to validate.
+ key: Name of the field being validated.
Returns:
None if value is None, value if value is of correct type, converted
@@ -444,12 +446,11 @@ class Type(Validator):
try:
return self.expected_type(value)
except ValueError, e:
- raise ValidationError('Type conversion failed for value \'%s\'.'
- % value,
- e)
+ raise ValidationError('Type conversion failed for value \'%s\' '
+ 'key %s.' % (value, key), e)
except TypeError, e:
- raise ValidationError('Expected value of type %s, but got \'%s\'.'
- % (self.expected_type, value))
+ raise ValidationError('Expected value of type %s for key %s, but got '
+ '\'%s\'.' % (self.expected_type, key, value))
else:
raise MissingAttribute('Missing value is required.')
else:
@@ -534,7 +535,7 @@ class Options(Validator):
super(Options, self).__init__(default)
self.options = alias_map
- def Validate(self, value):
+ def Validate(self, value, key):
"""Validate options.
Returns:
@@ -547,8 +548,8 @@ class Options(Validator):
raise ValidationError('Value for options field must not be None.')
value = str(value)
if value not in self.options:
- raise ValidationError('Value \'%s\' not in %s.'
- % (value, self.options))
+ raise ValidationError('Value \'%s\' for key %s not in %s.'
+ % (value, key, self.options))
return self.options[value]
@@ -583,7 +584,7 @@ class Optional(Validator):
self.expected_type = self.validator.expected_type
self.default = default
- def Validate(self, value):
+ def Validate(self, value, key):
"""Optionally require a value.
    Normal validators do not accept None. This will accept None on
@@ -591,13 +592,20 @@ class Optional(Validator):
Args:
value: Value to be validated as optional.
+ key: Name of the field being validated.
Returns:
None if value is None, else results of contained validation.
"""
if value is None:
return None
- return self.validator(value)
+ return self.validator(value, key)
+
+ def ToValue(self, value):
+ """Convert 'value' to a simplified collection or basic type."""
+ if value is None:
+ return None
+ return self.validator.ToValue(value)
class Regex(Validator):
@@ -647,11 +655,12 @@ class Regex(Validator):
self.expected_type = string_type
- def Validate(self, value):
+ def Validate(self, value, key):
"""Does validation of a string against a regular expression.
Args:
value: String to match against regular expression.
+ key: Name of the field being validated.
Raises:
ValidationError when value does not match regular expression or
@@ -663,8 +672,8 @@ class Regex(Validator):
cast_value = TYPE_UNICODE(value)
if self.re.match(cast_value) is None:
- raise ValidationError('Value \'%s\' does not match expression \'%s\''
- % (value, self.re.pattern))
+ raise ValidationError('Value \'%s\' for key %s does not match expression '
+ '\'%s\'' % (value, key, self.re.pattern))
return cast_value
@@ -680,17 +689,19 @@ class _RegexStrValue(object):
is a list of strings, the strings are joined in to a single 'or' expression.
"""
- def __init__(self, attribute, value):
+ def __init__(self, attribute, value, key):
"""Initialize recompilable regex value.
Args:
attribute: Attribute validator associated with this regex value.
value: Initial underlying python value for regex string. Either a single
regex string or a list of regex strings.
+ key: Name of the field.
"""
self.__attribute = attribute
self.__value = value
self.__regex = None
+ self.__key = key
def __AsString(self, value):
"""Convert a value to appropriate string.
@@ -741,7 +752,8 @@ class _RegexStrValue(object):
try:
return re.compile(regex)
except re.error, e:
- raise ValidationError('Value \'%s\' does not compile: %s' % (regex, e), e)
+ raise ValidationError('Value \'%s\' for key %s does not compile: %s' %
+ (regex, self.__key, e), e)
@property
def regex(self):
@@ -790,7 +802,7 @@ class RegexStr(Validator):
AttributeDefinitionError if string_type is not a kind of string.
"""
if default is not None:
- default = _RegexStrValue(self, default)
+ default = _RegexStrValue(self, default, None)
re.compile(str(default))
super(RegexStr, self).__init__(default)
if (not issubclass(string_type, basestring) or
@@ -800,7 +812,7 @@ class RegexStr(Validator):
self.expected_type = string_type
- def Validate(self, value):
+ def Validate(self, value, key):
"""Validates that the string compiles as a regular expression.
Because the regular expression might have been expressed as a multiline
@@ -808,6 +820,7 @@ class RegexStr(Validator):
Args:
value: String to compile as a regular expression.
+ key: Name of the field being validated.
Raises:
ValueError when value does not compile as a regular expression. TypeError
@@ -815,7 +828,7 @@ class RegexStr(Validator):
"""
if isinstance(value, _RegexStrValue):
return value
- value = _RegexStrValue(self, value)
+ value = _RegexStrValue(self, value, key)
value.Validate()
return value
@@ -862,22 +875,24 @@ class Range(Validator):
self.expected_type = range_type
self._type_validator = Type(range_type)
- def Validate(self, value):
+ def Validate(self, value, key):
"""Validate that value is within range.
Validates against range-type then checks the range.
Args:
value: Value to validate.
+ key: Name of the field being validated.
Raises:
ValidationError when value is out of range. ValidationError when value
      is not of the same range type.
"""
- cast_value = self._type_validator.Validate(value)
+ cast_value = self._type_validator.Validate(value, key)
if cast_value < self.minimum or cast_value > self.maximum:
- raise ValidationError('Value \'%s\' is out of range %s - %s'
+ raise ValidationError('Value \'%s\' for %s is out of range %s - %s'
% (str(value),
+ key,
str(self.minimum),
str(self.maximum)))
return cast_value
@@ -902,27 +917,29 @@ class Repeated(Validator):
self.constructor = constructor
self.expected_type = list
- def Validate(self, value):
+ def Validate(self, value, key):
"""Do validation of sequence.
Value must be a list and all elements must be of type 'constructor'.
Args:
value: Value to validate.
+ key: Name of the field being validated.
Raises:
ValidationError if value is None, not a list or one of its elements is the
wrong type.
"""
if not isinstance(value, list):
- raise ValidationError('Repeated fields must be sequence, '
- 'but found \'%s\'.' % value)
+      raise ValidationError('Repeated fields for %s must be a sequence, '
+ 'but found \'%s\'.' % (key, value))
for item in value:
if isinstance(self.constructor, Validator):
- item = self.constructor.Validate(item)
+ item = self.constructor.Validate(item, key)
elif not isinstance(item, self.constructor):
- raise ValidationError('Repeated items must be %s, but found \'%s\'.'
- % (str(self.constructor), str(item)))
+ raise ValidationError('Repeated items for %s must be %s, but found '
+ '\'%s\'.' %
+ (key, str(self.constructor), str(item)))
return value
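The point of threading key through every Validate() is that errors can now name the offending field. A minimal sketch using one of the validators changed above (Type takes the expected type as its first constructor argument, per the hunks):

    from google.appengine.api import validation

    retries = validation.Type(int)
    assert retries('3', 'retries') == 3     # convert=True coerces the string

    try:
      retries('lots', 'retries')
    except validation.ValidationError, e:
      print e   # Type conversion failed for value 'lots' key retries.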
diff --git a/google-appengine/google/appengine/api/xmpp/xmpp_service_pb.py b/google-appengine/google/appengine/api/xmpp/xmpp_service_pb.py
index f77e50b..f77e50b 100644..100755
--- a/google-appengine/google/appengine/api/xmpp/xmpp_service_pb.py
+++ b/google-appengine/google/appengine/api/xmpp/xmpp_service_pb.py
diff --git a/google-appengine/google/appengine/base/capabilities_pb.py b/google-appengine/google/appengine/base/capabilities_pb.py
index c0434ef..c0434ef 100644..100755
--- a/google-appengine/google/appengine/base/capabilities_pb.py
+++ b/google-appengine/google/appengine/base/capabilities_pb.py
diff --git a/google-appengine/google/appengine/cron/groctimespecification.py b/google-appengine/google/appengine/cron/groctimespecification.py
index 37d168f..9c030b3 100755
--- a/google-appengine/google/appengine/cron/groctimespecification.py
+++ b/google-appengine/google/appengine/cron/groctimespecification.py
@@ -57,20 +57,26 @@ except ImportError:
pass
-def GrocTimeSpecification(schedule):
+def GrocTimeSpecification(schedule, timezone=None):
"""Factory function.
Turns a schedule specification into a TimeSpecification.
Arguments:
schedule: the schedule specification, as a string
-
+  timezone: the optional timezone as a string for this specification.
+      Defaults to 'UTC'; valid entries include 'Australia/Victoria'
+      or 'PST8PDT'.
Returns:
a TimeSpecification instance
"""
parser = groc.CreateParser(schedule)
parser.timespec()
+ if parser.getTokenStream().LT(1).getText():
+ raise groc.GrocException(
+ 'Extra token %r' % parser.getTokenStream().LT(1).getText())
+
if parser.period_string:
return IntervalTimeSpecification(parser.interval_mins,
parser.period_string,
@@ -79,7 +85,8 @@ def GrocTimeSpecification(schedule):
return SpecificTimeSpecification(parser.ordinal_set, parser.weekday_set,
parser.month_set,
parser.monthday_set,
- parser.time_string)
+ parser.time_string,
+ timezone)
class TimeSpecification(object):
@@ -186,7 +193,7 @@ class SpecificTimeSpecification(TimeSpecification):
def __init__(self, ordinals=None, weekdays=None, months=None, monthdays=None,
timestr='00:00', timezone=None):
- super(SpecificTimeSpecification, self).__init__(self)
+ super(SpecificTimeSpecification, self).__init__()
if weekdays and monthdays:
raise ValueError('cannot supply both monthdays and weekdays')
if ordinals is None:
@@ -304,8 +311,6 @@ class SpecificTimeSpecification(TimeSpecification):
day_matches.pop(0)
while day_matches:
out = candidate_month.replace(day=day_matches[0], hour=self.time.hour,
-
-
minute=self.time.minute, second=0,
microsecond=0)
if self.timezone and pytz is not None:
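Two behavioral notes on the factory change: the timezone now flows from GrocTimeSpecification into SpecificTimeSpecification, and leftover tokens after a parsed schedule raise instead of being silently dropped. A hedged sketch (schedule strings follow the groc grammar, e.g. 'every day 00:00'):

    from google.appengine.cron import groc
    from google.appengine.cron import groctimespecification

    spec = groctimespecification.GrocTimeSpecification(
        'every day 00:00', timezone='Australia/Victoria')

    try:
      groctimespecification.GrocTimeSpecification('every day 00:00 bogus')
    except groc.GrocException:
      pass    # the trailing token 'bogus' is now an explicit error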
diff --git a/google-appengine/google/appengine/datastore/datastore_pb.py b/google-appengine/google/appengine/datastore/datastore_pb.py
index 58e2400..f8d2424 100644..100755
--- a/google-appengine/google/appengine/datastore/datastore_pb.py
+++ b/google-appengine/google/appengine/datastore/datastore_pb.py
@@ -2352,194 +2352,6 @@ class RunCompiledQueryRequest(ProtocolBuffer.ProtocolMessage):
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
-class QueryExplanation(ProtocolBuffer.ProtocolMessage):
- has_native_ancestor_ = 0
- native_ancestor_ = 0
- has_native_offset_ = 0
- native_offset_ = 0
- has_native_limit_ = 0
- native_limit_ = 0
-
- def __init__(self, contents=None):
- self.native_index_ = []
- if contents is not None: self.MergeFromString(contents)
-
- def native_ancestor(self): return self.native_ancestor_
-
- def set_native_ancestor(self, x):
- self.has_native_ancestor_ = 1
- self.native_ancestor_ = x
-
- def clear_native_ancestor(self):
- if self.has_native_ancestor_:
- self.has_native_ancestor_ = 0
- self.native_ancestor_ = 0
-
- def has_native_ancestor(self): return self.has_native_ancestor_
-
- def native_index_size(self): return len(self.native_index_)
- def native_index_list(self): return self.native_index_
-
- def native_index(self, i):
- return self.native_index_[i]
-
- def mutable_native_index(self, i):
- return self.native_index_[i]
-
- def add_native_index(self):
- x = Index()
- self.native_index_.append(x)
- return x
-
- def clear_native_index(self):
- self.native_index_ = []
- def native_offset(self): return self.native_offset_
-
- def set_native_offset(self, x):
- self.has_native_offset_ = 1
- self.native_offset_ = x
-
- def clear_native_offset(self):
- if self.has_native_offset_:
- self.has_native_offset_ = 0
- self.native_offset_ = 0
-
- def has_native_offset(self): return self.has_native_offset_
-
- def native_limit(self): return self.native_limit_
-
- def set_native_limit(self, x):
- self.has_native_limit_ = 1
- self.native_limit_ = x
-
- def clear_native_limit(self):
- if self.has_native_limit_:
- self.has_native_limit_ = 0
- self.native_limit_ = 0
-
- def has_native_limit(self): return self.has_native_limit_
-
-
- def MergeFrom(self, x):
- assert x is not self
- if (x.has_native_ancestor()): self.set_native_ancestor(x.native_ancestor())
- for i in xrange(x.native_index_size()): self.add_native_index().CopyFrom(x.native_index(i))
- if (x.has_native_offset()): self.set_native_offset(x.native_offset())
- if (x.has_native_limit()): self.set_native_limit(x.native_limit())
-
- def Equals(self, x):
- if x is self: return 1
- if self.has_native_ancestor_ != x.has_native_ancestor_: return 0
- if self.has_native_ancestor_ and self.native_ancestor_ != x.native_ancestor_: return 0
- if len(self.native_index_) != len(x.native_index_): return 0
- for e1, e2 in zip(self.native_index_, x.native_index_):
- if e1 != e2: return 0
- if self.has_native_offset_ != x.has_native_offset_: return 0
- if self.has_native_offset_ and self.native_offset_ != x.native_offset_: return 0
- if self.has_native_limit_ != x.has_native_limit_: return 0
- if self.has_native_limit_ and self.native_limit_ != x.native_limit_: return 0
- return 1
-
- def IsInitialized(self, debug_strs=None):
- initialized = 1
- for p in self.native_index_:
- if not p.IsInitialized(debug_strs): initialized=0
- return initialized
-
- def ByteSize(self):
- n = 0
- if (self.has_native_ancestor_): n += 2
- n += 1 * len(self.native_index_)
- for i in xrange(len(self.native_index_)): n += self.lengthString(self.native_index_[i].ByteSize())
- if (self.has_native_offset_): n += 1 + self.lengthVarInt64(self.native_offset_)
- if (self.has_native_limit_): n += 1 + self.lengthVarInt64(self.native_limit_)
- return n + 0
-
- def Clear(self):
- self.clear_native_ancestor()
- self.clear_native_index()
- self.clear_native_offset()
- self.clear_native_limit()
-
- def OutputUnchecked(self, out):
- if (self.has_native_ancestor_):
- out.putVarInt32(8)
- out.putBoolean(self.native_ancestor_)
- for i in xrange(len(self.native_index_)):
- out.putVarInt32(18)
- out.putVarInt32(self.native_index_[i].ByteSize())
- self.native_index_[i].OutputUnchecked(out)
- if (self.has_native_offset_):
- out.putVarInt32(24)
- out.putVarInt32(self.native_offset_)
- if (self.has_native_limit_):
- out.putVarInt32(32)
- out.putVarInt32(self.native_limit_)
-
- def TryMerge(self, d):
- while d.avail() > 0:
- tt = d.getVarInt32()
- if tt == 8:
- self.set_native_ancestor(d.getBoolean())
- continue
- if tt == 18:
- length = d.getVarInt32()
- tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
- d.skip(length)
- self.add_native_index().TryMerge(tmp)
- continue
- if tt == 24:
- self.set_native_offset(d.getVarInt32())
- continue
- if tt == 32:
- self.set_native_limit(d.getVarInt32())
- continue
- if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
- d.skipData(tt)
-
-
- def __str__(self, prefix="", printElemNumber=0):
- res=""
- if self.has_native_ancestor_: res+=prefix+("native_ancestor: %s\n" % self.DebugFormatBool(self.native_ancestor_))
- cnt=0
- for e in self.native_index_:
- elm=""
- if printElemNumber: elm="(%d)" % cnt
- res+=prefix+("native_index%s <\n" % elm)
- res+=e.__str__(prefix + " ", printElemNumber)
- res+=prefix+">\n"
- cnt+=1
- if self.has_native_offset_: res+=prefix+("native_offset: %s\n" % self.DebugFormatInt32(self.native_offset_))
- if self.has_native_limit_: res+=prefix+("native_limit: %s\n" % self.DebugFormatInt32(self.native_limit_))
- return res
-
-
- def _BuildTagLookupTable(sparse, maxtag, default=None):
- return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
-
- knative_ancestor = 1
- knative_index = 2
- knative_offset = 3
- knative_limit = 4
-
- _TEXT = _BuildTagLookupTable({
- 0: "ErrorCode",
- 1: "native_ancestor",
- 2: "native_index",
- 3: "native_offset",
- 4: "native_limit",
- }, 4)
-
- _TYPES = _BuildTagLookupTable({
- 0: ProtocolBuffer.Encoder.NUMERIC,
- 1: ProtocolBuffer.Encoder.NUMERIC,
- 2: ProtocolBuffer.Encoder.STRING,
- 3: ProtocolBuffer.Encoder.NUMERIC,
- 4: ProtocolBuffer.Encoder.NUMERIC,
- }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
-
- _STYLE = """"""
- _STYLE_CONTENT_TYPE = """"""
class Cursor(ProtocolBuffer.ProtocolMessage):
has_cursor_ = 0
cursor_ = 0
@@ -2661,6 +2473,9 @@ class Error(ProtocolBuffer.ProtocolMessage):
NEED_INDEX = 4
TIMEOUT = 5
PERMISSION_DENIED = 6
+ BIGTABLE_ERROR = 7
+ COMMITTED_BUT_STILL_APPLYING = 8
+ CAPABILITY_DISABLED = 9
_ErrorCode_NAMES = {
1: "BAD_REQUEST",
@@ -2669,6 +2484,9 @@ class Error(ProtocolBuffer.ProtocolMessage):
4: "NEED_INDEX",
5: "TIMEOUT",
6: "PERMISSION_DENIED",
+ 7: "BIGTABLE_ERROR",
+ 8: "COMMITTED_BUT_STILL_APPLYING",
+ 9: "CAPABILITY_DISABLED",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -5343,4 +5161,4 @@ class CommitResponse(ProtocolBuffer.ProtocolMessage):
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
-__all__ = ['Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_Position','RunCompiledQueryRequest','QueryExplanation','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','GetSchemaRequest','Schema','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse']
+__all__ = ['Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_Position','RunCompiledQueryRequest','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','GetSchemaRequest','Schema','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse']
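The three new datastore error codes participate in the existing name lookup (assuming the usual classmethod binding the generator emits for ErrorCode_Name); for example:

    from google.appengine.datastore import datastore_pb

    name = datastore_pb.Error.ErrorCode_Name(
        datastore_pb.Error.COMMITTED_BUT_STILL_APPLYING)
    assert name == 'COMMITTED_BUT_STILL_APPLYING'
    assert datastore_pb.Error.ErrorCode_Name(99) == ''   # unknown code -> ''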
diff --git a/google-appengine/google/appengine/datastore/datastore_sqlite_stub.py b/google-appengine/google/appengine/datastore/datastore_sqlite_stub.py
new file mode 100644
index 0000000..8994d85
--- /dev/null
+++ b/google-appengine/google/appengine/datastore/datastore_sqlite_stub.py
@@ -0,0 +1,1501 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""SQlite-based stub for the Python datastore API.
+
+Entities are stored in an sqlite database in a similar fashion to the production
+datastore.
+
+Transactions are serialized through __connection_lock. Each transaction
+acquires it when it begins and releases it when it commits or rolls back.
+"""
+
+
+
+
+
+
+import array
+import itertools
+import logging
+import md5
+import sys
+import threading
+
+from google.appengine.datastore import entity_pb
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore_errors
+from google.appengine.datastore import datastore_index
+from google.appengine.datastore import datastore_pb
+from google.appengine.datastore import sortable_pb_encoder
+from google.appengine.runtime import apiproxy_errors
+
+try:
+ import pysqlite2.dbapi2 as sqlite3
+except ImportError:
+ import sqlite3
+
+try:
+ __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb')
+ taskqueue_service_pb = sys.modules.get(
+ 'google.appengine.api.labs.taskqueue.taskqueue_service_pb')
+except ImportError:
+ from google.appengine.api.taskqueue import taskqueue_service_pb
+
+
+import __builtin__
+buffer = __builtin__.buffer
+
+
+entity_pb.Reference.__hash__ = lambda self: hash(self.Encode())
+datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())
+datastore_pb.Transaction.__hash__ = lambda self: hash(self.Encode())
+datastore_pb.Cursor.__hash__ = lambda self: hash(self.Encode())
+
+
+_MAXIMUM_RESULTS = 1000
+
+
+_MAX_QUERY_COMPONENTS = 63
+
+
+_BATCH_SIZE = 20
+
+
+_MAX_ACTIONS_PER_TXN = 5
+
+
+_MAX_TIMEOUT = 5.0
+
+
+_OPERATOR_MAP = {
+ datastore_pb.Query_Filter.LESS_THAN: '<',
+ datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
+ datastore_pb.Query_Filter.EQUAL: '=',
+ datastore_pb.Query_Filter.GREATER_THAN: '>',
+ datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
+}
+
+
+_ORDER_MAP = {
+ datastore_pb.Query_Order.ASCENDING: 'ASC',
+ datastore_pb.Query_Order.DESCENDING: 'DESC',
+}
+
+_CORE_SCHEMA = """
+CREATE TABLE IF NOT EXISTS Apps (
+ app_id TEXT NOT NULL PRIMARY KEY,
+ indexes BLOB);
+
+CREATE TABLE IF NOT EXISTS Namespaces (
+ app_id TEXT NOT NULL,
+ name_space TEXT NOT NULL,
+ PRIMARY KEY (app_id, name_space));
+
+CREATE TABLE IF NOT EXISTS IdSeq (
+ prefix TEXT NOT NULL PRIMARY KEY,
+ next_id INT NOT NULL);
+"""
+
+_NAMESPACE_SCHEMA = """
+CREATE TABLE "%(prefix)s!Entities" (
+ __path__ BLOB NOT NULL PRIMARY KEY,
+ kind TEXT NOT NULL,
+ entity BLOB NOT NULL);
+CREATE INDEX "%(prefix)s!EntitiesByKind" ON "%(prefix)s!Entities" (
+ kind ASC,
+ __path__ ASC);
+
+CREATE TABLE "%(prefix)s!EntitiesByProperty" (
+ kind TEXT NOT NULL,
+ name TEXT NOT NULL,
+ value BLOB NOT NULL,
+ __path__ BLOB NOT NULL REFERENCES Entities,
+ PRIMARY KEY(kind ASC, name ASC, value ASC, __path__ ASC) ON CONFLICT IGNORE);
+CREATE INDEX "%(prefix)s!EntitiesByPropertyDesc"
+ ON "%(prefix)s!EntitiesByProperty" (
+ kind ASC,
+ name ASC,
+ value DESC,
+ __path__ ASC);
+CREATE INDEX "%(prefix)s!EntitiesByPropertyKey"
+ ON "%(prefix)s!EntitiesByProperty" (
+ __path__ ASC);
+
+INSERT OR IGNORE INTO Apps (app_id) VALUES ('%(app_id)s');
+INSERT INTO Namespaces (app_id, name_space)
+ VALUES ('%(app_id)s', '%(name_space)s');
+INSERT OR IGNORE INTO IdSeq VALUES ('%(prefix)s', 1);
+"""
+
+
+def ReferencePropertyToReference(refprop):
+ ref = entity_pb.Reference()
+ ref.set_app(refprop.app())
+ if refprop.has_name_space():
+ ref.set_name_space(refprop.name_space())
+ for pathelem in refprop.pathelement_list():
+ ref.mutable_path().add_element().CopyFrom(pathelem)
+ return ref
+
+
+class QueryCursor(object):
+ """Encapsulates a database cursor and provides methods to fetch results."""
+
+ def __init__(self, query, db_cursor):
+ """Constructor.
+
+ Args:
+ query: A Query PB.
+ db_cursor: An SQLite cursor returning n+2 columns. The first 2 columns
+ must be the path of the entity and the entity itself, while the
+ remaining columns must be the sort columns for the query.
+ """
+ self.__query = query
+ self.app = query.app()
+ self.__cursor = db_cursor
+ self.__seen = set()
+
+ self.__position = ''
+
+ self.__next_result = (None, None)
+
+ if query.has_limit():
+ self.limit = query.limit() + query.offset()
+ else:
+ self.limit = None
+
+ def Count(self):
+ """Counts results, up to the query's limit.
+
+ Note this method does not deduplicate results, so the query it was generated
+ from should have the 'distinct' clause applied.
+
+ Returns:
+ int: Result count.
+ """
+ count = 0
+ while self.limit is None or count < self.limit:
+ row = self.__cursor.fetchone()
+ if not row:
+ break
+ count += 1
+ return count
+
+ def _EncodeCompiledCursor(self, cc):
+ """Encodes the current position in the query as a compiled cursor.
+
+ Args:
+ cc: The compiled cursor to fill out.
+ """
+ position = cc.add_position()
+ position.set_start_key(self.__position)
+
+ def _GetResult(self):
+ """Returns the next result from the result set, without deduplication.
+
+ Returns:
+ (path, value): The path and value of the next result.
+ """
+ if not self.__cursor:
+ return None, None
+ row = self.__cursor.fetchone()
+ if not row:
+ self.__cursor = None
+ return None, None
+ path, data, position_parts = str(row[0]), row[1], row[2:]
+ self.__position = ''.join(str(x) for x in position_parts)
+ return path, data
+
+ def _Next(self):
+ """Fetches the next unique result from the result set.
+
+ Returns:
+ A datastore_pb.EntityProto instance.
+ """
+ entity = None
+ path, data = self.__next_result
+ self.__next_result = None, None
+ while self.__cursor and not entity:
+ if path and path not in self.__seen:
+ self.__seen.add(path)
+ entity = entity_pb.EntityProto(data)
+ else:
+ path, data = self._GetResult()
+ return entity
+
+ def Skip(self, count):
+ """Skips the specified number of unique results.
+
+ Args:
+ count: Number of results to skip.
+ """
+ for unused_i in xrange(count):
+ self._Next()
+
+ def ResumeFromCompiledCursor(self, cc):
+ """Resumes a query from a compiled cursor.
+
+ Args:
+ cc: The compiled cursor to resume from.
+ """
+ target_position = cc.position(0).start_key()
+ while self.__position <= target_position and self.__cursor:
+ self.__next_result = self._GetResult()
+
+ def PopulateQueryResult(self, count, result):
+ """Populates a QueryResult PB with results from the cursor.
+
+ Args:
+ count: The number of results to retrieve.
+ result: out: A query_result PB.
+ """
+ if count > _MAXIMUM_RESULTS:
+ count = _MAXIMUM_RESULTS
+
+ result.set_keys_only(self.__query.keys_only())
+
+ result_list = result.result_list()
+ while len(result_list) < count:
+ if self.limit is not None and len(self.__seen) >= self.limit:
+ break
+ entity = self._Next()
+ if entity is None:
+ break
+ result_list.append(entity)
+
+ result.set_more_results(len(result_list) == count)
+ self._EncodeCompiledCursor(result.mutable_compiled_cursor())
+
+
+class DatastoreSqliteStub(apiproxy_stub.APIProxyStub):
+ """Persistent stub for the Python datastore API.
+
+ Stores all entities in an SQLite database. A DatastoreSqliteStub instance
+ handles a single app's data.
+ """
+
+ WRITE_ONLY = entity_pb.CompositeIndex.WRITE_ONLY
+ READ_WRITE = entity_pb.CompositeIndex.READ_WRITE
+ DELETED = entity_pb.CompositeIndex.DELETED
+ ERROR = entity_pb.CompositeIndex.ERROR
+
+ _INDEX_STATE_TRANSITIONS = {
+ WRITE_ONLY: frozenset((READ_WRITE, DELETED, ERROR)),
+ READ_WRITE: frozenset((DELETED,)),
+ ERROR: frozenset((DELETED,)),
+ DELETED: frozenset((ERROR,)),
+ }
+
+ READ_ERROR_MSG = ('Data in %s is corrupt or a different version. '
+ 'Try running with the --clear_datastore flag.\n%r')
+
+ def __init__(self,
+ app_id,
+ datastore_file,
+ require_indexes=False,
+ verbose=False,
+ service_name='datastore_v3',
+ trusted=False):
+ """Constructor.
+
+ Initializes the SQLite database if necessary.
+
+ Args:
+ app_id: string
+ datastore_file: string, path to sqlite database. Use None to create an
+ in-memory database.
+ require_indexes: bool, default False. If True, composite indexes must
+ exist in index.yaml for queries that need them.
+ verbose: bool, default False. If True, logs all select statements.
+ service_name: Service name expected for all calls.
+ trusted: bool, default False. If True, this stub allows an app to access
+ the data of another app.
+ """
+ apiproxy_stub.APIProxyStub.__init__(self, service_name)
+
+ assert isinstance(app_id, basestring) and app_id
+ self.__app_id = app_id
+ self.__datastore_file = datastore_file
+ self.SetTrusted(trusted)
+
+ self.__tx_actions = []
+
+ self.__require_indexes = require_indexes
+ self.__verbose = verbose
+
+ self.__id_map = {}
+ self.__id_lock = threading.Lock()
+
+ self.__connection = sqlite3.connect(
+ self.__datastore_file or ':memory:',
+ timeout=_MAX_TIMEOUT,
+ check_same_thread=False)
+ self.__connection_lock = threading.RLock()
+ self.__current_transaction = None
+ self.__next_tx_handle = 1
+
+ self.__tx_writes = {}
+ self.__tx_deletes = set()
+
+ self.__next_cursor_id = 1
+ self.__cursor_lock = threading.Lock()
+ self.__cursors = {}
+
+ self.__namespaces = set()
+
+ self.__indexes = {}
+ self.__index_lock = threading.Lock()
+
+ self.__query_history = {}
+
+ try:
+ self.__Init()
+ except sqlite3.DatabaseError, e:
+ raise datastore_errors.InternalError(self.READ_ERROR_MSG %
+ (self.__datastore_file, e))
+
+ def __Init(self):
+ self.__connection.executescript(_CORE_SCHEMA)
+ self.__connection.commit()
+
+ c = self.__connection.execute('SELECT app_id, name_space FROM Namespaces')
+ self.__namespaces = set(c.fetchall())
+
+ c = self.__connection.execute('SELECT app_id, indexes FROM Apps')
+ for app_id, index_proto in c.fetchall():
+ index_map = self.__indexes.setdefault(app_id, {})
+ if not index_proto:
+ continue
+ indexes = datastore_pb.CompositeIndices(index_proto)
+ for index in indexes.index_list():
+ index_map.setdefault(index.definition().entity_type(), []).append(index)
+
+ def Clear(self):
+ """Clears the datastore."""
+ conn = self.__GetConnection(None)
+ try:
+ c = conn.execute(
+ "SELECT tbl_name FROM sqlite_master WHERE type = 'table'")
+ for row in c.fetchall():
+ conn.execute('DROP TABLE "%s"' % row)
+ conn.commit()
+ finally:
+ self.__ReleaseConnection(conn, None)
+
+ self.__namespaces = set()
+ self.__indexes = {}
+ self.__cursors = {}
+ self.__query_history = {}
+
+ self.__Init()
+
+ def Read(self):
+ """Reads the datastore from disk.
+
+ Noop for compatibility with file stub.
+ """
+ pass
+
+ def Write(self):
+ """Writes the datastore to disk.
+
+ Noop for compatibility with file stub.
+ """
+ pass
+
+ def SetTrusted(self, trusted):
+ """Set/clear the trusted bit in the stub.
+
+ This bit indicates that the app calling the stub is trusted. A
+ trusted app can write to datastores of other apps.
+
+ Args:
+ trusted: boolean.
+ """
+ self.__trusted = trusted
+
+ @staticmethod
+ def __MakeParamList(size):
+ """Returns a comma separated list of sqlite substitution parameters.
+
+ Args:
+ size: Number of parameters in returned list.
+ Returns:
+ A comma separated list of substitution parameters.
+ """
+ return ','.join('?' * size)
+
+ @staticmethod
+ def __GetEntityKind(key):
+ if isinstance(key, entity_pb.EntityProto):
+ key = key.key()
+ return key.path().element_list()[-1].type()
+
+ @staticmethod
+ def __EncodeIndexPB(pb):
+ if isinstance(pb, entity_pb.PropertyValue) and pb.has_uservalue():
+ userval = entity_pb.PropertyValue()
+ userval.mutable_uservalue().set_email(pb.uservalue().email())
+ userval.mutable_uservalue().set_auth_domain(pb.uservalue().auth_domain())
+ userval.mutable_uservalue().set_gaiaid(0)
+ pb = userval
+ encoder = sortable_pb_encoder.Encoder()
+ pb.Output(encoder)
+ return buffer(encoder.buffer().tostring())
+
+ @staticmethod
+ def __AddQueryParam(params, param):
+ params.append(param)
+ return len(params)
+
+ @staticmethod
+ def __CreateFilterString(filter_list, params):
+ """Transforms a filter list into an SQL WHERE clause.
+
+ Args:
+ filter_list: The list of (property, operator, value) filters
+ to transform. A value_type of -1 indicates no value type comparison
+ should be done.
+ params: out: A list of parameters to pass to the query.
+ Returns:
+ An SQL 'where' clause.
+ """
+ clauses = []
+ for prop, operator, value in filter_list:
+ sql_op = _OPERATOR_MAP[operator]
+
+ value_index = DatastoreSqliteStub.__AddQueryParam(params, value)
+ clauses.append('%s %s :%d' % (prop, sql_op, value_index))
+
+ filters = ' AND '.join(clauses)
+ if filters:
+ filters = 'WHERE ' + filters
+ return filters
+
+ @staticmethod
+ def __CreateOrderString(order_list):
+ """Returns an 'ORDER BY' clause from the given list of orders.
+
+ Args:
+ order_list: A list of (field, order) tuples.
+ Returns:
+ An SQL ORDER BY clause.
+ """
+ orders = ', '.join('%s %s' % (x[0], _ORDER_MAP[x[1]]) for x in order_list)
+ if orders:
+ orders = 'ORDER BY ' + orders
+ return orders
+
+ def __ValidateAppId(self, app_id):
+ """Verify that this is the stub for app_id.
+
+ Args:
+ app_id: An application ID.
+
+ Raises:
+ datastore_errors.BadRequestError: if this is not the stub for app_id.
+ """
+ assert app_id
+ if not self.__trusted and app_id != self.__app_id:
+ raise datastore_errors.BadRequestError(
+ 'app %s cannot access app %s\'s data' % (self.__app_id, app_id))
+
+ def __ValidateTransaction(self, tx):
+ """Verify that this transaction exists and is valid.
+
+ Args:
+ tx: datastore_pb.Transaction
+
+ Raises:
+      datastore_errors.BadRequestError: if the tx is not valid or doesn't exist.
+ """
+ assert isinstance(tx, datastore_pb.Transaction)
+ self.__ValidateAppId(tx.app())
+ if tx.handle() != self.__current_transaction:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ 'Transaction %s not found' % tx)
+
+ def __ValidateKey(self, key):
+ """Validate this key.
+
+ Args:
+ key: entity_pb.Reference
+
+ Raises:
+ datastore_errors.BadRequestError: if the key is invalid
+ """
+ assert isinstance(key, entity_pb.Reference)
+
+ self.__ValidateAppId(key.app())
+
+ for elem in key.path().element_list():
+ if elem.has_id() == elem.has_name():
+ raise datastore_errors.BadRequestError(
+ 'each key path element should have id or name but not both: %r'
+ % key)
+
+ def __GetConnection(self, transaction):
+ """Retrieves a connection to the SQLite DB.
+
+ If a transaction is supplied, the transaction's connection is returned;
+ otherwise a fresh connection is returned.
+
+ Args:
+ transaction: A Transaction PB.
+ Returns:
+ An SQLite connection object.
+ """
+ self.__connection_lock.acquire()
+ request_tx = transaction and transaction.handle()
+ if request_tx == 0:
+ request_tx = None
+ if request_tx != self.__current_transaction:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Only one concurrent transaction per thread is permitted.')
+ return self.__connection
+
+ def __ReleaseConnection(self, conn, transaction, rollback=False):
+ """Releases a connection for use by other operations.
+
+    If a transaction is supplied, the connection is released without committing.
+
+ Args:
+ conn: An SQLite connection object.
+ transaction: A Transaction PB.
+ rollback: If True, roll back the database TX instead of committing it.
+ """
+ if not transaction or not transaction.has_handle():
+ if rollback:
+ conn.rollback()
+ else:
+ conn.commit()
+ self.__connection_lock.release()
+
+ def __ConfigureNamespace(self, conn, prefix, app_id, name_space):
+ """Ensures the relevant tables and indexes exist.
+
+ Args:
+ conn: An SQLite database connection.
+ prefix: The namespace prefix to configure.
+ app_id: The app ID.
+ name_space: The per-app namespace name.
+ """
+ format_args = {'app_id': app_id, 'name_space': name_space, 'prefix': prefix}
+ conn.executescript(_NAMESPACE_SCHEMA % format_args)
+ conn.commit()
+
+ def __WriteIndexData(self, conn, app):
+ """Writes index data to disk.
+
+ Args:
+ conn: An SQLite connection.
+ app: The app ID to write indexes for.
+ """
+ indices = datastore_pb.CompositeIndices()
+ for indexes in self.__indexes[app].values():
+ indices.index_list().extend(indexes)
+
+ conn.execute('UPDATE Apps SET indexes = ? WHERE app_id = ?',
+                 (indices.Encode(), app))
+
+ def __GetTablePrefix(self, data):
+ """Returns the namespace prefix for a query.
+
+ Args:
+ data: An Entity, Key or Query PB, or an (app_id, ns) tuple.
+ Returns:
+      A valid table prefix.
+ """
+ if isinstance(data, entity_pb.EntityProto):
+ data = data.key()
+ if not isinstance(data, tuple):
+ data = (data.app(), data.name_space())
+ prefix = ('%s!%s' % data).replace('"', '""')
+ if data not in self.__namespaces:
+ self.__namespaces.add(data)
+ self.__ConfigureNamespace(self.__connection, prefix, *data)
+ return prefix
+
+ def __DeleteRows(self, conn, paths, table):
+ """Deletes rows from a table.
+
+ Args:
+ conn: An SQLite connection.
+ paths: Paths to delete.
+ table: The table to delete from.
+ Returns:
+ The number of rows deleted.
+ """
+ c = conn.execute('DELETE FROM "%s" WHERE __path__ IN (%s)'
+ % (table, self.__MakeParamList(len(paths))),
+ paths)
+ return c.rowcount
+
+ def __DeleteEntityRows(self, conn, keys, table):
+ """Deletes rows from the specified table that index the keys provided.
+
+ Args:
+ conn: A database connection.
+ keys: A list of keys to delete index entries for.
+ table: The table to delete from.
+ Returns:
+ The number of rows deleted.
+ """
+    keys = sorted((x.app(), x.name_space(), x) for x in keys)
+    rowcount = 0
+    for (app_id, ns), group in itertools.groupby(keys, lambda x: x[:2]):
+      path_strings = [self.__EncodeIndexPB(x[2].path()) for x in group]
+      prefix = self.__GetTablePrefix((app_id, ns))
+      rowcount += self.__DeleteRows(conn, path_strings, '%s!%s' % (prefix, table))
+    return rowcount
+
+ def __DeleteIndexEntries(self, conn, keys):
+ """Deletes entities from the index.
+
+ Args:
+ conn: An SQLite connection.
+ keys: A list of keys to delete.
+ """
+ self.__DeleteEntityRows(conn, keys, 'EntitiesByProperty')
+
+ def __InsertEntities(self, conn, entities):
+ """Inserts or updates entities in the DB.
+
+ Args:
+ conn: A database connection.
+ entities: A list of entities to store.
+ """
+
+ def RowGenerator(entities):
+ for unused_prefix, e in entities:
+ yield (self.__EncodeIndexPB(e.key().path()),
+ self.__GetEntityKind(e),
+ buffer(e.Encode()))
+
+ entities = sorted((self.__GetTablePrefix(x), x) for x in entities)
+ for prefix, group in itertools.groupby(entities, lambda x: x[0]):
+ conn.executemany(
+ 'INSERT OR REPLACE INTO "%s!Entities" VALUES (?, ?, ?)' % prefix,
+ RowGenerator(group))
+
+ def __InsertIndexEntries(self, conn, entities):
+ """Inserts index entries for the supplied entities.
+
+ Args:
+ conn: A database connection.
+ entities: A list of entities to create index entries for.
+ """
+
+ def RowGenerator(entities):
+ for unused_prefix, e in entities:
+ for p in e.property_list():
+ yield (self.__GetEntityKind(e),
+ p.name(),
+ self.__EncodeIndexPB(p.value()),
+ self.__EncodeIndexPB(e.key().path()))
+ entities = sorted((self.__GetTablePrefix(x), x) for x in entities)
+ for prefix, group in itertools.groupby(entities, lambda x: x[0]):
+ conn.executemany(
+ 'INSERT INTO "%s!EntitiesByProperty" VALUES (?, ?, ?, ?)' % prefix,
+ RowGenerator(group))
+
+ def __AllocateIds(self, conn, prefix, size):
+ """Allocates IDs.
+
+ Args:
+      conn: An SQLite connection object.
+ prefix: A table namespace prefix.
+ size: Number of IDs to allocate.
+ Returns:
+      int: The beginning of a range of size IDs.
+ """
+ self.__id_lock.acquire()
+ next_id, block_size = self.__id_map.get(prefix, (0, 0))
+ if size >= block_size:
+ block_size = max(1000, size)
+ c = conn.execute(
+ 'UPDATE IdSeq SET next_id = next_id + ? WHERE prefix = ?',
+ (block_size, prefix))
+ assert c.rowcount == 1
+ c = conn.execute('SELECT next_id FROM IdSeq WHERE prefix = ? LIMIT 1',
+ (prefix,))
+ next_id = c.fetchone()[0] - block_size
+
+ ret = next_id
+
+ next_id += size
+ block_size -= size
+ self.__id_map[prefix] = (next_id, block_size)
+ self.__id_lock.release()
+
+ return ret
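
The allocator above serves most requests from an in-memory block and only
touches the IdSeq row when the block runs out, so the common case does no
SQL. A simplified single-threaded sketch of the same bookkeeping (names
hypothetical; the real method also holds __id_lock):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE IdSeq (prefix TEXT PRIMARY KEY, next_id INT)')
    conn.execute('INSERT INTO IdSeq VALUES (?, ?)', ('myapp!', 1))

    id_map = {}  # prefix -> (next unused id, ids left in reserved block)

    def allocate_ids(prefix, size, block=1000):
        next_id, remaining = id_map.get(prefix, (0, 0))
        if size >= remaining:
            reserve = max(block, size)   # claim a fresh block in the DB
            conn.execute('UPDATE IdSeq SET next_id = next_id + ? '
                         'WHERE prefix = ?', (reserve, prefix))
            row = conn.execute('SELECT next_id FROM IdSeq WHERE prefix = ?',
                               (prefix,)).fetchone()
            next_id, remaining = row[0] - reserve, reserve
        id_map[prefix] = (next_id + size, remaining - size)
        return next_id                   # first id of the allocated range

    assert allocate_ids('myapp!', 5) == 1   # reserves ids 1..1000
    assert allocate_ids('myapp!', 5) == 6   # served from memory, no SQL
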
+
+ def MakeSyncCall(self, service, call, request, response):
+ """The main RPC entry point. service must be 'datastore_v3'."""
+ self.AssertPbIsInitialized(request)
+ try:
+ apiproxy_stub.APIProxyStub.MakeSyncCall(self, service, call, request,
+ response)
+ except sqlite3.OperationalError, e:
+ if e.args[0] == 'database is locked':
+ raise datastore_errors.Timeout('Database is locked.')
+ else:
+ raise
+ self.AssertPbIsInitialized(response)
+
+ def AssertPbIsInitialized(self, pb):
+ """Raises an exception if the given PB is not initialized and valid."""
+ explanation = []
+ assert pb.IsInitialized(explanation), explanation
+ pb.Encode()
+
+ def QueryHistory(self):
+ """Returns a dict that maps Query PBs to times they've been run."""
+ return dict((pb, times) for pb, times in self.__query_history.items() if
+ pb.app() == self.__app_id)
+
+ def __PutEntities(self, conn, entities):
+ self.__DeleteIndexEntries(conn, [e.key() for e in entities])
+ self.__InsertEntities(conn, entities)
+ self.__InsertIndexEntries(conn, entities)
+
+ def __DeleteEntities(self, conn, keys):
+ self.__DeleteIndexEntries(conn, keys)
+ self.__DeleteEntityRows(conn, keys, 'Entities')
+
+ def _Dynamic_Put(self, put_request, put_response):
+ conn = self.__GetConnection(put_request.transaction())
+ try:
+ entities = put_request.entity_list()
+ for entity in entities:
+ self.__ValidateKey(entity.key())
+
+ for prop in itertools.chain(entity.property_list(),
+ entity.raw_property_list()):
+ if prop.value().has_uservalue():
+ uid = md5.new(prop.value().uservalue().email().lower()).digest()
+ uid = '1' + ''.join(['%02d' % ord(x) for x in uid])[:20]
+ prop.mutable_value().mutable_uservalue().set_obfuscated_gaiaid(uid)
+
+ assert entity.has_key()
+ assert entity.key().path().element_size() > 0
+
+ last_path = entity.key().path().element_list()[-1]
+ if last_path.id() == 0 and not last_path.has_name():
+ id_ = self.__AllocateIds(conn, self.__GetTablePrefix(entity.key()), 1)
+ last_path.set_id(id_)
+
+ assert entity.entity_group().element_size() == 0
+ group = entity.mutable_entity_group()
+ root = entity.key().path().element(0)
+ group.add_element().CopyFrom(root)
+
+ else:
+ assert (entity.has_entity_group() and
+ entity.entity_group().element_size() > 0)
+
+ if put_request.transaction().handle():
+ self.__tx_writes[entity.key()] = entity
+ self.__tx_deletes.discard(entity.key())
+
+ if not put_request.transaction().handle():
+ self.__PutEntities(conn, entities)
+ put_response.key_list().extend([e.key() for e in entities])
+ finally:
+ self.__ReleaseConnection(conn, put_request.transaction())
+
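
For reference, the obfuscated gaiaid assigned above is just a stable token
derived from the MD5 of the lowercased email. A sketch using hashlib in
place of the deprecated md5 module (Python 2 semantics):

    import hashlib

    def obfuscated_gaiaid(email):
        digest = hashlib.md5(email.lower()).digest()
        return '1' + ''.join(['%02d' % ord(c) for c in digest])[:20]

    assert (obfuscated_gaiaid('Alice@Example.com') ==
            obfuscated_gaiaid('alice@example.com'))
    assert len(obfuscated_gaiaid('alice@example.com')) == 21
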
+ def _Dynamic_Get(self, get_request, get_response):
+ conn = self.__GetConnection(get_request.transaction())
+ try:
+ for key in get_request.key_list():
+ self.__ValidateAppId(key.app())
+ prefix = self.__GetTablePrefix(key)
+ c = conn.execute(
+ 'SELECT entity FROM "%s!Entities" WHERE __path__ = ?' % (prefix,),
+ (self.__EncodeIndexPB(key.path()),))
+ group = get_response.add_entity()
+ row = c.fetchone()
+ if row:
+ group.mutable_entity().ParseFromString(row[0])
+ finally:
+ self.__ReleaseConnection(conn, get_request.transaction())
+
+ def _Dynamic_Delete(self, delete_request, delete_response):
+ conn = self.__GetConnection(delete_request.transaction())
+ try:
+ for key in delete_request.key_list():
+ self.__ValidateAppId(key.app())
+ if delete_request.transaction().handle():
+ self.__tx_deletes.add(key)
+ self.__tx_writes.pop(key, None)
+
+ if not delete_request.transaction().handle():
+ self.__DeleteEntities(conn, delete_request.key_list())
+ finally:
+ self.__ReleaseConnection(conn, delete_request.transaction())
+
+ def __GenerateFilterInfo(self, filters, query):
+ """Transform a list of filters into a more usable form.
+
+ Args:
+ filters: A list of filter PBs.
+ query: The query to generate filter info for.
+ Returns:
+ A dict mapping property names to lists of (op, value) tuples.
+ """
+ filter_info = {}
+ for filt in filters:
+ assert filt.property_size() == 1
+ prop = filt.property(0)
+ value = prop.value()
+ if prop.name() == '__key__':
+ value = ReferencePropertyToReference(value.referencevalue())
+ assert value.app() == query.app()
+ assert value.name_space() == query.name_space()
+ value = value.path()
+ filter_info.setdefault(prop.name(), []).append(
+ (filt.op(), self.__EncodeIndexPB(value)))
+ return filter_info
+
+ def __GenerateOrderInfo(self, orders):
+ """Transform a list of orders into a more usable form.
+
+ Args:
+ orders: A list of order PBs.
+ Returns:
+ A list of (property, direction) tuples.
+ """
+ orders = [(order.property(), order.direction()) for order in orders]
+ if orders and orders[-1] == ('__key__', datastore_pb.Query_Order.ASCENDING):
+ orders.pop()
+ return orders
+
+ def __GetPrefixRange(self, prefix):
+ """Returns a (min, max) range that encompasses the given prefix.
+
+ Args:
+      prefix: The prefix to filter on; must be a PB encodable using
+        __EncodeIndexPB (e.g. a key path).
+ Returns:
+ (min, max): Start and end string values to filter on.
+ """
+ ancestor_min = self.__EncodeIndexPB(prefix)
+ ancestor_max = buffer(str(ancestor_min) + '\xfb\xff\xff\xff\x89')
+ return ancestor_min, ancestor_max
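
The ancestor trick above is a plain range scan: every descendant's encoded
path starts with the ancestor's encoded path, so '__path__ >= min AND
__path__ < max' selects exactly the subtree. A tiny illustration with plain
ASCII strings (the real code compares sortable PB encodings and appends a
terminator chosen for that encoding):

    def prefix_range(prefix):
        # (min, max) such that: s.startswith(prefix)  <=>  min <= s < max.
        # '\xff' suffices for this ASCII illustration only.
        return prefix, prefix + '\xff'

    amin, amax = prefix_range('root/child1/')
    assert amin <= 'root/child1/grandchild' < amax
    assert not (amin <= 'root/child2/x' < amax)
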
+
+ def __KindQuery(self, query, filter_info, order_info):
+ """Performs kind only, kind and ancestor, and ancestor only queries."""
+ if not (set(filter_info.keys()) |
+ set(x[0] for x in order_info)).issubset(['__key__']):
+ return None
+ if len(order_info) > 1:
+ return None
+
+ filters = []
+ filters.extend(('__path__', op, value) for op, value
+ in filter_info.get('__key__', []))
+ if query.has_kind():
+ filters.append(('kind', datastore_pb.Query_Filter.EQUAL, query.kind()))
+ if query.has_ancestor():
+ amin, amax = self.__GetPrefixRange(query.ancestor().path())
+ filters.append(('__path__',
+ datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL, amin))
+ filters.append(('__path__', datastore_pb.Query_Filter.LESS_THAN, amax))
+
+ if order_info:
+ orders = [('__path__', order_info[0][1])]
+ else:
+ orders = [('__path__', datastore_pb.Query_Order.ASCENDING)]
+
+ params = []
+ query = ('SELECT Entities.__path__, Entities.entity, %s '
+ 'FROM "%s!Entities" AS Entities %s %s' % (
+ ','.join(x[0] for x in orders),
+ self.__GetTablePrefix(query),
+ self.__CreateFilterString(filters, params),
+ self.__CreateOrderString(orders)))
+ return query, params
+
+ def __SinglePropertyQuery(self, query, filter_info, order_info):
+ """Performs queries satisfiable by the EntitiesByProperty table."""
+ property_names = set(filter_info.keys())
+ property_names.update(x[0] for x in order_info)
+ property_names.discard('__key__')
+ if len(property_names) != 1:
+ return None
+
+ property_name = property_names.pop()
+ filter_ops = filter_info.get(property_name, [])
+
+ if len([1 for o, _ in filter_ops
+ if o == datastore_pb.Query_Filter.EQUAL]) > 1:
+ return None
+
+ if len(order_info) > 1 or (order_info and order_info[0][0] == '__key__'):
+ return None
+
+ if query.has_ancestor():
+ return None
+
+ if not query.has_kind():
+ return None
+
+ prefix = self.__GetTablePrefix(query)
+ filters = []
+ filters.append(('EntitiesByProperty.kind',
+ datastore_pb.Query_Filter.EQUAL, query.kind()))
+ filters.append(('name', datastore_pb.Query_Filter.EQUAL, property_name))
+ for op, value in filter_ops:
+ if property_name == '__key__':
+ filters.append(('EntitiesByProperty.__path__', op, value))
+ else:
+ filters.append(('value', op, value))
+
+ orders = [('EntitiesByProperty.kind', datastore_pb.Query_Order.ASCENDING),
+ ('name', datastore_pb.Query_Order.ASCENDING)]
+ if order_info:
+ orders.append(('value', order_info[0][1]))
+ else:
+ orders.append(('value', datastore_pb.Query_Order.ASCENDING))
+ orders.append(('EntitiesByProperty.__path__',
+ datastore_pb.Query_Order.ASCENDING))
+
+ params = []
+ format_args = (
+ ','.join(x[0] for x in orders[2:]),
+ prefix,
+ prefix,
+ self.__CreateFilterString(filters, params),
+ self.__CreateOrderString(orders))
+ query = ('SELECT Entities.__path__, Entities.entity, %s '
+ 'FROM "%s!EntitiesByProperty" AS EntitiesByProperty INNER JOIN '
+ '"%s!Entities" AS Entities USING (__path__) %s %s' % format_args)
+ return query, params
+
+ def __StarSchemaQueryPlan(self, query, filter_info, order_info):
+ """Executes a query using a 'star schema' based on EntitiesByProperty.
+
+ A 'star schema' is a join between an objects table (Entities) and multiple
+ instances of a facts table (EntitiesByProperty). Ideally, this will result
+ in a merge join if the only filters are inequalities and the sort orders
+ match those in the index for the facts table; otherwise, the DB will do its
+ best to satisfy the query efficiently.
+
+ Args:
+ query: The datastore_pb.Query PB.
+ filter_info: A dict mapping properties filtered on to (op, value) tuples.
+ order_info: A list of (property, direction) tuples.
+ Returns:
+ (query, params): An SQL query string and list of parameters for it.
+ """
+ filter_sets = []
+ for name, filter_ops in filter_info.items():
+ filter_sets.extend((name, [x]) for x in filter_ops
+ if x[0] == datastore_pb.Query_Filter.EQUAL)
+ ineq_ops = [x for x in filter_ops
+ if x[0] != datastore_pb.Query_Filter.EQUAL]
+ if ineq_ops:
+ filter_sets.append((name, ineq_ops))
+
+ for prop, _ in order_info:
+ if prop == '__key__':
+ continue
+ if prop not in filter_info:
+ filter_sets.append((prop, []))
+
+ prefix = self.__GetTablePrefix(query)
+
+ joins = []
+ filters = []
+ join_name_map = {}
+ for name, filter_ops in filter_sets:
+ join_name = 'ebp_%d' % (len(joins),)
+ join_name_map.setdefault(name, join_name)
+ joins.append(
+ 'INNER JOIN "%s!EntitiesByProperty" AS %s '
+ 'ON Entities.__path__ = %s.__path__'
+ % (prefix, join_name, join_name))
+ filters.append(('%s.kind' % join_name, datastore_pb.Query_Filter.EQUAL,
+ query.kind()))
+ filters.append(('%s.name' % join_name, datastore_pb.Query_Filter.EQUAL,
+ name))
+ for op, value in filter_ops:
+ filters.append(('%s.value' % join_name, op, buffer(value)))
+ if query.has_ancestor():
+ amin, amax = self.__GetPrefixRange(query.ancestor().path())
+ filters.append(('%s.__path__' % join_name,
+ datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL, amin))
+ filters.append(('%s.__path__' % join_name,
+ datastore_pb.Query_Filter.LESS_THAN, amax))
+
+ orders = []
+ for prop, order in order_info:
+ if prop == '__key__':
+ orders.append(('Entities.__path__', order))
+ else:
+ prop = '%s.value' % (join_name_map[prop],)
+ orders.append((prop, order))
+ if not order_info or order_info[-1][0] != '__key__':
+ orders.append(('Entities.__path__', datastore_pb.Query_Order.ASCENDING))
+
+ params = []
+ format_args = (
+ ','.join(x[0] for x in orders),
+ prefix,
+ ' '.join(joins),
+ self.__CreateFilterString(filters, params),
+ self.__CreateOrderString(orders))
+ query = ('SELECT Entities.__path__, Entities.entity, %s '
+ 'FROM "%s!Entities" AS Entities %s %s %s' % format_args)
+ return query, params
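
For a query with two equality filters on a single kind, the plan built above
comes out with roughly the following shape (the prefix, the exact rendering
of the WHERE and ORDER BY clauses by __CreateFilterString and
__CreateOrderString, and the placeholder values are illustrative):

    SELECT Entities.__path__, Entities.entity, Entities.__path__
    FROM "myapp!!Entities" AS Entities
    INNER JOIN "myapp!!EntitiesByProperty" AS ebp_0
            ON Entities.__path__ = ebp_0.__path__
    INNER JOIN "myapp!!EntitiesByProperty" AS ebp_1
            ON Entities.__path__ = ebp_1.__path__
    WHERE ebp_0.kind = ? AND ebp_0.name = ? AND ebp_0.value = ?
      AND ebp_1.kind = ? AND ebp_1.name = ? AND ebp_1.value = ?
    ORDER BY Entities.__path__ ASC

Each filter set gets its own EntitiesByProperty alias, which is what lets
SQLite satisfy multiple equality filters without composite indexes.
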
+
+ def __MergeJoinQuery(self, query, filter_info, order_info):
+ if order_info:
+ return None
+ if query.has_ancestor():
+ return None
+ if not query.has_kind():
+ return None
+ for filter_ops in filter_info.values():
+ for op, _ in filter_ops:
+ if op != datastore_pb.Query_Filter.EQUAL:
+ return None
+
+ return self.__StarSchemaQueryPlan(query, filter_info, order_info)
+
+ def __LastResortQuery(self, query, filter_info, order_info):
+ """Last resort query plan that executes queries requring composite indexes.
+
+ Args:
+ query: The datastore_pb.Query PB.
+ filter_info: A dict mapping properties filtered on to (op, value) tuples.
+ order_info: A list of (property, direction) tuples.
+ Returns:
+ (query, params): An SQL query string and list of parameters for it.
+ """
+ if self.__require_indexes:
+ index = self.__FindIndexForQuery(query)
+ if not index:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.NEED_INDEX,
+ 'This query requires a composite index that is not defined. '
+ 'You must update the index.yaml file in your application root.')
+ return self.__StarSchemaQueryPlan(query, filter_info, order_info)
+
+ def __FindIndexForQuery(self, query):
+ """Finds an index that can be used to satisfy the provided query.
+
+ Args:
+ query: A datastore_pb.Query PB.
+ Returns:
+      An entity_pb.CompositeIndex PB if a suitable index exists; otherwise None.
+ """
+ unused_required, kind, ancestor, props, num_eq_filters = (
+ datastore_index.CompositeIndexForQuery(query))
+ required_key = (kind, ancestor, props)
+ indexes = self.__indexes.get(query.app(), {}).get(kind, [])
+
+ eq_filters_set = set(props[:num_eq_filters])
+ remaining_filters = props[num_eq_filters:]
+ for index in indexes:
+ definition = datastore_index.ProtoToIndexDefinition(index)
+ index_key = datastore_index.IndexToKey(definition)
+ if required_key == index_key:
+ return index
+ if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
+ this_props = index_key[2]
+ this_eq_filters_set = set(this_props[:num_eq_filters])
+ this_remaining_filters = this_props[num_eq_filters:]
+ if (eq_filters_set == this_eq_filters_set and
+ remaining_filters == this_remaining_filters):
+ return index
+
+ _QUERY_STRATEGIES = [
+ __KindQuery,
+ __SinglePropertyQuery,
+ __MergeJoinQuery,
+ __LastResortQuery,
+ ]
+
+ def __GetQueryCursor(self, conn, query):
+ """Returns an SQLite query cursor for the provided query.
+
+ Args:
+ conn: The SQLite connection.
+ query: A datastore_pb.Query protocol buffer.
+ Returns:
+ A QueryCursor object.
+ """
+ if query.has_transaction() and not query.has_ancestor():
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Only ancestor queries are allowed inside transactions.')
+
+ num_components = len(query.filter_list()) + len(query.order_list())
+ if query.has_ancestor():
+ num_components += 1
+ if num_components > _MAX_QUERY_COMPONENTS:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+          ('query is too large. May not have more than %s filters'
+           ' + sort orders + ancestor total.' % _MAX_QUERY_COMPONENTS))
+
+ app_id = query.app()
+ self.__ValidateAppId(app_id)
+
+ filters, orders = datastore_index.Normalize(query.filter_list(),
+ query.order_list())
+
+ filter_info = self.__GenerateFilterInfo(filters, query)
+ order_info = self.__GenerateOrderInfo(orders)
+
+ for strategy in DatastoreSqliteStub._QUERY_STRATEGIES:
+ result = strategy(self, query, filter_info, order_info)
+ if result:
+ break
+ else:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'No strategy found to satisfy query.')
+
+ sql_stmt, params = result
+
+ if self.__verbose:
+ logging.info("Executing statement '%s' with arguments %r",
+ sql_stmt, [str(x) for x in params])
+ db_cursor = conn.execute(sql_stmt, params)
+ cursor = QueryCursor(query, db_cursor)
+ if query.has_compiled_cursor() and query.compiled_cursor().position_size():
+ cursor.ResumeFromCompiledCursor(query.compiled_cursor())
+ if query.has_offset():
+ cursor.Skip(query.offset())
+
+ clone = datastore_pb.Query()
+ clone.CopyFrom(query)
+ clone.clear_hint()
+ clone.clear_limit()
+ clone.clear_count()
+ clone.clear_offset()
+ self.__query_history[clone] = self.__query_history.get(clone, 0) + 1
+
+ return cursor
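
The strategy dispatch above tries each planner in order and takes the first
one that returns a plan, using Python's for/else. A minimal standalone
sketch of the pattern (planner names hypothetical):

    def plan_a(query):
        return None                    # cannot handle this query

    def plan_b(query):
        return ('SELECT 1', [])        # handles anything

    STRATEGIES = [plan_a, plan_b]

    def choose(query):
        for strategy in STRATEGIES:
            result = strategy(query)
            if result:
                break
        else:  # no break: every strategy declined
            raise ValueError('No strategy found to satisfy query.')
        return result

    assert choose(object()) == ('SELECT 1', [])
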
+
+ def _Dynamic_RunQuery(self, query, query_result):
+ conn = self.__GetConnection(query.transaction())
+ try:
+ cursor = self.__GetQueryCursor(conn, query)
+
+ self.__cursor_lock.acquire()
+ cursor_id = self.__next_cursor_id
+ self.__next_cursor_id += 1
+ self.__cursor_lock.release()
+
+ cursor_pb = query_result.mutable_cursor()
+ cursor_pb.set_app(query.app())
+ cursor_pb.set_cursor(cursor_id)
+
+ if query.has_count():
+ count = query.count()
+ elif query.has_limit():
+ count = query.limit()
+ else:
+ count = _BATCH_SIZE
+
+ cursor.PopulateQueryResult(count, query_result)
+ self.__cursors[cursor_pb] = cursor
+ finally:
+ self.__ReleaseConnection(conn, query.transaction())
+
+ def _Dynamic_Next(self, next_request, query_result):
+ self.__ValidateAppId(next_request.cursor().app())
+
+ try:
+ cursor = self.__cursors[next_request.cursor()]
+ except KeyError:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Cursor %d not found' % next_request.cursor().cursor())
+
+ assert cursor.app == next_request.cursor().app()
+
+ count = _BATCH_SIZE
+ if next_request.has_count():
+ count = next_request.count()
+ cursor.PopulateQueryResult(count, query_result)
+
+ def _Dynamic_Count(self, query, integer64proto):
+ if query.has_limit():
+ query.set_limit(min(query.limit(), _MAXIMUM_RESULTS))
+ else:
+ query.set_limit(_MAXIMUM_RESULTS)
+
+ conn = self.__GetConnection(query.transaction())
+ try:
+ cursor = self.__GetQueryCursor(conn, query)
+ integer64proto.set_value(cursor.Count())
+ finally:
+ self.__ReleaseConnection(conn, query.transaction())
+
+ def _Dynamic_BeginTransaction(self, request, transaction):
+ self.__ValidateAppId(request.app())
+
+ self.__connection_lock.acquire()
+ assert self.__current_transaction is None
+ handle = self.__next_tx_handle
+ self.__next_tx_handle += 1
+
+ transaction.set_app(request.app())
+ transaction.set_handle(handle)
+ self.__current_transaction = handle
+
+ def _Dynamic_AddActions(self, request, _):
+
+ if ((len(self.__tx_actions) + request.add_request_size()) >
+ _MAX_ACTIONS_PER_TXN):
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Too many messages, maximum allowed %s' % _MAX_ACTIONS_PER_TXN)
+
+ new_actions = []
+ for add_request in request.add_request_list():
+ self.__ValidateTransaction(add_request.transaction())
+ clone = taskqueue_service_pb.TaskQueueAddRequest()
+ clone.CopyFrom(add_request)
+ clone.clear_transaction()
+ new_actions.append(clone)
+
+ self.__tx_actions.extend(new_actions)
+
+ def _Dynamic_Commit(self, transaction, _):
+ assert self.__current_transaction == transaction.handle()
+ conn = self.__connection
+
+ try:
+ self.__PutEntities(conn, self.__tx_writes.values())
+ self.__DeleteEntities(conn, self.__tx_deletes)
+
+ for action in self.__tx_actions:
+ try:
+ apiproxy_stub_map.MakeSyncCall(
+ 'taskqueue', 'Add', action, api_base_pb.VoidProto())
+ except apiproxy_errors.ApplicationError, e:
+ logging.warning('Transactional task %s has been dropped, %s',
+ action, e)
+ finally:
+ self.__current_transaction = None
+ self.__tx_actions = []
+ self.__tx_writes = {}
+ self.__tx_deletes = set()
+ self.__ReleaseConnection(conn, None)
+
+ def _Dynamic_Rollback(self, transaction, _):
+ conn = self.__GetConnection(transaction)
+ self.__current_transaction = None
+ self.__tx_actions = []
+ self.__tx_writes = {}
+ self.__tx_deletes = set()
+ self.__ReleaseConnection(conn, None, True)
+
+ def _Dynamic_GetSchema(self, req, schema):
+ conn = self.__GetConnection(None)
+ try:
+ prefix = self.__GetTablePrefix(req)
+
+ filters = []
+ if req.has_start_kind():
+ filters.append(('kind', datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
+ req.start_kind()))
+ if req.has_end_kind():
+ filters.append(('kind', datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
+ req.end_kind()))
+
+ params = []
+ if req.properties():
+ sql_stmt = ('SELECT kind, name, value FROM "%s!EntitiesByProperty" %s '
+ 'GROUP BY kind, name, substr(value, 1, 1) ORDER BY kind'
+ % (prefix, self.__CreateFilterString(filters, params)))
+ else:
+ sql_stmt = ('SELECT kind FROM "%s!Entities" %s GROUP BY kind'
+ % (prefix, self.__CreateFilterString(filters, params)))
+ c = conn.execute(sql_stmt, params)
+
+ kind = None
+ current_name = None
+ kind_pb = None
+ for row in c.fetchall():
+ if row[0] != kind:
+ if kind_pb:
+ schema.kind_list().append(kind_pb)
+ kind = row[0].encode('utf-8')
+ kind_pb = entity_pb.EntityProto()
+ kind_pb.mutable_key().set_app('')
+ kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
+ kind_pb.mutable_entity_group()
+
+ if req.properties():
+ name, value_data = row[1:]
+ if current_name != name:
+ current_name = name
+ prop_pb = kind_pb.add_property()
+ prop_pb.set_name(name.encode('utf-8'))
+ prop_pb.set_multiple(False)
+
+ value_decoder = sortable_pb_encoder.Decoder(
+ array.array('B', str(value_data)))
+ value_pb = prop_pb.mutable_value()
+ value_pb.Merge(value_decoder)
+
+ if value_pb.has_int64value():
+ value_pb.set_int64value(0)
+ if value_pb.has_booleanvalue():
+ value_pb.set_booleanvalue(False)
+ if value_pb.has_stringvalue():
+ value_pb.set_stringvalue('none')
+ if value_pb.has_doublevalue():
+ value_pb.set_doublevalue(0.0)
+ if value_pb.has_pointvalue():
+ value_pb.mutable_pointvalue().set_x(0.0)
+ value_pb.mutable_pointvalue().set_y(0.0)
+ if value_pb.has_uservalue():
+ value_pb.mutable_uservalue().set_gaiaid(0)
+ value_pb.mutable_uservalue().set_email('none')
+ value_pb.mutable_uservalue().set_auth_domain('none')
+ value_pb.mutable_uservalue().clear_nickname()
+ value_pb.mutable_uservalue().clear_obfuscated_gaiaid()
+ if value_pb.has_referencevalue():
+ value_pb.clear_referencevalue()
+ value_pb.mutable_referencevalue().set_app('none')
+ pathelem = value_pb.mutable_referencevalue().add_pathelement()
+ pathelem.set_type('none')
+ pathelem.set_name('none')
+
+ if kind_pb:
+ schema.kind_list().append(kind_pb)
+ finally:
+ self.__ReleaseConnection(conn, None)
+
+ def _Dynamic_AllocateIds(self, allocate_ids_request, allocate_ids_response):
+ conn = self.__GetConnection(None)
+
+ model_key = allocate_ids_request.model_key()
+ size = allocate_ids_request.size()
+
+ self.__ValidateAppId(model_key.app())
+
+ first_id = self.__AllocateIds(conn, self.__GetTablePrefix(model_key), size)
+ allocate_ids_response.set_start(first_id)
+ allocate_ids_response.set_end(first_id + size - 1)
+
+ self.__ReleaseConnection(conn, None)
+
+ def __FindIndex(self, index):
+ """Finds an existing index by definition.
+
+ Args:
+ index: entity_pb.CompositeIndex
+
+ Returns:
+ entity_pb.CompositeIndex, if it exists; otherwise None
+ """
+ app_indexes = self.__indexes.get(index.app_id(), {})
+ for stored_index in app_indexes.get(index.definition().entity_type(), []):
+ if index.definition() == stored_index.definition():
+ return stored_index
+
+ return None
+
+ def _Dynamic_CreateIndex(self, index, id_response):
+ app_id = index.app_id()
+ kind = index.definition().entity_type()
+
+ self.__ValidateAppId(app_id)
+ if index.id() != 0:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ 'New index id must be 0.')
+
+ self.__index_lock.acquire()
+ try:
+ if self.__FindIndex(index):
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ 'Index already exists.')
+
+ next_id = max([idx.id() for x in self.__indexes.get(app_id, {}).values()
+ for idx in x] + [0]) + 1
+ index.set_id(next_id)
+ id_response.set_value(next_id)
+
+ clone = entity_pb.CompositeIndex()
+ clone.CopyFrom(index)
+ self.__indexes.setdefault(app_id, {}).setdefault(kind, []).append(clone)
+
+ conn = self.__GetConnection(None)
+ try:
+ self.__WriteIndexData(conn, app_id)
+ finally:
+ self.__ReleaseConnection(conn, None)
+ finally:
+ self.__index_lock.release()
+
+ def _Dynamic_GetIndices(self, app_str, composite_indices):
+ self.__ValidateAppId(app_str.value())
+
+ index_list = composite_indices.index_list()
+ for indexes in self.__indexes.get(app_str.value(), {}).values():
+ index_list.extend(indexes)
+
+ def _Dynamic_UpdateIndex(self, index, _):
+ self.__ValidateAppId(index.app_id())
+ my_index = self.__FindIndex(index)
+ if not my_index:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ "Index doesn't exist.")
+ elif (index.state() != my_index.state() and
+ index.state() not in self._INDEX_STATE_TRANSITIONS[my_index.state()]):
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Cannot move index state from %s to %s' %
+ (entity_pb.CompositeIndex.State_Name(my_index.state()),
+ (entity_pb.CompositeIndex.State_Name(index.state()))))
+
+ self.__index_lock.acquire()
+ try:
+ my_index.set_state(index.state())
+ finally:
+ self.__index_lock.release()
+
+ def _Dynamic_DeleteIndex(self, index, _):
+ app_id = index.app_id()
+ kind = index.definition().entity_type()
+ self.__ValidateAppId(app_id)
+
+ my_index = self.__FindIndex(index)
+ if not my_index:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ "Index doesn't exist.")
+
+ conn = self.__GetConnection(None)
+ try:
+ self.__WriteIndexData(conn, app_id)
+ finally:
+ self.__ReleaseConnection(conn, None)
+ self.__index_lock.acquire()
+ try:
+ self.__indexes[app_id][kind].remove(my_index)
+ finally:
+ self.__index_lock.release()
diff --git a/google-appengine/google/appengine/datastore/entity_pb.py b/google-appengine/google/appengine/datastore/entity_pb.py
index b30563f..b30563f 100644..100755
--- a/google-appengine/google/appengine/datastore/entity_pb.py
+++ b/google-appengine/google/appengine/datastore/entity_pb.py
diff --git a/google-appengine/google/appengine/datastore/sortable_pb_encoder.py b/google-appengine/google/appengine/datastore/sortable_pb_encoder.py
new file mode 100644
index 0000000..e1d4e65
--- /dev/null
+++ b/google-appengine/google/appengine/datastore/sortable_pb_encoder.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""An Encoder class for Protocol Buffers that preserves sorting characteristics.
+
+This is used by datastore_sqlite_stub in order to index entities in a fashion
+that preserves the datastore's sorting semantics. Broadly, there are four
+changes from regular PB encoding:
+
+ - Strings are escaped and null terminated instead of length-prefixed. The
+ escaping replaces \0 with \1\1 and \1 with \1\2, thus preserving the ordering
+ of the original string.
+ - Variable length integers are encoded using a variable length encoding that
+   preserves order. The first byte encodes the value directly if it lies
+   between -119 and 119; otherwise it encodes the sign and the number of bytes
+   that follow.
+ - Numbers are stored big endian instead of little endian.
+ - Negative doubles are entirely negated, while positive doubles have their sign
+ bit flipped.
+
+Warning:
+ Due to the way nested Protocol Buffers are encoded, this encoder will NOT
+ preserve sorting characteristics for embedded protocol buffers!
+"""
+
+
+
+
+
+
+
+import array
+import struct
+
+from google.net.proto import ProtocolBuffer
+
+
+_MAX_UNSIGNED_BYTE = 255
+
+_MAX_LONG_BYTES = 8
+
+_MAX_INLINE = (_MAX_UNSIGNED_BYTE - (2 * _MAX_LONG_BYTES)) / 2
+_MIN_INLINE = -_MAX_INLINE
+_OFFSET = 1 + 8
+_POS_OFFSET = _OFFSET + _MAX_INLINE * 2
+
+
+class Encoder(ProtocolBuffer.Encoder):
+ """Encodes Protocol Buffers in a form that sorts nicely."""
+
+ def put16(self, value):
+ if value < 0 or value >= (1<<16):
+ raise ProtocolBuffer.ProtocolBufferEncodeError, 'u16 too big'
+ self.buf.append((value >> 8) & 0xff)
+ self.buf.append((value >> 0) & 0xff)
+ return
+
+ def put32(self, value):
+ if value < 0 or value >= (1L<<32):
+ raise ProtocolBuffer.ProtocolBufferEncodeError, 'u32 too big'
+ self.buf.append((value >> 24) & 0xff)
+ self.buf.append((value >> 16) & 0xff)
+ self.buf.append((value >> 8) & 0xff)
+ self.buf.append((value >> 0) & 0xff)
+ return
+
+ def put64(self, value):
+ if value < 0 or value >= (1L<<64):
+ raise ProtocolBuffer.ProtocolBufferEncodeError, 'u64 too big'
+ self.buf.append((value >> 56) & 0xff)
+ self.buf.append((value >> 48) & 0xff)
+ self.buf.append((value >> 40) & 0xff)
+ self.buf.append((value >> 32) & 0xff)
+ self.buf.append((value >> 24) & 0xff)
+ self.buf.append((value >> 16) & 0xff)
+ self.buf.append((value >> 8) & 0xff)
+ self.buf.append((value >> 0) & 0xff)
+ return
+
+ def _PutVarInt(self, value):
+ if value is None:
+ self.buf.append(0)
+ return
+
+ if value >= _MIN_INLINE and value <= _MAX_INLINE:
+ value = _OFFSET + (value - _MIN_INLINE)
+ self.buf.append(value & 0xff)
+ return
+
+ negative = False
+
+ if value < 0:
+ value = _MIN_INLINE - value
+ negative = True
+ else:
+ value = value - _MAX_INLINE
+
+ len = 0
+ w = value
+ while w > 0:
+ w >>= 8
+ len += 1
+
+ if negative:
+ head = _OFFSET - len
+ else:
+ head = _POS_OFFSET + len
+ self.buf.append(head & 0xff)
+
+ for i in range(len - 1, -1, -1):
+ b = value >> (i * 8)
+ if negative:
+ b = _MAX_UNSIGNED_BYTE - (b & 0xff)
+ self.buf.append(b & 0xff)
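
A worked mirror of the encoding above, using its derived constants
(_MAX_INLINE = (255 - 2*8)//2 = 119, _OFFSET = 9, _POS_OFFSET = 247). Values
in [-119, 119] become the single byte 9 + (value + 119); wider values get a
length byte beyond that range followed by big-endian magnitude bytes,
complemented for negatives so byte order matches numeric order:

    _MAX_INLINE = 119
    _OFFSET, _POS_OFFSET = 9, 9 + 2 * _MAX_INLINE

    def sortable_varint(value):
        # Standalone mirror of _PutVarInt; returns a list of byte values.
        if -_MAX_INLINE <= value <= _MAX_INLINE:
            return [_OFFSET + (value + _MAX_INLINE)]
        negative = value < 0
        value = (-_MAX_INLINE - value) if negative else (value - _MAX_INLINE)
        body = []
        while value:
            body.append(value & 0xff)
            value >>= 8
        body.reverse()
        head = _OFFSET - len(body) if negative else _POS_OFFSET + len(body)
        return [head] + [(255 - b) if negative else b for b in body]

    assert sortable_varint(-119) == [0x09]
    assert sortable_varint(0) == [0x80]
    assert sortable_varint(119) == [0xf7]
    assert sortable_varint(120) == [0xf8, 0x01]
    assert sortable_varint(-120) == [0x08, 0xfe]
    values = [-300, -120, -1, 0, 5, 119, 1000]
    encoded = [sortable_varint(v) for v in values]
    assert encoded == sorted(encoded)
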
+
+ def putVarInt32(self, value):
+ if value >= 0x80000000 or value < -0x80000000:
+ raise ProtocolBuffer.ProtocolBufferEncodeError, 'int32 too big'
+ self._PutVarInt(value)
+
+ def putVarInt64(self, value):
+ if value >= 0x8000000000000000 or value < -0x8000000000000000:
+ raise ProtocolBuffer.ProtocolBufferEncodeError, 'int64 too big'
+ self._PutVarInt(value)
+
+ def putVarUint64(self, value):
+ if value < 0 or value >= 0x10000000000000000:
+ raise ProtocolBuffer.ProtocolBufferEncodeError, 'uint64 too big'
+ self._PutVarInt(value)
+
+ def putFloat(self, value):
+ encoded = array.array('B')
+ encoded.fromstring(struct.pack('>f', value))
+ if value < 0:
+ encoded[0] ^= 0xFF
+ encoded[1] ^= 0xFF
+ encoded[2] ^= 0xFF
+ encoded[3] ^= 0xFF
+ else:
+ encoded[0] ^= 0x80
+ self.buf.extend(encoded)
+
+ def putDouble(self, value):
+ encoded = array.array('B')
+ encoded.fromstring(struct.pack('>d', value))
+ if value < 0:
+ encoded[0] ^= 0xFF
+ encoded[1] ^= 0xFF
+ encoded[2] ^= 0xFF
+ encoded[3] ^= 0xFF
+ encoded[4] ^= 0xFF
+ encoded[5] ^= 0xFF
+ encoded[6] ^= 0xFF
+ encoded[7] ^= 0xFF
+ else:
+ encoded[0] ^= 0x80
+ self.buf.extend(encoded)
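
The sign-bit trick above is the standard way to make IEEE-754 values sort
correctly as unsigned bytes: flip only the sign bit for non-negatives (so
they land above the negatives), flip every bit for negatives (so
more-negative sorts lower). A standalone check using bytearray in place of
array.array:

    import struct

    def sortable_double(value):
        packed = bytearray(struct.pack('>d', value))
        if value < 0:
            for i in range(8):
                packed[i] ^= 0xff    # full complement for negatives
        else:
            packed[0] ^= 0x80        # sign bit only for non-negatives
        return bytes(packed)

    samples = [-2.5, -1.0, -0.25, 0.0, 0.25, 1.0, 2.5]
    encoded = [sortable_double(x) for x in samples]
    assert encoded == sorted(encoded)
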
+
+ def putPrefixedString(self, value):
+ self.buf.fromstring(value.replace('\1', '\1\2').replace('\0', '\1\1') + '\0')
+
+
+class Decoder(ProtocolBuffer.Decoder):
+ def __init__(self, buf, idx=0, limit=None):
+ if not limit:
+ limit = len(buf)
+ ProtocolBuffer.Decoder.__init__(self, buf, idx, limit)
+
+ def get16(self):
+ if self.idx + 2 > self.limit:
+ raise ProtocolBuffer.ProtocolBufferDecodeError, 'truncated'
+ c = self.buf[self.idx]
+ d = self.buf[self.idx + 1]
+ self.idx += 2
+ return (c << 8) | d
+
+ def get32(self):
+ if self.idx + 4 > self.limit:
+ raise ProtocolBuffer.ProtocolBufferDecodeError, 'truncated'
+ c = long(self.buf[self.idx])
+ d = self.buf[self.idx + 1]
+ e = self.buf[self.idx + 2]
+ f = self.buf[self.idx + 3]
+ self.idx += 4
+ return (c << 24) | (d << 16) | (e << 8) | f
+
+ def get64(self):
+ if self.idx + 8 > self.limit:
+ raise ProtocolBuffer.ProtocolBufferDecodeError, 'truncated'
+ c = long(self.buf[self.idx])
+ d = long(self.buf[self.idx + 1])
+ e = long(self.buf[self.idx + 2])
+ f = long(self.buf[self.idx + 3])
+ g = long(self.buf[self.idx + 4])
+ h = self.buf[self.idx + 5]
+ i = self.buf[self.idx + 6]
+ j = self.buf[self.idx + 7]
+ self.idx += 8
+ return ((c << 56) | (d << 48) | (e << 40) | (f << 32) | (g << 24)
+ | (h << 16) | (i << 8) | j)
+
+ def getVarInt64(self):
+ b = self.get8()
+ if b >= _OFFSET and b <= _POS_OFFSET:
+ return b - _OFFSET + _MIN_INLINE
+ if b == 0:
+ return None
+
+ if b < _OFFSET:
+ negative = True
+ bytes = _OFFSET - b
+ else:
+ negative = False
+ bytes = b - _POS_OFFSET
+
+ ret = 0
+ for i in range(bytes):
+ b = self.get8()
+ if negative:
+ b = _MAX_UNSIGNED_BYTE - b
+ ret = ret << 8 | b
+
+ if negative:
+ return _MIN_INLINE - ret
+ else:
+ return ret + _MAX_INLINE
+
+ def getVarInt32(self):
+ result = self.getVarInt64()
+ if result >= 0x80000000L or result < -0x80000000L:
+ raise ProtocolBuffer.ProtocolBufferDecodeError, 'corrupted'
+ return result
+
+ def getVarUint64(self):
+ result = self.getVarInt64()
+ if result < 0:
+ raise ProtocolBuffer.ProtocolBufferDecodeError, 'corrupted'
+ return result
+
+ def getFloat(self):
+ if self.idx + 4 > self.limit:
+ raise ProtocolBuffer.ProtocolBufferDecodeError, 'truncated'
+ a = self.buf[self.idx:self.idx+4]
+ self.idx += 4
+ if a[0] & 0x80:
+ a[0] ^= 0x80
+ else:
+ a = [x ^ 0xFF for x in a]
+ return struct.unpack('>f', array.array('B', a).tostring())[0]
+
+ def getDouble(self):
+ if self.idx + 8 > self.limit:
+ raise ProtocolBuffer.ProtocolBufferDecodeError, 'truncated'
+ a = self.buf[self.idx:self.idx+8]
+ self.idx += 8
+ if a[0] & 0x80:
+ a[0] ^= 0x80
+ else:
+ a = [x ^ 0xFF for x in a]
+ return struct.unpack('>d', array.array('B', a).tostring())[0]
+
+ def getPrefixedString(self):
+ end_idx = self.idx
+ while self.buf[end_idx] != 0:
+ end_idx += 1
+
+ data = array.array('B', self.buf[self.idx:end_idx]).tostring()
+ self.idx = end_idx + 1
+ return data.replace('\1\1', '\0').replace('\1\2', '\1')
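
Taken together, putPrefixedString and getPrefixedString are inverses and
order-preserving; a standalone round-trip check of the escaping scheme:

    def escape(s):
        # Mirrors putPrefixedString: escape \1 first, then \0, and terminate.
        return s.replace('\1', '\1\2').replace('\0', '\1\1') + '\0'

    def unescape(data):
        # Mirrors getPrefixedString: drop the terminator, undo the escapes.
        assert data.endswith('\0')
        return data[:-1].replace('\1\1', '\0').replace('\1\2', '\1')

    for s in ('', 'a', 'a\0b', 'a\1b', '\0\1'):
        assert unescape(escape(s)) == s
    assert escape('a') < escape('a\0b')   # order survives the embedded \0
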
diff --git a/google-appengine/google/appengine/dist/py_imp.py b/google-appengine/google/appengine/dist/py_imp.py
index a6a0f38..cb097bc 100755
--- a/google-appengine/google/appengine/dist/py_imp.py
+++ b/google-appengine/google/appengine/dist/py_imp.py
@@ -28,7 +28,7 @@ PKG_DIRECTORY, C_BUILTIN, PY_FROZEN = 5, 6, 7
def get_magic():
"""Return the magic string used to recognize byte-compiled code files."""
- return '\0\0\0\0'
+ return '\xb3\xf2\r\n'
_PY_SOURCE_SUFFIX = ('.py', 'U', PY_SOURCE)
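
The replacement value is CPython 2.5's real bytecode magic (62131) followed
by '\r\n', rather than four NUL bytes; the first two bytes are the magic word
in little-endian:

    import struct
    assert struct.unpack('<H', '\xb3\xf2')[0] == 62131   # Python 2.5
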
diff --git a/google-appengine/google/appengine/ext/admin/__init__.py b/google-appengine/google/appengine/ext/admin/__init__.py
index f69173e..ab9a5dd 100755
--- a/google-appengine/google/appengine/ext/admin/__init__.py
+++ b/google-appengine/google/appengine/ext/admin/__init__.py
@@ -64,6 +64,14 @@ from google.appengine.ext.webapp import template
_DEBUG = True
+def ustr(value):
+ """Like str(), but UTF-8-encodes Unicode instead of failing."""
+ try:
+ return str(value)
+ except UnicodeError:
+ return unicode(value).encode('UTF-8')
+
+
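
The fallback matters because, under Python 2, str() raises UnicodeEncodeError
on a unicode value containing non-ASCII characters; ustr degrades to UTF-8
bytes instead:

    assert ustr('plain') == 'plain'
    assert ustr(u'caf\xe9') == 'caf\xc3\xa9'   # str(u'caf\xe9') would raise
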
class ImageHandler(webapp.RequestHandler):
"""Serves a static image.
@@ -578,9 +586,12 @@ class DatastoreRequestHandler(BaseRequestHandler):
set of results and 0 for the entity count.
"""
kind = self.request.get('kind')
+ namespace = self.request.get('namespace')
+ if not namespace:
+ namespace = None
if not kind:
return ([], 0)
- query = datastore.Query(kind)
+ query = datastore.Query(kind, _namespace=namespace)
order = self.request.get('order')
order_type = self.request.get('order_type')
@@ -724,6 +735,7 @@ class DatastoreQueryHandler(DatastoreRequestHandler):
'message': self.request.get('msg'),
'pages': pages,
'current_page': current_page,
+ 'namespace': self.request.get('namespace'),
'num': num,
'next_start': -1,
'prev_start': -1,
@@ -846,6 +858,7 @@ class DatastoreEditHandler(DatastoreRequestHandler):
'key_id': entity_key_id,
'fields': fields,
'focus': self.request.get('focus'),
+ 'namespace': self.request.get('namespace'),
'next': self.request.get('next'),
'parent_key': parent_key,
'parent_kind': parent_kind,
@@ -862,7 +875,10 @@ class DatastoreEditHandler(DatastoreRequestHandler):
return
entity = datastore.Get(datastore.Key(entity_key))
else:
- entity = datastore.Entity(kind)
+ namespace = self.request.get('namespace')
+ if not namespace:
+ namespace = None
+ entity = datastore.Entity(kind, _namespace=namespace)
args = self.request.arguments()
for arg in args:
@@ -874,7 +890,7 @@ class DatastoreEditHandler(DatastoreRequestHandler):
data_type = DataType.get_by_name(data_type_name)
if entity and entity.has_key(field_name):
old_formatted_value = data_type.format(entity[field_name])
- if old_formatted_value == form_value:
+ if old_formatted_value == ustr(form_value):
continue
if len(form_value) > 0:
@@ -912,7 +928,7 @@ class DataType(object):
return _NAMED_DATA_TYPES[name]
def format(self, value):
- return str(value)
+ return ustr(value)
def short_format(self, value):
return self.format(value)
@@ -922,7 +938,8 @@ class DataType(object):
string_value = self.format(value)
else:
string_value = ''
- return '<input class="%s" name="%s" type="text" size="%d" value="%s"/>' % (cgi.escape(self.name()), cgi.escape(name), self.input_field_size(),
+ return '<input class="%s" name="%s" type="text" size="%d" value="%s"/>' % (cgi.escape(ustr(self.name())), cgi.escape(ustr(name)),
+ self.input_field_size(),
cgi.escape(string_value, True))
def input_field_size(self):
@@ -934,11 +951,11 @@ class DataType(object):
class StringType(DataType):
def format(self, value):
- return value
+ return ustr(value)
def input_field(self, name, value, sample_values):
- value = str(value)
- sample_values = [str(s) for s in sample_values]
+ value = ustr(value)
+ sample_values = [ustr(s) for s in sample_values]
multiline = False
if value:
multiline = len(value) > 255 or value.find('\n') >= 0
@@ -973,7 +990,7 @@ class TextType(StringType):
return 'Text'
def input_field(self, name, value, sample_values):
- return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (cgi.escape(name), cgi.escape(str(value)))
+ return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (cgi.escape(ustr(name)), cgi.escape(ustr(value)))
def parse(self, value):
return datastore_types.Text(value)
@@ -1006,7 +1023,8 @@ class TimeType(DataType):
return 'datetime'
def parse(self, value):
- return datetime.datetime(*(time.strptime(value, TimeType._FORMAT)[0:6]))
+ return datetime.datetime(*(time.strptime(ustr(value),
+ TimeType._FORMAT)[0:6]))
def python_type(self):
return datetime.datetime
@@ -1017,8 +1035,8 @@ class ListType(DataType):
value_file = cStringIO.StringIO()
try:
writer = csv.writer(value_file)
- writer.writerow(value)
- return value_file.getvalue()
+ writer.writerow(map(ustr, value))
+ return ustr(value_file.getvalue())
finally:
value_file.close()
@@ -1026,10 +1044,15 @@ class ListType(DataType):
return 'list'
def parse(self, value):
- value_file = cStringIO.StringIO(value)
+ value_file = cStringIO.StringIO(ustr(value))
try:
reader = csv.reader(value_file)
- return reader.next()
+ fields = []
+ for field in reader.next():
+ if isinstance(field, str):
+ field = field.decode('utf-8')
+ fields.append(field)
+ return fields
finally:
value_file.close()
@@ -1284,6 +1307,7 @@ _DATA_TYPES = {
datastore_types.PostalAddress: PostalAddressType(),
datastore_types.Rating: RatingType(),
datastore_types.BlobKey: BlobKeyType(),
+ datastore_types.ByteString: StringType(),
}
_NAMED_DATA_TYPES = {}
diff --git a/google-appengine/google/appengine/ext/admin/templates/datastore.html b/google-appengine/google/appengine/ext/admin/templates/datastore.html
index 06cc3ee..3ae084d 100644
--- a/google-appengine/google/appengine/ext/admin/templates/datastore.html
+++ b/google-appengine/google/appengine/ext/admin/templates/datastore.html
@@ -83,6 +83,12 @@
{% if kinds or in_production %}
<form action="{{ request.path }}" method="get">
<div id="datastore_search">
+ {% if namespace %}
+ <div class="field">
+ <span class="name">Namespace</span>
+ <input name="namespace" type="text" size="20" value="{{ namespace|escape }}"/>
+ </div>
+ {% endif %}
<span class="field">
<span class="name">Entity Kind:</span>
<span class="value">
@@ -99,7 +105,7 @@
</span>
<span class="buttons">
<input type="submit" value="List Entities"/>
- <input type="button" id="create_button" onclick="location.href='{{ datastore_edit_path }}?kind=' + encodeURIComponent(document.getElementById('kind_input').value) + '&amp;next={{ request.uri|urlencode }}'" value="Create New Entity"/>
+ <input type="button" id="create_button" onclick="location.href='{{ datastore_edit_path }}?namespace={{ namespace|escape }}&amp;kind=' + encodeURIComponent(document.getElementById('kind_input').value) + '&amp;next={{ request.uri|urlencode }}'" value="Create New Entity"/>
</span>
</div>
</form>
diff --git a/google-appengine/google/appengine/ext/admin/templates/datastore_edit.html b/google-appengine/google/appengine/ext/admin/templates/datastore_edit.html
index 0621df8..0e2247a 100644
--- a/google-appengine/google/appengine/ext/admin/templates/datastore_edit.html
+++ b/google-appengine/google/appengine/ext/admin/templates/datastore_edit.html
@@ -109,6 +109,17 @@
</td>
</tr>
{% endif %}
+ {% if not key %}
+ {% if namespace %}
+ <tr>
+ <td class="name">
+ <span class="field_name">Namespace</span>
+ <span class="field_type">({{ namespace|escape }})</span>
+ </td>
+ <td class="value"><input type="text" name="namespace" value="{{ namespace|escape }}"/></td>
+ </tr>
+ {% endif %}
+ {% endif %}
{% for field in fields %}
<tr>
<td class="name">
diff --git a/google-appengine/google/appengine/ext/appstats/datamodel_pb.py b/google-appengine/google/appengine/ext/appstats/datamodel_pb.py
index b204cd9..b204cd9 100644..100755
--- a/google-appengine/google/appengine/ext/appstats/datamodel_pb.py
+++ b/google-appengine/google/appengine/ext/appstats/datamodel_pb.py
diff --git a/google-appengine/google/appengine/ext/appstats/sample_appengine_config.py b/google-appengine/google/appengine/ext/appstats/sample_appengine_config.py
index f85803c..661f91a 100755
--- a/google-appengine/google/appengine/ext/appstats/sample_appengine_config.py
+++ b/google-appengine/google/appengine/ext/appstats/sample_appengine_config.py
@@ -28,6 +28,8 @@ There are four sections:
import logging
+import random
+import re
# 0) WSGI middleware declaration.
@@ -156,9 +158,9 @@ appstats_FILTER_LIST = []
# above) *and* random.random() < RECORD_FRACTION.
def appstats_should_record(env):
- if config.FILTER_LIST:
- logging.debug('FILTER_LIST: %r', config.FILTER_LIST)
- for filter_dict in config.FILTER_LIST:
+ if appstats_FILTER_LIST:
+ logging.debug('FILTER_LIST: %r', appstats_FILTER_LIST)
+ for filter_dict in appstats_FILTER_LIST:
for key, regex in filter_dict.iteritems():
negated = isinstance(regex, str) and regex.startswith('!')
if negated:
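
Each filter_dict maps CGI environment keys to regexes, and a regex starting
with '!' negates the match. The rest of the loop falls outside this hunk, so
the accept/reject sketch below is one plausible reading, not the file's
actual code:

    import re

    def matches(filter_dict, env):
        # True only if every (key, regex) pair agrees with env.
        for key, regex in filter_dict.iteritems():
            negated = isinstance(regex, str) and regex.startswith('!')
            if negated:
                regex = regex[1:]
            if bool(re.match(regex, env.get(key, ''))) == negated:
                return False
        return True

    assert matches({'PATH_INFO': '^/admin'}, {'PATH_INFO': '/admin/x'})
    assert not matches({'PATH_INFO': '!^/admin'}, {'PATH_INFO': '/admin/x'})
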
diff --git a/google-appengine/google/appengine/ext/appstats/static/appstats_js.js b/google-appengine/google/appengine/ext/appstats/static/appstats_js.js
index a77bdad..f9c882b 100755
--- a/google-appengine/google/appengine/ext/appstats/static/appstats_js.js
+++ b/google-appengine/google/appengine/ext/appstats/static/appstats_js.js
@@ -1,71 +1,76 @@
-/* Copyright 2008-9 Google Inc. All Rights Reserved. */ (function(){function e(a){throw a;}var h=true,i=null,k=false,aa=Object,l=Error,ba=undefined,ca=parseInt,da=document;function ea(a,b){return a.currentTarget=b}function fa(a,b){return a.keyCode=b}function ga(a,b){return a.type=b}function ha(a,b){return a.length=b}function ia(a,b){return a.className=b}function ja(a,b){return a.target=b}
-var ka="appendChild",m="push",la="relatedTarget",ma="slice",n="replace",na="nodeType",oa="preventDefault",q="indexOf",s="dispatchEvent",pa="capture",qa="nodeName",ra="charCode",t="keyCode",u="firstChild",sa="setAttribute",ta="handleEvent",w="type",ua="nextSibling",va="setActive",wa="toString",x="length",xa="propertyIsEnumerable",y="prototype",z="split",ya="stopPropagation",za="style",Aa="body",Ba="removeChild",A="target",B="call",C="apply",Ca="navigator",D="parentNode",Da="join",Ea="nodeValue",E,
-F=this,Fa=function(a,b,c){a=a[z](".");c=c||F;!(a[0]in c)&&c.execScript&&c.execScript("var "+a[0]);for(var d;a[x]&&(d=a.shift());)if(!a[x]&&b!==ba)c[d]=b;else c=c[d]?c[d]:(c[d]={})},Ga=function(a,b){a=a[z](".");b=b||F;for(var c;c=a.shift();)if(b[c])b=b[c];else return i;return b},Ha=function(){},Ia=function(a){a.T=function(){return a.Tb||(a.Tb=new a)}},Ja=function(a){var b=typeof a;if(b=="object")if(a){if(a instanceof Array||!(a instanceof aa)&&aa[y][wa][B](a)=="[object Array]"||typeof a[x]=="number"&&
-typeof a.splice!="undefined"&&typeof a[xa]!="undefined"&&!a[xa]("splice"))return"array";if(!(a instanceof aa)&&(aa[y][wa][B](a)=="[object Function]"||typeof a[B]!="undefined"&&typeof a[xa]!="undefined"&&!a[xa]("call")))return"function"}else return"null";else if(b=="function"&&typeof a[B]=="undefined")return"object";return b},Ka=function(a){return Ja(a)=="array"},La=function(a){var b=Ja(a);return b=="array"||b=="object"&&typeof a[x]=="number"},I=function(a){return typeof a=="string"},J=function(a){return Ja(a)==
-"function"},Ma=function(a){a=Ja(a);return a=="object"||a=="array"||a=="function"},K=function(a){if(a.hasOwnProperty&&a.hasOwnProperty(Na))return a[Na];a[Na]||(a[Na]=++Oa);return a[Na]},Na="closure_hashCode_"+Math.floor(Math.random()*2147483648)[wa](36),Oa=0,Pa=function(a){var b=Ja(a);if(b=="object"||b=="array"){if(a.Bb)return a.Bb[B](a);b=b=="array"?[]:{};for(var c in a)b[c]=Pa(a[c]);return b}return a},Qa=function(a){var b=Array[y][ma][B](arguments,1);return function(){var c=Array[y][ma][B](arguments);
-c.unshift[C](c,b);return a[C](this,c)}},L=function(a,b){function c(){}c.prototype=b[y];a.d=b[y];a.prototype=new c;a[y].constructor=a};var M=Array[y],Ra=M[q]?function(a,b,c){return M[q][B](a,b,c)}:function(a,b,c){c=c==i?0:c<0?Math.max(0,a[x]+c):c;if(I(a)){if(!I(b)||b[x]!=1)return-1;return a[q](b,c)}for(c=c;c<a[x];c++)if(c in a&&a[c]===b)return c;return-1},Sa=M.forEach?function(a,b,c){M.forEach[B](a,b,c)}:function(a,b,c){for(var d=a[x],f=I(a)?a[z](""):a,g=0;g<d;g++)g in f&&b[B](c,f[g],g,a)},Ta=M.every?function(a,b,c){return M.every[B](a,b,c)}:function(a,b,c){for(var d=a[x],f=I(a)?a[z](""):a,g=0;g<d;g++)if(g in f&&!b[B](c,f[g],g,a))return k;
-return h},Ua=function(a,b){return Ra(a,b)>=0},Va=function(a,b){b=Ra(a,b);var c;if(c=b>=0)M.splice[B](a,b,1)[x]==1;return c},Wa=function(){return M.concat[C](M,arguments)},Xa=function(a){if(Ka(a))return Wa(a);else{for(var b=[],c=0,d=a[x];c<d;c++)b[c]=a[c];return b}},Za=function(a){return M.splice[C](a,Ya(arguments,1))},Ya=function(a,b,c){return arguments[x]<=2?M[ma][B](a,b):M[ma][B](a,b,c)};var $a=function(a,b,c){for(var d in a)b[B](c,a[d],d,a)},ab=function(a,b){var c;if(c=b in a)delete a[b];return c},bb=function(a,b,c){if(b in a)e(l('The object already contains the key "'+b+'"'));a[b]=c},cb=function(a,b,c){if(b in a)return a[b];return c},db=function(a){var b={};for(var c in a)b[a[c]]=c;return b},eb=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"],fb=function(a){for(var b,c,d=1;d<arguments[x];d++){c=arguments[d];for(b in c)a[b]=
-c[b];for(var f=0;f<eb[x];f++){b=eb[f];if(aa[y].hasOwnProperty[B](c,b))a[b]=c[b]}}},gb=function(){var a=arguments[x];if(a==1&&Ka(arguments[0]))return gb[C](i,arguments[0]);if(a%2)e(l("Uneven number of arguments"));for(var b={},c=0;c<a;c+=2)b[arguments[c]]=arguments[c+1];return b};var hb=function(a){return a[n](/^[\s\xa0]+|[\s\xa0]+$/g,"")},nb=function(a,b){if(b)return a[n](ib,"&amp;")[n](jb,"&lt;")[n](kb,"&gt;")[n](lb,"&quot;");else{if(!mb.test(a))return a;if(a[q]("&")!=-1)a=a[n](ib,"&amp;");if(a[q]("<")!=-1)a=a[n](jb,"&lt;");if(a[q](">")!=-1)a=a[n](kb,"&gt;");if(a[q]('"')!=-1)a=a[n](lb,"&quot;");return a}},ib=/&/g,jb=/</g,kb=/>/g,lb=/\"/g,mb=/[&<>\"]/,pb=function(a,b){var c=0;a=hb(String(a))[z](".");b=hb(String(b))[z](".");for(var d=Math.max(a[x],b[x]),f=0;c==0&&f<d;f++){var g=
-a[f]||"",j=b[f]||"",o=new RegExp("(\\d*)(\\D*)","g"),p=new RegExp("(\\d*)(\\D*)","g");do{var r=o.exec(g)||["","",""],v=p.exec(j)||["","",""];if(r[0][x]==0&&v[0][x]==0)break;c=ob(r[1][x]==0?0:ca(r[1],10),v[1][x]==0?0:ca(v[1],10))||ob(r[2][x]==0,v[2][x]==0)||ob(r[2],v[2])}while(c==0)}return c},ob=function(a,b){if(a<b)return-1;else if(a>b)return 1;return 0};(Date.now||function(){return+new Date})();var qb,rb,sb,tb,ub=function(){return F[Ca]?F[Ca].userAgent:i};tb=sb=rb=qb=k;var vb;if(vb=ub()){var wb=F[Ca];qb=vb[q]("Opera")==0;rb=!qb&&vb[q]("MSIE")!=-1;sb=!qb&&vb[q]("WebKit")!=-1;tb=!qb&&!sb&&wb.product=="Gecko"}var xb=qb,N=rb,O=tb,yb=sb,zb=F[Ca],Ab=(zb&&zb.platform||"")[q]("Mac")!=-1,Bb="",Cb;
-if(xb&&F.opera){var Db=F.opera.version;Bb=typeof Db=="function"?Db():Db}else{if(O)Cb=/rv\:([^\);]+)(\)|;)/;else if(N)Cb=/MSIE\s+([^\);]+)(\)|;)/;else if(yb)Cb=/WebKit\/(\S+)/;if(Cb){var Eb=Cb.exec(ub());Bb=Eb?Eb[1]:""}}var Fb=Bb,Gb={},Hb=function(a){return Gb[a]||(Gb[a]=pb(Fb,a)>=0)};var Ib,Jb=function(a){return(a=a.className)&&typeof a[z]=="function"?a[z](" "):[]},Kb=function(a){var b=Jb(a),c;c=Ya(arguments,1);for(var d=0,f=0;f<c[x];f++)if(!Ua(b,c[f])){b[m](c[f]);d++}c=d==c[x];ia(a,b[Da](" "));return c},Lb=function(a){var b=Jb(a),c;c=Ya(arguments,1);for(var d=0,f=0;f<b[x];f++)if(Ua(c,b[f])){Za(b,f--,1);d++}c=d==c[x];ia(a,b[Da](" "));return c};var Ob=function(a){return a?new Mb(Nb(a)):Ib||(Ib=new Mb)},Pb=function(a){return I(a)?da.getElementById(a):a},Qb=function(a,b,c,d){d=d||a;b=b&&b!="*"?b.toLowerCase():"";if(d.querySelectorAll&&(b||c)&&(!yb||a.compatMode=="CSS1Compat"||Hb("528")))return d.querySelectorAll(b+(c?"."+c:""));if(c&&d.getElementsByClassName){a=d.getElementsByClassName(c);if(b){d={};for(var f=0,g=0,j;j=a[g];g++)if(b==j[qa].toLowerCase())d[f++]=j;ha(d,f);return d}else return a}a=d.getElementsByTagName(b||"*");if(c){d={};for(g=
-f=0;j=a[g];g++){b=j.className;if(typeof b[z]=="function"&&Ua(b[z](" "),c))d[f++]=j}ha(d,f);return d}else return a},Sb=function(a,b){$a(b,function(c,d){if(d=="style")a[za].cssText=c;else if(d=="class")ia(a,c);else if(d=="for")a.htmlFor=c;else if(d in Rb)a[sa](Rb[d],c);else a[d]=c})},Rb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",rowspan:"rowSpan",valign:"vAlign",height:"height",width:"width",usemap:"useMap",frameborder:"frameBorder",type:"type"},Ub=function(){return Tb(da,
-arguments)},Tb=function(a,b){var c=b[0],d=b[1];if(N&&d&&(d.name||d[w])){c=["<",c];d.name&&c[m](' name="',nb(d.name),'"');if(d[w]){c[m](' type="',nb(d[w]),'"');d=Pa(d);delete d[w]}c[m](">");c=c[Da]("")}var f=a.createElement(c);if(d)if(I(d))ia(f,d);else Sb(f,d);if(b[x]>2){d=function(j){if(j)f[ka](I(j)?a.createTextNode(j):j)};for(c=2;c<b[x];c++){var g=b[c];La(g)&&!(Ma(g)&&g[na]>0)?Sa(Vb(g)?Xa(g):g,d):d(g)}}return f},Wb=function(a){return a&&a[D]?a[D][Ba](a):i},Xb=function(a,b){if(a.contains&&b[na]==
-1)return a==b||a.contains(b);if(typeof a.compareDocumentPosition!="undefined")return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b[D];return b==a},Nb=function(a){return a[na]==9?a:a.ownerDocument||a.document},Yb=function(a,b){if("textContent"in a)a.textContent=b;else if(a[u]&&a[u][na]==3){for(;a.lastChild!=a[u];)a[Ba](a.lastChild);a[u].data=b}else{for(var c;c=a[u];)a[Ba](c);a[ka](Nb(a).createTextNode(b))}},Zb={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},$b={IMG:" ",BR:"\n"},ac=function(a){var b=
-a.getAttributeNode("tabindex");if(b&&b.specified){a=a.tabIndex;return typeof a=="number"&&a>=0}return k},bc=function(a,b){if(b)a.tabIndex=0;else a.removeAttribute("tabIndex")},cc=function(a,b,c){if(!(a[qa]in Zb))if(a[na]==3)c?b[m](String(a[Ea])[n](/(\r\n|\r|\n)/g,"")):b[m](a[Ea]);else if(a[qa]in $b)b[m]($b[a[qa]]);else for(a=a[u];a;){cc(a,b,c);a=a[ua]}},Vb=function(a){if(a&&typeof a[x]=="number")if(Ma(a))return typeof a.item=="function"||typeof a.item=="string";else if(J(a))return typeof a.item==
-"function";return k},Mb=function(a){this.I=a||F.document||da};E=Mb[y];E.Fa=Ob;E.c=function(a){return I(a)?this.I.getElementById(a):a};E.n=function(){return Tb(this.I,arguments)};E.createElement=function(a){return this.I.createElement(a)};E.createTextNode=function(a){return this.I.createTextNode(a)};E.appendChild=function(a,b){a[ka](b)};E.contains=Xb;var dc=function(){};dc[y].Qa=k;dc[y].Ub=function(){return this.Qa};dc[y].M=function(){if(!this.Qa){this.Qa=h;this.g()}};dc[y].g=function(){};var P=function(a,b){ga(this,a);ja(this,b);ea(this,this[A])};L(P,dc);E=P[y];E.g=function(){delete this[w];delete this[A];delete this.currentTarget};E.X=k;E.la=h;E.stopPropagation=function(){this.X=h};E.preventDefault=function(){this.la=k};var ec=function(a,b){a&&this.sa(a,b)};L(ec,P);var fc=[1,4,2];E=ec[y];ja(E,i);E.relatedTarget=i;E.offsetX=0;E.offsetY=0;E.clientX=0;E.clientY=0;E.screenX=0;E.screenY=0;E.button=0;fa(E,0);E.charCode=0;E.ctrlKey=k;E.altKey=k;E.shiftKey=k;E.metaKey=k;E.N=i;
-E.sa=function(a,b){var c=ga(this,a[w]);ja(this,a[A]||a.srcElement);ea(this,b);if(b=a[la]){if(O)try{b=b[qa]&&b}catch(d){b=i}}else if(c=="mouseover")b=a.fromElement;else if(c=="mouseout")b=a.toElement;this.relatedTarget=b;this.offsetX=a.offsetX!==ba?a.offsetX:a.layerX;this.offsetY=a.offsetY!==ba?a.offsetY:a.layerY;this.clientX=a.clientX!==ba?a.clientX:a.pageX;this.clientY=a.clientY!==ba?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;fa(this,a[t]||0);this.charCode=
-a[ra]||(c=="keypress"?a[t]:0);this.ctrlKey=a.ctrlKey;this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.N=a;delete this.la;delete this.X};var gc=function(a,b){return N?a[w]=="click"?b==0:!!(a.N.button&fc[b]):a.N.button==b};ec[y].stopPropagation=function(){this.X=h;if(this.N[ya])this.N[ya]();else this.N.cancelBubble=h};var hc=N&&!Hb("8");
-ec[y].preventDefault=function(){this.la=k;var a=this.N;if(a[oa])a[oa]();else{a.returnValue=k;if(hc)try{if(a.ctrlKey||a[t]>=112&&a[t]<=123)fa(a,-1)}catch(b){}}};ec[y].g=function(){ec.d.g[B](this);this.N=i;ja(this,i);ea(this,i);this.relatedTarget=i};var Q=function(a,b){this.pb=b;this.ba=[];if(a>this.pb)e(l("[goog.structs.SimplePool] Initial cannot be greater than max"));for(b=0;b<a;b++)this.ba[m](this.K?this.K():{})};L(Q,dc);Q[y].K=i;Q[y].hb=i;var ic=function(a){if(a.ba[x])return a.ba.pop();return a.K?a.K():{}},kc=function(a,b){a.ba[x]<a.pb?a.ba[m](b):jc(a,b)},jc=function(a,b){if(a.hb)a.hb(b);else if(J(b.M))b.M();else for(var c in b)delete b[c]};Q[y].g=function(){Q.d.g[B](this);for(var a=this.ba;a[x];)jc(this,a.pop());delete this.ba};var lc;var mc=(lc="ScriptEngine"in F&&F.ScriptEngine()=="JScript")?F.ScriptEngineMajorVersion()+"."+F.ScriptEngineMinorVersion()+"."+F.ScriptEngineBuildVersion():"0";var nc=function(){},oc=0;E=nc[y];E.key=0;E.ka=k;E.eb=k;E.sa=function(a,b,c,d,f,g){if(J(a))this.nb=h;else if(a&&a[ta]&&J(a[ta]))this.nb=k;else e(l("Invalid listener argument"));this.ia=a;this.rb=b;this.src=c;ga(this,d);this.capture=!!f;this.Ha=g;this.eb=k;this.key=++oc;this.ka=k};E.handleEvent=function(a){if(this.nb)return this.ia[B](this.Ha||this.src,a);return this.ia[ta][B](this.ia,a)};var pc,qc,rc,sc,tc,uc,vc,wc,xc,yc,zc;
-(function(){function a(){return{H:0,C:0}}function b(){return[]}function c(){var G=function(Ud){return j[B](G.src,G.key,Ud)};return G}function d(){return new nc}function f(){return new ec}var g=lc&&!(pb(mc,"5.7")>=0),j;uc=function(G){j=G};if(g){pc=function(){return ic(o)};qc=function(G){kc(o,G)};rc=function(){return ic(p)};sc=function(G){kc(p,G)};tc=function(){return ic(r)};vc=function(){kc(r,c())};wc=function(){return ic(v)};xc=function(G){kc(v,G)};yc=function(){return ic(H)};zc=function(G){kc(H,
-G)};var o=new Q(0,600);o.K=a;var p=new Q(0,600);p.K=b;var r=new Q(0,600);r.K=c;var v=new Q(0,600);v.K=d;var H=new Q(0,600);H.K=f}else{pc=a;qc=Ha;rc=b;sc=Ha;tc=c;vc=Ha;wc=d;xc=Ha;yc=f;zc=Ha}})();var Ac={},R={},Bc={},Cc={},S=function(a,b,c,d,f){if(b)if(Ka(b)){for(var g=0;g<b[x];g++)S(a,b[g],c,d,f);return i}else{d=!!d;var j=R;b in j||(j[b]=pc());j=j[b];if(!(d in j)){j[d]=pc();j.H++}j=j[d];var o=K(a),p;j.C++;if(j[o]){p=j[o];for(g=0;g<p[x];g++){j=p[g];if(j.ia==c&&j.Ha==f){if(j.ka)break;return p[g].key}}}else{p=j[o]=rc();j.H++}g=tc();g.src=a;j=wc();j.sa(c,g,a,b,d,f);c=j.key;g.key=c;p[m](j);Ac[c]=j;Bc[o]||(Bc[o]=rc());Bc[o][m](j);if(a.addEventListener){if(a==F||!a.gb)a.addEventListener(b,g,d)}else a.attachEvent(Dc(b),
-g);return c}else e(l("Invalid event type"))},Ec=function(a,b,c,d,f){if(Ka(b)){for(var g=0;g<b[x];g++)Ec(a,b[g],c,d,f);return i}d=!!d;a=Fc(a,b,d);if(!a)return k;for(g=0;g<a[x];g++)if(a[g].ia==c&&a[g][pa]==d&&a[g].Ha==f)return T(a[g].key);return k},T=function(a){if(!Ac[a])return k;var b=Ac[a];if(b.ka)return k;var c=b.src,d=b[w],f=b.rb,g=b[pa];if(c.removeEventListener){if(c==F||!c.gb)c.removeEventListener(d,f,g)}else c.detachEvent&&c.detachEvent(Dc(d),f);c=K(c);f=R[d][g][c];if(Bc[c]){var j=Bc[c];Va(j,
-b);j[x]==0&&delete Bc[c]}b.ka=h;f.qb=h;Gc(d,g,c,f);delete Ac[a];return h},Gc=function(a,b,c,d){if(!d.Ka)if(d.qb){for(var f=0,g=0;f<d[x];f++)if(d[f].ka){var j=d[f].rb;j.src=i;vc(j);xc(d[f])}else{if(f!=g)d[g]=d[f];g++}ha(d,g);d.qb=k;if(g==0){sc(d);delete R[a][b][c];R[a][b].H--;if(R[a][b].H==0){qc(R[a][b]);delete R[a][b];R[a].H--}if(R[a].H==0){qc(R[a]);delete R[a]}}}},Hc=function(a,b,c){var d=0,f=a==i,g=b==i,j=c==i;c=!!c;if(f)$a(Bc,function(p){for(var r=p[x]-1;r>=0;r--){var v=p[r];if((g||b==v[w])&&(j||
-c==v[pa])){T(v.key);d++}}});else{a=K(a);if(Bc[a]){a=Bc[a];for(f=a[x]-1;f>=0;f--){var o=a[f];if((g||b==o[w])&&(j||c==o[pa])){T(o.key);d++}}}}return d},Fc=function(a,b,c){var d=R;if(b in d){d=d[b];if(c in d){d=d[c];a=K(a);if(d[a])return d[a]}}return i},Dc=function(a){if(a in Cc)return Cc[a];return Cc[a]="on"+a},Jc=function(a,b,c,d,f){var g=1;b=K(b);if(a[b]){a.C--;a=a[b];if(a.Ka)a.Ka++;else a.Ka=1;try{for(var j=a[x],o=0;o<j;o++){var p=a[o];if(p&&!p.ka)g&=Ic(p,f)!==k}}finally{a.Ka--;Gc(c,d,b,a)}}return Boolean(g)},
-Ic=function(a,b){b=a[ta](b);a.eb&&T(a.key);return b};
-uc(function(a,b){if(!Ac[a])return h;a=Ac[a];var c=a[w],d=R;if(!(c in d))return h;d=d[c];var f,g;if(N){f=b||Ga("window.event");b=h in d;var j=k in d;if(b){if(f[t]<0||f.returnValue!=ba)return h;a:{var o=k;if(f[t]==0)try{fa(f,-1);break a}catch(p){o=h}if(o||f.returnValue==ba)f.returnValue=h}}o=yc();o.sa(f,this);f=h;try{if(b){for(var r=rc(),v=o.currentTarget;v;v=v[D])r[m](v);g=d[h];g.C=g.H;for(var H=r[x]-1;!o.X&&H>=0&&g.C;H--){ea(o,r[H]);f&=Jc(g,r[H],c,h,o)}if(j){g=d[k];g.C=g.H;for(H=0;!o.X&&H<r[x]&&g.C;H++){ea(o,
-r[H]);f&=Jc(g,r[H],c,k,o)}}}else f=Ic(a,o)}finally{if(r){ha(r,0);sc(r)}o.M();zc(o)}return f}g=new ec(b,this);try{f=Ic(a,g)}finally{g.M()}return f});var Kc=function(a){this.mb=a};L(Kc,dc);var Lc=new Q(0,100);Kc[y].f=function(a,b,c,d,f){if(Ka(b))for(var g=0;g<b[x];g++)this.f(a,b[g],c,d,f);else{a=S(a,b,c||this,d||k,f||this.mb||this);if(this.u)this.u[a]=h;else if(this.V){this.u=ic(Lc);this.u[this.V]=h;this.V=i;this.u[a]=h}else this.V=a}return this};
-Kc[y].Q=function(a,b,c,d,f){if(this.V||this.u)if(Ka(b))for(var g=0;g<b[x];g++)this.Q(a,b[g],c,d,f);else{a:{c=c||this;f=f||this.mb||this;d=!!(d||k);if(a=Fc(a,b,d))for(b=0;b<a[x];b++)if(a[b].ia==c&&a[b][pa]==d&&a[b].Ha==f){a=a[b];break a}a=i}if(a){a=a.key;T(a);if(this.u)ab(this.u,a);else if(this.V==a)this.V=i}}return this};var Mc=function(a){if(a.u){for(var b in a.u){T(b);delete a.u[b]}kc(Lc,a.u);a.u=i}else a.V&&T(a.V)};Kc[y].g=function(){Kc.d.g[B](this);Mc(this)};Kc[y].handleEvent=function(){e(l("EventHandler.handleEvent not implemented"))};var Nc=function(){};L(Nc,dc);E=Nc[y];E.gb=h;E.La=i;E.ab=function(a){this.La=a};E.addEventListener=function(a,b,c,d){S(this,a,b,c,d)};E.removeEventListener=function(a,b,c,d){Ec(this,a,b,c,d)};
-E.dispatchEvent=function(a){a=a;if(I(a))a=new P(a,this);else if(a instanceof P)ja(a,a[A]||this);else{var b=a;a=new P(a[w],this);fb(a,b)}b=1;var c,d=a[w],f=R;if(d in f){f=f[d];d=h in f;var g;if(d){c=[];for(g=this;g;g=g.La)c[m](g);g=f[h];g.C=g.H;for(var j=c[x]-1;!a.X&&j>=0&&g.C;j--){ea(a,c[j]);b&=Jc(g,c[j],a[w],h,a)&&a.la!=k}}if(k in f){g=f[k];g.C=g.H;if(d)for(j=0;!a.X&&j<c[x]&&g.C;j++){ea(a,c[j]);b&=Jc(g,c[j],a[w],k,a)&&a.la!=k}else for(c=this;!a.X&&c&&g.C;c=c.La){ea(a,c);b&=Jc(g,c,a[w],k,a)&&a.la!=
-k}}a=Boolean(b)}else a=h;return a};E.g=function(){Nc.d.g[B](this);Hc(this);this.La=i};var Oc=function(a,b){var c=Nb(a);if(c.defaultView&&c.defaultView.getComputedStyle)if(a=c.defaultView.getComputedStyle(a,""))return a[b];return i},Pc=function(a,b){a[za].display=b?"":"none"},Qc=O?"MozUserSelect":yb?"WebkitUserSelect":i,Rc=function(a,b,c){c=!c?a.getElementsByTagName("*"):i;if(Qc){b=b?"none":"";a[za][Qc]=b;if(c){a=0;for(var d;d=c[a];a++)d[za][Qc]=b}}else if(N||xb){b=b?"on":"";a[sa]("unselectable",b);if(c)for(a=0;d=c[a];a++)d[sa]("unselectable",b)}};var Sc=function(){};Ia(Sc);Sc[y].Yb=0;Sc.T();var U=function(a){this.z=a||Ob();this.ua=Tc};L(U,Nc);U[y].Sb=Sc.T();var Tc=i,Uc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close";default:}e(l("Invalid component state"))};E=U[y];E.ga=i;E.z=i;E.e=k;E.b=i;E.ua=i;E.Xb=i;E.k=i;E.q=i;E.w=i;E.vb=k;
-var Vc=function(a){return a.ga||(a.ga=":"+(a.Sb.Yb++)[wa](36))},Wc=function(a,b){if(a.k&&a.k.w){ab(a.k.w,a.ga);bb(a.k.w,b,a)}a.ga=b};U[y].c=function(){return this.b};var Xc=function(a){return a.fa||(a.fa=new Kc(a))},Yc=function(a,b){if(a==b)e(l("Unable to set parent component"));if(b&&a.k&&a.ga&&a.k.jb(a.ga)&&a.k!=b)e(l("Unable to set parent component"));a.k=b;U.d.ab[B](a,b)};E=U[y];E.ab=function(a){if(this.k&&this.k!=a)e(l("Method not supported"));U.d.ab[B](this,a)};E.Fa=function(){return this.z};
-E.n=function(){this.b=this.z.createElement("div")};E.L=function(a){if(this.e)e(l("Component already rendered"));else if(a&&this.Z(a)){this.vb=h;if(!this.z||this.z.I!=Nb(a))this.z=Ob(a);this.Pa(a);this.J()}else e(l("Invalid element to decorate"))};E.Z=function(){return h};E.Pa=function(a){this.b=a};E.J=function(){this.e=h;Zc(this,function(a){!a.e&&a.c()&&a.J()})};E.aa=function(){Zc(this,function(a){a.e&&a.aa()});this.fa&&Mc(this.fa);this.e=k};
-E.g=function(){U.d.g[B](this);this.e&&this.aa();if(this.fa){this.fa.M();delete this.fa}Zc(this,function(a){a.M()});!this.vb&&this.b&&Wb(this.b);this.k=this.Xb=this.b=this.w=this.q=i};E.Aa=function(a,b){this.Oa(a,$c(this),b)};
-E.Oa=function(a,b,c){if(a.e&&(c||!this.e))e(l("Component already rendered"));if(b<0||b>$c(this))e(l("Child component index out of bounds"));if(!this.w||!this.q){this.w={};this.q=[]}if(a.k==this){this.w[Vc(a)]=a;Va(this.q,a)}else bb(this.w,Vc(a),a);Yc(a,this);Za(this.q,b,0,a);if(a.e&&this.e&&a.k==this){c=this.O();c.insertBefore(a.c(),c.childNodes[b+1]||i)}else if(c){this.b||this.n();c=this.s(b+1);b=this.O();c=c?c.b:i;if(a.e)e(l("Component already rendered"));a.b||a.n();b?b.insertBefore(a.b,c||i):a.z.I[Aa][ka](a.b);
-if(!a.k||a.k.e)a.J()}else this.e&&!a.e&&a.b&&a.J()};E.O=function(){return this.b};var ad=function(a){if(a.ua==i)a.ua="rtl"==(Oc(a.e?a.b:a.z.I[Aa],"direction")||((a.e?a.b:a.z.I[Aa]).currentStyle?(a.e?a.b:a.z.I[Aa]).currentStyle.direction:i)||(a.e?a.b:a.z.I[Aa])[za].direction);return a.ua};U[y].xa=function(a){if(this.e)e(l("Component already rendered"));this.ua=a};var $c=function(a){return a.q?a.q[x]:0};U[y].jb=function(a){return this.w&&a?cb(this.w,a)||i:i};
-U[y].s=function(a){return this.q?this.q[a]||i:i};var Zc=function(a,b,c){a.q&&Sa(a.q,b,c)},bd=function(a,b){return a.q&&b?Ra(a.q,b):-1};U[y].removeChild=function(a,b){if(a){var c=I(a)?a:Vc(a);a=this.jb(c);if(c&&a){ab(this.w,c);Va(this.q,a);if(b){a.aa();a.b&&Wb(a.b)}Yc(a,i)}}if(!a)e(l("Child is not in parent component"));return a};var cd,dd=function(a,b){if(O||cd){a[sa]("role",b);a.dc=b}},ed=function(a,b,c){if(O||cd)a[sa]("aria-"+b,c)};var gd=function(a,b,c,d,f){if(!N&&!(yb&&Hb("525")))return h;if(Ab&&f)return fd(a);if(f&&!d)return k;if(N&&!c&&(b==17||b==18))return k;if(N&&d&&b==a)return k;switch(a){case 13:return h;case 27:return!yb}return fd(a)},fd=function(a){if(a>=48&&a<=57)return h;if(a>=96&&a<=106)return h;if(a>=65&&a<=90)return h;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return h;default:return k}};var V=function(a){a&&hd(this,a)};L(V,Nc);E=V[y];E.b=i;E.Ia=i;E.Ya=i;E.Ja=i;E.ta=-1;E.ha=-1;
-var id={"3":13,"12":144,"63232":38,"63233":40,"63234":37,"63235":39,"63236":112,"63237":113,"63238":114,"63239":115,"63240":116,"63241":117,"63242":118,"63243":119,"63244":120,"63245":121,"63246":122,"63247":123,"63248":44,"63272":46,"63273":36,"63275":35,"63276":33,"63277":34,"63289":144,"63302":45},jd={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},kd={61:187,
-59:186},ld=N||yb&&Hb("525");V[y].Kb=function(a){if(ld&&!gd(a[t],this.ta,a.shiftKey,a.ctrlKey,a.altKey))this[ta](a);else this.ha=O&&a[t]in kd?kd[a[t]]:a[t]};V[y].Lb=function(){this.ha=this.ta=-1};
-V[y].handleEvent=function(a){var b=a.N,c,d;if(N&&a[w]=="keypress"){c=this.ha;d=c!=13&&c!=27?b[t]:0}else if(yb&&a[w]=="keypress"){c=this.ha;d=b[ra]>=0&&b[ra]<63232&&fd(c)?b[ra]:0}else if(xb){c=this.ha;d=fd(c)?b[t]:0}else{c=b[t]||this.ha;d=b[ra]||0;if(Ab&&d==63&&!c)c=191}var f=c,g=b.keyIdentifier;if(c)if(c>=63232&&c in id)f=id[c];else{if(c==25&&a.shiftKey)f=9}else if(g&&g in jd)f=jd[g];a=f==this.ta;this.ta=f;b=new md(f,d,a,b);try{this[s](b)}finally{b.M()}};
-var hd=function(a,b){a.Ja&&a.detach();a.b=b;a.Ia=S(a.b,"keypress",a);a.Ya=S(a.b,"keydown",a.Kb,k,a);a.Ja=S(a.b,"keyup",a.Lb,k,a)};V[y].detach=function(){if(this.Ia){T(this.Ia);T(this.Ya);T(this.Ja);this.Ja=this.Ya=this.Ia=i}this.b=i;this.ta=-1};V[y].g=function(){V.d.g[B](this);this.detach()};var md=function(a,b,c,d){d&&this.sa(d,void 0);ga(this,"key");fa(this,a);this.charCode=b;this.repeat=c};L(md,ec);var od=function(a){for(var b;a;){b=K(a);if(b=nd[b])break;a=a.d?a.d.constructor:i}if(b)return J(b.T)?b.T():new b;return i},qd=function(a,b){if(!a)e(l("Invalid class name "+a));if(!J(b))e(l("Invalid decorator function "+b));pd[a]=b},nd={},pd={};var rd=function(){},sd;Ia(rd);E=rd[y];E.ea=function(){};E.n=function(a){return a.Fa().n("div",this.na(a)[Da](" "),a.Ea)};E.O=function(a){return a};E.ma=function(a,b,c){if(a=a.c?a.c():a)if(N&&!Hb("7")){var d=td(this,Jb(a),b);d[m](b);Qa(c?Kb:Lb,a)[C](i,d)}else c?Kb(a,b):Lb(a,b)};E.Z=function(){return h};
-E.L=function(a,b){b.id&&Wc(a,b.id);var c=this.O(b);c&&c[u]?ud(a,c[u][ua]?Xa(c.childNodes):c[u]):ud(a,i);var d=0,f=this.p(),g=this.p(),j=k,o=k;c=k;var p=Jb(b);Sa(p,function(v){if(!j&&v==f){j=h;if(g==f)o=h}else if(!o&&v==g)o=h;else d|=vd(this,v)},this);a.m=d;if(!j){p[m](f);if(g==f)o=h}o||p[m](g);(a=a.B)&&p[m][C](p,a);if(N&&!Hb("7")){var r=td(this,p);if(r[x]>0){p[m][C](p,r);c=h}}if(!j||!o||a||c)ia(b,p[Da](" "));return b};E.Xa=function(a){ad(a)&&this.xa(a.c(),h);a.j()&&this.wa(a,a.P())};
-E.Ma=function(a,b){Rc(a,!b,!N&&!xb)};E.xa=function(a,b){this.ma(a,this.p()+"-rtl",b)};E.ca=function(a){var b;if(a.v&32&&(b=a.o()))return ac(b);return k};E.wa=function(a,b){var c;if(a.v&32&&(c=a.o())){if(!b&&a.m&32){try{c.blur()}catch(d){}a.m&32&&a.oa(i)}ac(c)!=b&&bc(c,b)}};E.ya=function(a,b){Pc(a,b)};E.G=function(a,b,c){var d=a.c();if(d){var f=wd(this,b);f&&this.ma(a,f,c);if(O){sd||(sd=gb(1,"disabled",4,"pressed",8,"selected",16,"checked",64,"expanded"));(a=sd[b])&&ed(d,a,c)}}};E.o=function(a){return a.c()};
-E.p=function(){return"goog-control"};E.na=function(a){var b=this.p(),c=[b],d=this.p();d!=b&&c[m](d);if(b=a.m){d=[];for(var f=1;b;f<<=1)if(b&f){d[m](wd(this,f));b&=~f}b=d}else b=i;b&&c[m][C](c,b);(a=a.B)&&c[m][C](c,a);N&&!Hb("7")&&c[m][C](c,td(this,c));return c};
-var td=function(a,b,c){var d=[];if(c)b=b.concat([c]);Sa([],function(f){if(Ta(f,Qa(Ua,b))&&(!c||Ua(f,c)))d[m](f[Da]("_"))});return d},wd=function(a,b){a.Da||xd(a);return a.Da[b]},vd=function(a,b){a.sb||yd(a);a=ca(a.sb[b],10);return isNaN(a)?0:a},xd=function(a){var b=a.p();a.Da=gb(1,b+"-disabled",2,b+"-hover",4,b+"-active",8,b+"-selected",16,b+"-checked",32,b+"-focused",64,b+"-open")},yd=function(a){a.Da||xd(a);a.sb=db(a.Da)};var W=function(a,b,c){U[B](this,c);this.a=b||od(this.constructor);this.Ea=a};L(W,U);E=W[y];E.Ea=i;E.m=0;E.v=39;E.zb=255;E.Na=0;E.r=h;E.B=i;E.ra=h;E.Ba=k;E.o=function(){return this.a.o(this)};E.Ga=function(){return this.t||(this.t=new V)};E.kb=function(){return this.a};var zd=function(a,b){if(b){if(a.B)Ua(a.B,b)||a.B[m](b);else a.B=[b];a.a.ma(a,b,h)}},Ad=function(a,b){if(b&&a.B){Va(a.B,b);if(a.B[x]==0)a.B=i;a.a.ma(a,b,k)}};E=W[y];E.ma=function(a,b){b?zd(this,a):Ad(this,a)};
-E.n=function(){var a=this.a.n(this);this.b=a;if(O){var b=this.a.ea();b&&dd(a,b)}this.Ba||this.a.Ma(a,k);this.P()||this.a.ya(a,k)};E.O=function(){return this.a.O(this.c())};E.Z=function(a){return this.a.Z(a)};E.Pa=function(a){this.b=a=this.a.L(this,a);if(O){var b=this.a.ea();b&&dd(a,b)}this.Ba||this.a.Ma(a,k);this.r=a[za].display!="none"};
-E.J=function(){W.d.J[B](this);this.a.Xa(this);if(this.v&-2){this.ra&&Bd(this,h);if(this.v&32){var a=this.o();if(a){var b=this.Ga();hd(b,a);Xc(this).f(b,"key",this.U).f(a,"focus",this.pa).f(a,"blur",this.oa)}}}};var Bd=function(a,b){var c=Xc(a),d=a.c();if(b){c.f(d,"mouseover",a.Va).f(d,"mousedown",a.qa).f(d,"mouseup",a.Wa).f(d,"mouseout",a.Ua);N&&c.f(d,"dblclick",a.lb)}else{c.Q(d,"mouseover",a.Va).Q(d,"mousedown",a.qa).Q(d,"mouseup",a.Wa).Q(d,"mouseout",a.Ua);N&&c.Q(d,"dblclick",a.lb)}};
-W[y].aa=function(){W.d.aa[B](this);this.t&&this.t.detach();this.P()&&this.j()&&this.a.wa(this,k)};W[y].g=function(){W.d.g[B](this);if(this.t){this.t.M();delete this.t}delete this.a;this.B=this.Ea=i};var ud=function(a,b){a.Ea=b};E=W[y];E.xa=function(a){W.d.xa[B](this,a);var b=this.c();b&&this.a.xa(b,a)};E.Ma=function(a){this.Ba=a;var b=this.c();b&&this.a.Ma(b,a)};E.P=function(){return this.r};
-E.ya=function(a,b){if(b||this.r!=a&&this[s](a?"show":"hide")){(b=this.c())&&this.a.ya(b,a);this.j()&&this.a.wa(this,a);this.r=a;return h}return k};E.j=function(){return!!!(this.m&1)};E.va=function(a){var b=this.k;if(!(b&&typeof b.j=="function"&&!b.j())&&Cd(this,1,!a)){if(!a){this[va](k);this.F(k)}this.P()&&this.a.wa(this,a);this.G(1,!a)}};E.F=function(a){Cd(this,2,a)&&this.G(2,a)};E.setActive=function(a){Cd(this,4,a)&&this.G(4,a)};
-var Dd=function(a,b){Cd(a,8,b)&&a.G(8,b)},Ed=function(a,b){Cd(a,16,b)&&a.G(16,b)},Fd=function(a,b){Cd(a,32,b)&&a.G(32,b)},Gd=function(a,b){Cd(a,64,b)&&a.G(64,b)};W[y].G=function(a,b){if(this.v&a&&b!=!!(this.m&a)){this.a.G(this,a,b);this.m=b?this.m|a:this.m&~a}};
-var Hd=function(a,b,c){if(a.e&&a.m&b&&!c)e(l("Component already rendered"));!c&&a.m&b&&a.G(b,k);a.v=c?a.v|b:a.v&~b},X=function(a,b){return!!(a.zb&b)&&!!(a.v&b)},Cd=function(a,b,c){return!!(a.v&b)&&!!(a.m&b)!=c&&(!(a.Na&b)||a[s](Uc(b,c)))&&!a.Ub()};E=W[y];E.Va=function(a){a[la]&&!Xb(this.c(),a[la])&&this[s]("enter")&&this.j()&&X(this,2)&&this.F(h)};E.Ua=function(a){if(a[la]&&!Xb(this.c(),a[la])&&this[s]("leave")){X(this,4)&&this[va](k);X(this,2)&&this.F(k)}};
-E.qa=function(a){if(this.j()){X(this,2)&&this.F(h);if(gc(a,0)){X(this,4)&&this[va](h);this.a.ca(this)&&this.o().focus()}}!this.Ba&&gc(a,0)&&a[oa]()};E.Wa=function(a){if(this.j()){X(this,2)&&this.F(h);this.m&4&&Id(this,a)&&X(this,4)&&this[va](k)}};E.lb=function(a){this.j()&&Id(this,a)};var Id=function(a,b){X(a,16)&&Ed(a,!!!(a.m&16));X(a,8)&&Dd(a,h);X(a,64)&&Gd(a,!!!(a.m&64));var c=new P("action",a);if(b)for(var d=["altKey","ctrlKey","metaKey","shiftKey"],f,g=0;f=d[g];g++)c[f]=b[f];return a[s](c)};
-W[y].pa=function(){X(this,32)&&Fd(this,h)};W[y].oa=function(){X(this,4)&&this[va](k);X(this,32)&&Fd(this,k)};W[y].U=function(a){if(this.P()&&this.j()&&this.Ta(a)){a[oa]();a[ya]();return h}return k};W[y].Ta=function(a){return a[t]==13&&Id(this,a)};if(!J(W))e(l("Invalid component class "+W));if(!J(rd))e(l("Invalid renderer class "+rd));var Jd=K(W);nd[Jd]=rd;qd("goog-control",function(){return new W(i)});var Kd=function(){};L(Kd,rd);Ia(Kd);Kd[y].n=function(a){return a.Fa().n("div",this.p())};Kd[y].L=function(a,b){if(b.tagName=="HR"){var c=b;b=this.n(a);c[D]&&c[D].insertBefore(b,c);Wb(c)}else Kb(b,this.p());return b};Kd[y].p=function(){return"goog-menuseparator"};var Ld=function(a,b){W[B](this,i,a||Kd.T(),b);Hd(this,1,k);Hd(this,2,k);Hd(this,4,k);Hd(this,32,k);this.m=1};L(Ld,W);Ld[y].J=function(){Ld.d.J[B](this);dd(this.c(),"separator")};qd("goog-menuseparator",function(){return new Ld});var Md=function(){};Ia(Md);Md[y].ea=function(){};var Nd=function(a,b,c){if(b)b.tabIndex=c?0:-1};E=Md[y];E.n=function(a){return a.Fa().n("div",this.na(a)[Da](" "))};E.O=function(a){return a};E.Z=function(a){return a.tagName=="DIV"};E.L=function(a,b){b.id&&Wc(a,b.id);var c=this.p(),d=k,f=Jb(b);f&&Sa(f,function(g){if(g==c)d=h;else g&&this.bb(a,g,c)},this);d||Kb(b,c);Od(this,a,b);return b};
-E.bb=function(a,b,c){if(b==c+"-disabled")a.va(k);else if(b==c+"-horizontal")Pd(a,"horizontal");else b==c+"-vertical"&&Pd(a,"vertical")};var Od=function(a,b,c){if(c){a=c[u];for(var d;a&&a[D]==c;){d=a[ua];if(a[na]==1){var f;a:{f=void 0;for(var g=Jb(a),j=0,o=g[x];j<o;j++)if(f=g[j]in pd?pd[g[j]]():i){f=f;break a}f=i}if(f){f.b=a;b.j()||f.va(k);b.Aa(f);f.L(a)}}else if(!a[Ea]||hb(a[Ea])=="")c[Ba](a);a=d}}};Md[y].Xa=function(a){a=a.c();Rc(a,h,O);if(N)a.hideFocus=h;var b=this.ea();b&&dd(a,b)};Md[y].o=function(a){return a.c()};
-Md[y].p=function(){return"goog-container"};Md[y].na=function(a){var b=this.p(),c=[b,a.W=="horizontal"?b+"-horizontal":b+"-vertical"];a.j()||c[m](b+"-disabled");return c};var Y=function(a,b,c){U[B](this,c);this.a=b||Md.T();this.W=a||"vertical"};L(Y,U);E=Y[y];E.Vb=i;E.t=i;E.a=i;E.W=i;E.r=h;E.$=h;E.Ra=h;E.l=-1;E.h=i;E.ja=k;E.xb=k;E.R=i;E.o=function(){return this.Vb||this.a.o(this)};E.Ga=function(){return this.t||(this.t=new V(this.o()))};E.kb=function(){return this.a};E.n=function(){this.b=this.a.n(this)};E.O=function(){return this.a.O(this.c())};E.Z=function(a){return this.a.Z(a)};E.Pa=function(a){this.b=this.a.L(this,a);if(a[za].display=="none")this.r=k};
-E.J=function(){Y.d.J[B](this);Zc(this,function(b){b.e&&Qd(this,b)},this);var a=this.c();this.a.Xa(this);this.ya(this.r,h);Xc(this).f(this,"enter",this.Ib).f(this,"highlight",this.Jb).f(this,"unhighlight",this.Rb).f(this,"open",this.Mb).f(this,"close",this.Gb).f(a,"mousedown",this.qa).f(Nb(a),"mouseup",this.Hb).f(a,["mousedown","mouseup","mouseover","mouseout"],this.Fb);this.ca()&&Rd(this,h)};
-var Rd=function(a,b){var c=Xc(a),d=a.o();b?c.f(d,"focus",a.pa).f(d,"blur",a.oa).f(a.Ga(),"key",a.U):c.Q(d,"focus",a.pa).Q(d,"blur",a.oa).Q(a.Ga(),"key",a.U)};E=Y[y];E.aa=function(){Sd(this,-1);this.h&&Gd(this.h,k);this.ja=k;Y.d.aa[B](this)};E.g=function(){Y.d.g[B](this);if(this.t){this.t.M();this.t=i}this.a=this.h=this.R=i};E.Ib=function(){return h};
-E.Jb=function(a){var b=bd(this,a[A]);if(b>-1&&b!=this.l){var c=this.s(this.l);c&&c.F(k);this.l=b;c=this.s(this.l);this.ja&&c[va](h);if(this.h&&c!=this.h)c.v&64?Gd(c,h):Gd(this.h,k)}ed(this.c(),"activedescendant",a[A].c().id)};E.Rb=function(a){if(a[A]==this.s(this.l))this.l=-1;ed(this.c(),"activedescendant","")};E.Mb=function(a){if((a=a[A])&&a!=this.h&&a.k==this){this.h&&Gd(this.h,k);this.h=a}};E.Gb=function(a){if(a[A]==this.h)this.h=i};
-E.qa=function(a){this.$&&Td(this,h);var b=this.o(),c;a:{if(b)if((c=b.getAttributeNode("tabindex"))&&c.specified){c=b.tabIndex;c=typeof c=="number"&&c>=0;break a}c=k}c?b.focus():a[oa]()};E.Hb=function(){this.ja=k};E.Fb=function(a){var b;a:{b=a[A];if(this.R)for(var c=this.c();b&&b[D]&&b!=c;){var d=b.id;if(d in this.R){b=this.R[d];break a}b=b[D]}b=i}if(b)switch(a[w]){case "mousedown":b.qa(a);break;case "mouseup":b.Wa(a);break;case "mouseover":b.Va(a);break;case "mouseout":b.Ua(a);break}};E.pa=function(){};
-E.oa=function(){Sd(this,-1);this.ja=k;this.h&&Gd(this.h,k)};E.U=function(a){if(this.j()&&$c(this)!=0&&this.Ta(a)){a[oa]();a[ya]();return h}return k};
-E.Ta=function(a){var b=this.s(this.l);if(b&&typeof b.U=="function"&&b.U(a))return h;if(this.h&&this.h!=b&&typeof this.h.U=="function"&&this.h.U(a))return h;switch(a[t]){case 27:if(this.ca())this.o().blur();else return k;break;case 36:Vd(this);break;case 35:Wd(this);break;case 38:if(this.W=="vertical")Xd(this);else return k;break;case 37:if(this.W=="horizontal")ad(this)?Yd(this):Xd(this);else return k;break;case 40:if(this.W=="vertical")Yd(this);else return k;break;case 39:if(this.W=="horizontal")ad(this)?
-Xd(this):Yd(this);else return k;break;default:return k}return h};var Qd=function(a,b){var c=b.c();c=c.id||(c.id=Vc(b));if(!a.R)a.R={};a.R[c]=b};Y[y].Aa=function(a,b){Y.d.Aa[B](this,a,b)};Y[y].Oa=function(a,b,c){a.Na|=2;a.Na|=64;if(this.ca()||!this.xb)Hd(a,32,k);a.e&&k!=a.ra&&Bd(a,k);a.ra=k;Y.d.Oa[B](this,a,b,c);c&&this.e&&Qd(this,a);b<=this.l&&this.l++};
-Y[y].removeChild=function(a,b){var c=bd(this,a);if(c!=-1)if(c==this.l)a.F(k);else c<this.l&&this.l--;(c=a.c())&&c.id&&ab(this.R,c.id);b=a=Y.d[Ba][B](this,a,b);b.e&&h!=b.ra&&Bd(b,h);b.ra=h;return a};var Pd=function(a,b){if(a.c())e(l("Component already rendered"));a.W=b};E=Y[y];E.P=function(){return this.r};E.ya=function(a,b){if(b||this.r!=a&&this[s](a?"show":"hide")){this.r=a;var c=this.c();if(c){Pc(c,a);this.ca()&&Nd(this.a,this.o(),this.$&&this.r);this.r&&!b&&this[s]("aftershow")}return h}return k};
-E.j=function(){return this.$};E.va=function(a){if(this.$!=a&&this[s](a?"enable":"disable")){if(a){this.$=h;Zc(this,function(b){if(b.wb)delete b.wb;else b.va(h)})}else{Zc(this,function(b){if(b.j())b.va(k);else b.wb=h});this.ja=this.$=k}this.ca()&&Nd(this.a,this.o(),a&&this.r)}};E.ca=function(){return this.Ra};E.wa=function(a){a!=this.Ra&&this.e&&Rd(this,a);this.Ra=a;this.$&&this.r&&Nd(this.a,this.o(),a)};var Sd=function(a,b){if(b=a.s(b))b.F(h);else a.l>-1&&a.s(a.l).F(k)};
-Y[y].F=function(a){Sd(this,bd(this,a))};var Vd=function(a){Zd(a,function(b,c){return(b+1)%c},$c(a)-1)},Wd=function(a){Zd(a,function(b,c){b--;return b<0?c-1:b},0)},Yd=function(a){Zd(a,function(b,c){return(b+1)%c},a.l)},Xd=function(a){Zd(a,function(b,c){b--;return b<0?c-1:b},a.l)},Zd=function(a,b,c){c=c<0?bd(a,a.h):c;var d=$c(a);c=b(c,d);for(var f=0;f<=d;){var g=a.s(c);if(g&&g.P()&&g.j()&&g.v&2){a.$a(c);return h}f++;c=b(c,d)}return k};Y[y].$a=function(a){Sd(this,a)};var Td=function(a,b){a.ja=b};var $d=function(){};L($d,rd);Ia($d);E=$d[y];E.p=function(){return"goog-tab"};E.ea=function(){return"tab"};E.n=function(a){var b=$d.d.n[B](this,a);(a=a.Sa())&&this.cb(b,a);return b};E.L=function(a,b){b=$d.d.L[B](this,a,b);var c=this.Sa(b);c&&be(a,c);if(a.m&8)if((c=a.k)&&J(c.da)){a.G(8,k);c.da(a)}return b};E.Sa=function(a){return a.title||""};E.cb=function(a,b){if(a)a.title=b||""};var ce=function(a,b,c){W[B](this,a,b||$d.T(),c);Hd(this,8,h);this.Na|=9};L(ce,W);ce[y].Sa=function(){return this.ub};ce[y].cb=function(a){this.kb().cb(this.c(),a);this.ub=a};var be=function(a,b){a.ub=b};qd("goog-tab",function(){return new ce(i)});var de=function(){};L(de,Md);Ia(de);de[y].p=function(){return"goog-tab-bar"};de[y].ea=function(){return"tablist"};de[y].bb=function(a,b,c){this.ob||ee(this);var d=this.ob[b];d?fe(a,d):de.d.bb[B](this,a,b,c)};de[y].na=function(a){var b=de.d.na[B](this,a);this.Ca||ge(this);b[m](this.Ca[a.Wb]);return b};var ge=function(a){var b=a.p();a.Ca=gb("top",b+"-top","bottom",b+"-bottom","start",b+"-start","end",b+"-end")},ee=function(a){a.Ca||ge(a);a.ob=db(a.Ca)};var Z=function(a,b,c){fe(this,a||"top");Y[B](this,this.W,b||de.T(),c);a=Xc(this);a.f(this,"select",this.Pb);a.f(this,"unselect",this.Qb);a.f(this,"disable",this.Nb);a.f(this,"hide",this.Ob)};L(Z,Y);Z[y].yb=h;Z[y].D=i;Z[y].g=function(){Z.d.g[B](this);this.D=i};Z[y].removeChild=function(a,b){he(this,a);return Z.d[Ba][B](this,a,b)};var fe=function(a,b){Pd(a,b=="start"||b=="end"?"vertical":"horizontal");a.Wb=b};Z[y].$a=function(a){Z.d.$a[B](this,a);this.yb&&ie(this,a)};
-Z[y].da=function(a){if(a)Dd(a,h);else this.D&&Dd(this.D,k)};var ie=function(a,b){a.da(a.s(b))},he=function(a,b){if(b&&b==a.D){for(var c=bd(a,b),d=c-1;b=a.s(d);d--)if(b.P()&&b.j()){a.da(b);return}for(c=c+1;b=a.s(c);c++)if(b.P()&&b.j()){a.da(b);return}a.da(i)}};E=Z[y];E.Pb=function(a){this.D&&this.D!=a[A]&&Dd(this.D,k);this.D=a[A]};E.Qb=function(a){if(a[A]==this.D)this.D=i};E.Nb=function(a){he(this,a[A])};E.Ob=function(a){he(this,a[A])};E.pa=function(){this.s(this.l)||this.F(this.D||this.s(0))};
-qd("goog-tab-bar",function(){return new Z});var $=function(a,b,c){this.A=Pb(a)||i;this.ib=b?Pb(b):i;this.i=c==h;if(this.A){this.A.tabIndex=0;S(this.A,"click",this.Zb,k,this);S(this.A,"keydown",this.$b,k,this)}this.Y(this.i)};L($,Nc);$[y].g=function(){this.A&&Hc(this.A);$.d.g[B](this)};$[y].Y=function(a){if(this.ib)this.ib[za].display=a?"":"none";if(this.A)if(a){Lb(this.A,"goog-zippy-collapsed");Kb(this.A,"goog-zippy-expanded")}else{Lb(this.A,"goog-zippy-expanded");Kb(this.A,"goog-zippy-collapsed")}this.i=a;this[s](new je("toggle",this,this.i))};
-$[y].$b=function(a){if(a[t]==13||a[t]==32){this.Y(!this.i);a[oa]();a[ya]()}};$[y].Zb=function(){this.Y(!this.i)};var je=function(a,b,c){P[B](this,a,b);this.cc=c};L(je,P);var le=function(a,b){this.db=[];a=Pb(a);a=Qb(da,"span","ae-zippy",a);for(var c=0,d;d=a[c];c++){for(var f=d[D][D][D][ua];f&&f[na]!=1;)f=f[ua];this.db[m](new $(d,f,k))}this.Cb=new ke(this.db,Pb(b))};le[y].Db=function(){return this.Cb};le[y].Eb=function(){return this.db};
-var ke=function(a,b){this.za=a;if(this.za[x]){a=0;for(var c;c=this.za[a];a++)S(c,"toggle",this.bc,k,this)}this.Za=0;this.i=k;a="ae-toggle ae-plus ae-action";this.za[x]||(a+=" ae-disabled");this.S=Ub("span",{className:a},"Expand All");S(this.S,"click",this.Ab,k,this);b[ka](this.S)};ke[y].Ab=function(){this.za[x]&&this.Y(!this.i)};ke[y].bc=function(a){a=a.currentTarget;if(a.i)this.Za+=1;else this.Za-=1;if(a.i!=this.i)if(a.i){this.i=h;me(this,h)}else if(this.Za==0){this.i=k;me(this,k)}};
-ke[y].Y=function(a){this.i=a;a=0;for(var b;b=this.za[a];a++)b.i!=this.i&&b.Y(this.i);me(this)};
-var me=function(a,b){if(b!==ba?b:a.i){Lb(a.S,"ae-plus");Kb(a.S,"ae-minus");Yb(a.S,"Collapse All")}else{Lb(a.S,"ae-minus");Kb(a.S,"ae-plus");Yb(a.S,"Expand All")}},ne=function(a){this.ac=a;this.tb={};var b,c=Ub("div",{},b=Ub("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),Ub("div",{className:"goog-tab-bar-clear"}),a=Ub("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new Z;d.L(b);S(d,"select",this.fb,k,this);S(d,"unselect",this.fb,k,this);b=
-0;for(var f;f=this.ac[b];b++)if(f=Pb("ae-stats-details-"+f)){var g=Qb(da,"h2",i,f)[0],j;j=void 0;if(N&&"innerText"in g)j=g.innerText[n](/(\r\n|\r|\n)/g,"\n");else{j=[];cc(g,j,h);j=j[Da]("")}j=j[n](/\xAD/g,"");j=j[n](/ +/g," ");if(j!=" ")j=j[n](/^\s*/,"");j=j;Wb(g);g=new ce(j);this.tb[K(g)]=f;d.Aa(g,h);a[ka](f);b==0?d.da(g):Pc(f,k)}Pb("bd")[ka](c)};ne[y].fb=function(a){var b=this.tb[K(a[A])];Pc(b,a[w]=="select")};Fa("ae.Stats.Details.Tabs",ne,void 0);Fa("goog.ui.Zippy",$,void 0);$[y].setExpanded=$[y].Y;
-Fa("ae.Stats.MakeZippys",le,void 0);le[y].getExpandCollapse=le[y].Db;le[y].getZippys=le[y].Eb;ke[y].setExpanded=ke[y].Y;})();
+/* Copyright 2008-10 Google Inc. All Rights Reserved. */ (function(){function f(a){throw a;}var h=true,i=null,k=false,aa=Object,l=Error,ba=undefined,ca=parseInt,da=document,ea=Math;function fa(a,b){return a.currentTarget=b}function ga(a,b){return a.keyCode=b}function ha(a,b){return a.type=b}function ia(a,b){return a.length=b}function ja(a,b){return a.className=b}function ka(a,b){return a.target=b}
+var la="appendChild",m="push",ma="slice",n="replace",na="nodeType",oa="preventDefault",p="indexOf",r="dispatchEvent",pa="capture",qa="nodeName",t="write",ra="charCode",u="keyCode",v="firstChild",sa="setAttribute",ta="handleEvent",w="type",ua="nextSibling",va="setActive",wa="toString",y="length",xa="propertyIsEnumerable",z="prototype",ya="ctrlKey",A="split",za="stopPropagation",Aa="style",Ba="body",Ca="removeChild",B="target",C="call",D="apply",Da="navigator",E="parentNode",Ea="join",Fa="nodeValue",
+F,G=this,Ga=function(a,b,c){a=a[A](".");c=c||G;!(a[0]in c)&&c.execScript&&c.execScript("var "+a[0]);for(var d;a[y]&&(d=a.shift());)if(!a[y]&&b!==ba)c[d]=b;else c=c[d]?c[d]:(c[d]={})},Ha=function(a,b){a=a[A](".");b=b||G;for(var c;c=a.shift();)if(b[c])b=b[c];else return i;return b},Ia=function(){},Ja=function(a){a.R=function(){return a.bc||(a.bc=new a)}},Ka=function(a){var b=typeof a;if(b=="object")if(a){if(a instanceof Array||!(a instanceof aa)&&aa[z][wa][C](a)=="[object Array]"||typeof a[y]=="number"&&
+typeof a.splice!="undefined"&&typeof a[xa]!="undefined"&&!a[xa]("splice"))return"array";if(!(a instanceof aa)&&(aa[z][wa][C](a)=="[object Function]"||typeof a[C]!="undefined"&&typeof a[xa]!="undefined"&&!a[xa]("call")))return"function"}else return"null";else if(b=="function"&&typeof a[C]=="undefined")return"object";return b},La=function(a){return Ka(a)=="array"},Ma=function(a){var b=Ka(a);return b=="array"||b=="object"&&typeof a[y]=="number"},H=function(a){return typeof a=="string"},K=function(a){return Ka(a)==
+"function"},Na=function(a){a=Ka(a);return a=="object"||a=="array"||a=="function"},Qa=function(a){if(a.hasOwnProperty&&a.hasOwnProperty(Oa))return a[Oa];a[Oa]||(a[Oa]=++Pa);return a[Oa]},Oa="closure_uid_"+ea.floor(ea.random()*2147483648)[wa](36),Pa=0,Ra=function(a){var b=Ka(a);if(b=="object"||b=="array"){if(a.Jb)return a.Jb[C](a);b=b=="array"?[]:{};for(var c in a)b[c]=Ra(a[c]);return b}return a},Sa=function(a){var b=Array[z][ma][C](arguments,1);return function(){var c=Array[z][ma][C](arguments);c.unshift[D](c,
+b);return a[D](this,c)}},L=function(a,b){function c(){}c.prototype=b[z];a.d=b[z];a.prototype=new c;a[z].constructor=a};var M=Array[z],Ta=M[p]?function(a,b,c){return M[p][C](a,b,c)}:function(a,b,c){c=c==i?0:c<0?ea.max(0,a[y]+c):c;if(H(a)){if(!H(b)||b[y]!=1)return-1;return a[p](b,c)}for(c=c;c<a[y];c++)if(c in a&&a[c]===b)return c;return-1},Ua=M.forEach?function(a,b,c){M.forEach[C](a,b,c)}:function(a,b,c){for(var d=a[y],e=H(a)?a[A](""):a,g=0;g<d;g++)g in e&&b[C](c,e[g],g,a)},Va=M.every?function(a,b,c){return M.every[C](a,b,c)}:function(a,b,c){for(var d=a[y],e=H(a)?a[A](""):a,g=0;g<d;g++)if(g in e&&!b[C](c,e[g],g,a))return k;
+return h},Wa=function(a,b){return Ta(a,b)>=0},Xa=function(a,b){b=Ta(a,b);var c;if(c=b>=0)M.splice[C](a,b,1)[y]==1;return c},Ya=function(){return M.concat[D](M,arguments)},Za=function(a){if(La(a))return Ya(a);else{for(var b=[],c=0,d=a[y];c<d;c++)b[c]=a[c];return b}},ab=function(a){return M.splice[D](a,$a(arguments,1))},$a=function(a,b,c){return arguments[y]<=2?M[ma][C](a,b):M[ma][C](a,b,c)};var bb=function(a,b,c){for(var d in a)b[C](c,a[d],d,a)},cb=function(a,b){var c;if(c=b in a)delete a[b];return c},db=function(a,b,c){if(b in a)f(l('The object already contains the key "'+b+'"'));a[b]=c},eb=function(a,b,c){if(b in a)return a[b];return c},fb=function(a){var b={};for(var c in a)b[a[c]]=c;return b},gb=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"],hb=function(a){for(var b,c,d=1;d<arguments[y];d++){c=arguments[d];for(b in c)a[b]=
+c[b];for(var e=0;e<gb[y];e++){b=gb[e];if(aa[z].hasOwnProperty[C](c,b))a[b]=c[b]}}},ib=function(){var a=arguments[y];if(a==1&&La(arguments[0]))return ib[D](i,arguments[0]);if(a%2)f(l("Uneven number of arguments"));for(var b={},c=0;c<a;c+=2)b[arguments[c]]=arguments[c+1];return b};var jb=function(a){return a[n](/^[\s\xa0]+|[\s\xa0]+$/g,"")},pb=function(a,b){if(b)return a[n](kb,"&amp;")[n](lb,"&lt;")[n](mb,"&gt;")[n](nb,"&quot;");else{if(!ob.test(a))return a;if(a[p]("&")!=-1)a=a[n](kb,"&amp;");if(a[p]("<")!=-1)a=a[n](lb,"&lt;");if(a[p](">")!=-1)a=a[n](mb,"&gt;");if(a[p]('"')!=-1)a=a[n](nb,"&quot;");return a}},kb=/&/g,lb=/</g,mb=/>/g,nb=/\"/g,ob=/[&<>\"]/,rb=function(a,b){var c=0;a=jb(String(a))[A](".");b=jb(String(b))[A](".");for(var d=ea.max(a[y],b[y]),e=0;c==0&&e<d;e++){var g=
+a[e]||"",j=b[e]||"",o=new RegExp("(\\d*)(\\D*)","g"),q=new RegExp("(\\d*)(\\D*)","g");do{var s=o.exec(g)||["","",""],x=q.exec(j)||["","",""];if(s[0][y]==0&&x[0][y]==0)break;c=qb(s[1][y]==0?0:ca(s[1],10),x[1][y]==0?0:ca(x[1],10))||qb(s[2][y]==0,x[2][y]==0)||qb(s[2],x[2])}while(c==0)}return c},qb=function(a,b){if(a<b)return-1;else if(a>b)return 1;return 0};var sb,tb,ub,vb,wb=function(){return G[Da]?G[Da].userAgent:i};vb=ub=tb=sb=k;var xb;if(xb=wb()){var yb=G[Da];sb=xb[p]("Opera")==0;tb=!sb&&xb[p]("MSIE")!=-1;ub=!sb&&xb[p]("WebKit")!=-1;vb=!sb&&!ub&&yb.product=="Gecko"}var zb=sb,N=tb,O=vb,Ab=ub,Bb=G[Da],Cb=(Bb&&Bb.platform||"")[p]("Mac")!=-1,Db="",Eb;
+if(zb&&G.opera){var Fb=G.opera.version;Db=typeof Fb=="function"?Fb():Fb}else{if(O)Eb=/rv\:([^\);]+)(\)|;)/;else if(N)Eb=/MSIE\s+([^\);]+)(\)|;)/;else if(Ab)Eb=/WebKit\/(\S+)/;if(Eb){var Gb=Eb.exec(wb());Db=Gb?Gb[1]:""}}var Hb=Db,Ib={},Jb=function(a){return Ib[a]||(Ib[a]=rb(Hb,a)>=0)};var Kb,Lb=function(a){return(a=a.className)&&typeof a[A]=="function"?a[A](/\s+/):[]},Mb=function(a){var b=Lb(a),c;c=$a(arguments,1);for(var d=0,e=0;e<c[y];e++)if(!Wa(b,c[e])){b[m](c[e]);d++}c=d==c[y];ja(a,b[Ea](" "));return c},Nb=function(a){var b=Lb(a),c;c=$a(arguments,1);for(var d=0,e=0;e<b[y];e++)if(Wa(c,b[e])){ab(b,e--,1);d++}c=d==c[y];ja(a,b[Ea](" "));return c};var Qb=function(a){return a?new Ob(Pb(a)):Kb||(Kb=new Ob)},Rb=function(a){return H(a)?da.getElementById(a):a},Sb=function(a,b,c,d){d=d||a;b=b&&b!="*"?b.toUpperCase():"";if(d.querySelectorAll&&(b||c)&&(!Ab||a.compatMode=="CSS1Compat"||Jb("528")))return d.querySelectorAll(b+(c?"."+c:""));if(c&&d.getElementsByClassName){a=d.getElementsByClassName(c);if(b){d={};for(var e=0,g=0,j;j=a[g];g++)if(b==j[qa])d[e++]=j;ia(d,e);return d}else return a}a=d.getElementsByTagName(b||"*");if(c){d={};for(g=e=0;j=a[g];g++){b=
+j.className;if(typeof b[A]=="function"&&Wa(b[A](/\s+/),c))d[e++]=j}ia(d,e);return d}else return a},Ub=function(a,b){bb(b,function(c,d){if(d=="style")a[Aa].cssText=c;else if(d=="class")ja(a,c);else if(d=="for")a.htmlFor=c;else if(d in Tb)a[sa](Tb[d],c);else a[d]=c})},Tb={cellpadding:"cellPadding",cellspacing:"cellSpacing",colspan:"colSpan",rowspan:"rowSpan",valign:"vAlign",height:"height",width:"width",usemap:"useMap",frameborder:"frameBorder",type:"type"},Wb=function(){return Vb(da,arguments)},Vb=
+function(a,b){var c=b[0],d=b[1];if(N&&d&&(d.name||d[w])){c=["<",c];d.name&&c[m](' name="',pb(d.name),'"');if(d[w]){c[m](' type="',pb(d[w]),'"');d=Ra(d);delete d[w]}c[m](">");c=c[Ea]("")}var e=a.createElement(c);if(d)if(H(d))ja(e,d);else Ub(e,d);if(b[y]>2){d=function(j){if(j)e[la](H(j)?a.createTextNode(j):j)};for(c=2;c<b[y];c++){var g=b[c];Ma(g)&&!(Na(g)&&g[na]>0)?Ua(Xb(g)?Za(g):g,d):d(g)}}return e},Yb=function(a){return a&&a[E]?a[E][Ca](a):i},Zb=function(a,b){if(a.contains&&b[na]==1)return a==b||
+a.contains(b);if(typeof a.compareDocumentPosition!="undefined")return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b[E];return b==a},Pb=function(a){return a[na]==9?a:a.ownerDocument||a.document},$b=function(a,b){if("textContent"in a)a.textContent=b;else if(a[v]&&a[v][na]==3){for(;a.lastChild!=a[v];)a[Ca](a.lastChild);a[v].data=b}else{for(var c;c=a[v];)a[Ca](c);a[la](Pb(a).createTextNode(b))}},ac={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},bc={IMG:" ",BR:"\n"},cc=function(a){var b=
+a.getAttributeNode("tabindex");if(b&&b.specified){a=a.tabIndex;return typeof a=="number"&&a>=0}return k},dc=function(a,b){if(b)a.tabIndex=0;else a.removeAttribute("tabIndex")},ec=function(a,b,c){if(!(a[qa]in ac))if(a[na]==3)c?b[m](String(a[Fa])[n](/(\r\n|\r|\n)/g,"")):b[m](a[Fa]);else if(a[qa]in bc)b[m](bc[a[qa]]);else for(a=a[v];a;){ec(a,b,c);a=a[ua]}},Xb=function(a){if(a&&typeof a[y]=="number")if(Na(a))return typeof a.item=="function"||typeof a.item=="string";else if(K(a))return typeof a.item==
+"function";return k},Ob=function(a){this.G=a||G.document||da};F=Ob[z];F.Fa=Qb;F.c=function(a){return H(a)?this.G.getElementById(a):a};F.m=function(){return Vb(this.G,arguments)};F.createElement=function(a){return this.G.createElement(a)};F.createTextNode=function(a){return this.G.createTextNode(a)};F.appendChild=function(a,b){a[la](b)};F.contains=Zb;var fc=function(){};fc[z].Sa=k;fc[z].K=function(){if(!this.Sa){this.Sa=h;this.f()}};fc[z].f=function(){};var gc=function(a,b){ha(this,a);ka(this,b);fa(this,this[B])};L(gc,fc);F=gc[z];F.f=function(){delete this[w];delete this[B];delete this.currentTarget};F.V=k;F.ka=h;F.stopPropagation=function(){this.V=h};F.preventDefault=function(){this.ka=k};var hc=function(a,b){a&&this.sa(a,b)};L(hc,gc);var ic=[1,4,2];F=hc[z];ka(F,i);F.relatedTarget=i;F.offsetX=0;F.offsetY=0;F.clientX=0;F.clientY=0;F.screenX=0;F.screenY=0;F.button=0;ga(F,0);F.charCode=0;F.ctrlKey=k;F.altKey=k;F.shiftKey=k;F.metaKey=k;F.ic=k;F.M=i;
+F.sa=function(a,b){var c=ha(this,a[w]);ka(this,a[B]||a.srcElement);fa(this,b);if(b=a.relatedTarget){if(O)try{b=b[qa]&&b}catch(d){b=i}}else if(c=="mouseover")b=a.fromElement;else if(c=="mouseout")b=a.toElement;this.relatedTarget=b;this.offsetX=a.offsetX!==ba?a.offsetX:a.layerX;this.offsetY=a.offsetY!==ba?a.offsetY:a.layerY;this.clientX=a.clientX!==ba?a.clientX:a.pageX;this.clientY=a.clientY!==ba?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;ga(this,a[u]||
+0);this.charCode=a[ra]||(c=="keypress"?a[u]:0);this.ctrlKey=a[ya];this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.ic=Cb?a.metaKey:a[ya];this.M=a;delete this.ka;delete this.V};var jc=function(a,b){return N?a[w]=="click"?b==0:!!(a.M.button&ic[b]):a.M.button==b};hc[z].stopPropagation=function(){this.V=h;if(this.M[za])this.M[za]();else this.M.cancelBubble=h};var kc=N&&!Jb("8");
+hc[z].preventDefault=function(){this.ka=k;var a=this.M;if(a[oa])a[oa]();else{a.returnValue=k;if(kc)try{if(a[ya]||a[u]>=112&&a[u]<=123)ga(a,-1)}catch(b){}}};hc[z].f=function(){hc.d.f[C](this);this.M=i;ka(this,i);fa(this,i);this.relatedTarget=i};var P=function(a,b){this.wb=b;this.$=[];if(a>this.wb)f(l("[goog.structs.SimplePool] Initial cannot be greater than max"));for(b=0;b<a;b++)this.$[m](this.I?this.I():{})};L(P,fc);P[z].I=i;P[z].ob=i;var lc=function(a){if(a.$[y])return a.$.pop();return a.I?a.I():{}},nc=function(a,b){a.$[y]<a.wb?a.$[m](b):mc(a,b)},mc=function(a,b){if(a.ob)a.ob(b);else if(K(b.K))b.K();else for(var c in b)delete b[c]};P[z].f=function(){P.d.f[C](this);for(var a=this.$;a[y];)mc(this,a.pop());delete this.$};var oc;var pc=(oc="ScriptEngine"in G&&G.ScriptEngine()=="JScript")?G.ScriptEngineMajorVersion()+"."+G.ScriptEngineMinorVersion()+"."+G.ScriptEngineBuildVersion():"0";var qc=function(){},rc=0;F=qc[z];F.key=0;F.ja=k;F.lb=k;F.sa=function(a,b,c,d,e,g){if(K(a))this.ub=h;else if(a&&a[ta]&&K(a[ta]))this.ub=k;else f(l("Invalid listener argument"));this.ha=a;this.yb=b;this.src=c;ha(this,d);this.capture=!!e;this.Ha=g;this.lb=k;this.key=++rc;this.ja=k};F.handleEvent=function(a){if(this.ub)return this.ha[C](this.Ha||this.src,a);return this.ha[ta][C](this.ha,a)};var sc,tc,uc,vc,wc,xc,yc,zc,Ac,Bc,Cc;
+(function(){function a(){return{F:0,A:0}}function b(){return[]}function c(){var I=function(ce){return j[C](I.src,I.key,ce)};return I}function d(){return new qc}function e(){return new hc}var g=oc&&!(rb(pc,"5.7")>=0),j;xc=function(I){j=I};if(g){sc=function(){return lc(o)};tc=function(I){nc(o,I)};uc=function(){return lc(q)};vc=function(I){nc(q,I)};wc=function(){return lc(s)};yc=function(){nc(s,c())};zc=function(){return lc(x)};Ac=function(I){nc(x,I)};Bc=function(){return lc(J)};Cc=function(I){nc(J,
+I)};var o=new P(0,600);o.I=a;var q=new P(0,600);q.I=b;var s=new P(0,600);s.I=c;var x=new P(0,600);x.I=d;var J=new P(0,600);J.I=e}else{sc=a;tc=Ia;uc=b;vc=Ia;wc=c;yc=Ia;zc=d;Ac=Ia;Bc=e;Cc=Ia}})();var Dc={},Q={},Ec={},Fc={},R=function(a,b,c,d,e){if(b)if(La(b)){for(var g=0;g<b[y];g++)R(a,b[g],c,d,e);return i}else{d=!!d;var j=Q;b in j||(j[b]=sc());j=j[b];if(!(d in j)){j[d]=sc();j.F++}j=j[d];var o=Qa(a),q;j.A++;if(j[o]){q=j[o];for(g=0;g<q[y];g++){j=q[g];if(j.ha==c&&j.Ha==e){if(j.ja)break;return q[g].key}}}else{q=j[o]=uc();j.F++}g=wc();g.src=a;j=zc();j.sa(c,g,a,b,d,e);c=j.key;g.key=c;q[m](j);Dc[c]=j;Ec[o]||(Ec[o]=uc());Ec[o][m](j);if(a.addEventListener){if(a==G||!a.nb)a.addEventListener(b,g,d)}else a.attachEvent(Gc(b),
+g);return c}else f(l("Invalid event type"))},Hc=function(a,b,c,d,e){if(La(b)){for(var g=0;g<b[y];g++)Hc(a,b[g],c,d,e);return i}d=!!d;a=Ic(a,b,d);if(!a)return k;for(g=0;g<a[y];g++)if(a[g].ha==c&&a[g][pa]==d&&a[g].Ha==e)return Jc(a[g].key);return k},Jc=function(a){if(!Dc[a])return k;var b=Dc[a];if(b.ja)return k;var c=b.src,d=b[w],e=b.yb,g=b[pa];if(c.removeEventListener){if(c==G||!c.nb)c.removeEventListener(d,e,g)}else c.detachEvent&&c.detachEvent(Gc(d),e);c=Qa(c);e=Q[d][g][c];if(Ec[c]){var j=Ec[c];
+Xa(j,b);j[y]==0&&delete Ec[c]}b.ja=h;e.xb=h;Kc(d,g,c,e);delete Dc[a];return h},Kc=function(a,b,c,d){if(!d.Ka)if(d.xb){for(var e=0,g=0;e<d[y];e++)if(d[e].ja){var j=d[e].yb;j.src=i;yc(j);Ac(d[e])}else{if(e!=g)d[g]=d[e];g++}ia(d,g);d.xb=k;if(g==0){vc(d);delete Q[a][b][c];Q[a][b].F--;if(Q[a][b].F==0){tc(Q[a][b]);delete Q[a][b];Q[a].F--}if(Q[a].F==0){tc(Q[a]);delete Q[a]}}}},Lc=function(a,b,c){var d=0,e=a==i,g=b==i,j=c==i;c=!!c;if(e)bb(Ec,function(q){for(var s=q[y]-1;s>=0;s--){var x=q[s];if((g||b==x[w])&&
+(j||c==x[pa])){Jc(x.key);d++}}});else{a=Qa(a);if(Ec[a]){a=Ec[a];for(e=a[y]-1;e>=0;e--){var o=a[e];if((g||b==o[w])&&(j||c==o[pa])){Jc(o.key);d++}}}}return d},Ic=function(a,b,c){var d=Q;if(b in d){d=d[b];if(c in d){d=d[c];a=Qa(a);if(d[a])return d[a]}}return i},Gc=function(a){if(a in Fc)return Fc[a];return Fc[a]="on"+a},Nc=function(a,b,c,d,e){var g=1;b=Qa(b);if(a[b]){a.A--;a=a[b];if(a.Ka)a.Ka++;else a.Ka=1;try{for(var j=a[y],o=0;o<j;o++){var q=a[o];if(q&&!q.ja)g&=Mc(q,e)!==k}}finally{a.Ka--;Kc(c,d,b,
+a)}}return Boolean(g)},Mc=function(a,b){b=a[ta](b);a.lb&&Jc(a.key);return b};
+xc(function(a,b){if(!Dc[a])return h;a=Dc[a];var c=a[w],d=Q;if(!(c in d))return h;d=d[c];var e,g;if(N){e=b||Ha("window.event");b=h in d;var j=k in d;if(b){if(e[u]<0||e.returnValue!=ba)return h;a:{var o=k;if(e[u]==0)try{ga(e,-1);break a}catch(q){o=h}if(o||e.returnValue==ba)e.returnValue=h}}o=Bc();o.sa(e,this);e=h;try{if(b){for(var s=uc(),x=o.currentTarget;x;x=x[E])s[m](x);g=d[h];g.A=g.F;for(var J=s[y]-1;!o.V&&J>=0&&g.A;J--){fa(o,s[J]);e&=Nc(g,s[J],c,h,o)}if(j){g=d[k];g.A=g.F;for(J=0;!o.V&&J<s[y]&&g.A;J++){fa(o,
+s[J]);e&=Nc(g,s[J],c,k,o)}}}else e=Mc(a,o)}finally{if(s){ia(s,0);vc(s)}o.K();Cc(o)}return e}g=new hc(b,this);try{e=Mc(a,g)}finally{g.K()}return e});var Oc=function(a){this.sb=a};L(Oc,fc);
+var Pc=new P(0,100),S=function(a,b,c,d,e,g){if(La(c))for(var j=0;j<c[y];j++)S(a,b,c[j],d,e,g);else{b=R(b,c,d||a,e||k,g||a.sb||a);if(a.u)a.u[b]=h;else if(a.T){a.u=lc(Pc);a.u[a.T]=h;a.T=i;a.u[b]=h}else a.T=b}return a},Qc=function(a,b,c,d,e,g){if(a.T||a.u)if(La(c))for(var j=0;j<c[y];j++)Qc(a,b,c[j],d,e,g);else{a:{d=d||a;g=g||a.sb||a;e=!!(e||k);if(b=Ic(b,c,e))for(c=0;c<b[y];c++)if(b[c].ha==d&&b[c][pa]==e&&b[c].Ha==g){b=b[c];break a}b=i}if(b){b=b.key;Jc(b);if(a.u)cb(a.u,b);else if(a.T==b)a.T=i}}return a},
+Rc=function(a){if(a.u){for(var b in a.u){Jc(b);delete a.u[b]}nc(Pc,a.u);a.u=i}else a.T&&Jc(a.T)};Oc[z].f=function(){Oc.d.f[C](this);Rc(this)};Oc[z].handleEvent=function(){f(l("EventHandler.handleEvent not implemented"))};var Sc=function(){};L(Sc,fc);F=Sc[z];F.nb=h;F.La=i;F.hb=function(a){this.La=a};F.addEventListener=function(a,b,c,d){R(this,a,b,c,d)};F.removeEventListener=function(a,b,c,d){Hc(this,a,b,c,d)};
+F.dispatchEvent=function(a){a=a;if(H(a))a=new gc(a,this);else if(a instanceof gc)ka(a,a[B]||this);else{var b=a;a=new gc(a[w],this);hb(a,b)}b=1;var c,d=a[w],e=Q;if(d in e){e=e[d];d=h in e;var g;if(d){c=[];for(g=this;g;g=g.La)c[m](g);g=e[h];g.A=g.F;for(var j=c[y]-1;!a.V&&j>=0&&g.A;j--){fa(a,c[j]);b&=Nc(g,c[j],a[w],h,a)&&a.ka!=k}}if(k in e){g=e[k];g.A=g.F;if(d)for(j=0;!a.V&&j<c[y]&&g.A;j++){fa(a,c[j]);b&=Nc(g,c[j],a[w],k,a)&&a.ka!=k}else for(c=this;!a.V&&c&&g.A;c=c.La){fa(a,c);b&=Nc(g,c,a[w],k,a)&&a.ka!=
+k}}a=Boolean(b)}else a=h;return a};F.f=function(){Sc.d.f[C](this);Lc(this);this.La=i};var Tc=function(a,b){var c=Pb(a);if(c.defaultView&&c.defaultView.getComputedStyle)if(a=c.defaultView.getComputedStyle(a,""))return a[b];return i},Uc=function(a,b){a[Aa].display=b?"":"none"},Vc=O?"MozUserSelect":Ab?"WebkitUserSelect":i,Wc=function(a,b,c){c=!c?a.getElementsByTagName("*"):i;if(Vc){b=b?"none":"";a[Aa][Vc]=b;if(c){a=0;for(var d;d=c[a];a++)d[Aa][Vc]=b}}else if(N||zb){b=b?"on":"";a[sa]("unselectable",b);if(c)for(a=0;d=c[a];a++)d[sa]("unselectable",b)}};var Xc=function(){};Ja(Xc);Xc[z].fc=0;Xc.R();var T=function(a){this.w=a||Qb();this.ua=Yc};L(T,Sc);T[z].ac=Xc.R();var Yc=i,Zc=function(a,b){switch(a){case 1:return b?"disable":"enable";case 2:return b?"highlight":"unhighlight";case 4:return b?"activate":"deactivate";case 8:return b?"select":"unselect";case 16:return b?"check":"uncheck";case 32:return b?"focus":"blur";case 64:return b?"open":"close";default:}f(l("Invalid component state"))};F=T[z];F.aa=i;F.w=i;F.e=k;F.b=i;F.ua=i;F.ec=i;F.h=i;F.p=i;F.r=i;F.Cb=k;
+var $c=function(a){return a.aa||(a.aa=":"+(a.ac.fc++)[wa](36))},ad=function(a,b){if(a.h&&a.h.r){cb(a.h.r,a.aa);db(a.h.r,b,a)}a.aa=b};T[z].c=function(){return this.b};var bd=function(a){return a.ea||(a.ea=new Oc(a))},cd=function(a,b){if(a==b)f(l("Unable to set parent component"));if(b&&a.h&&a.aa&&(a.h.r&&a.aa?eb(a.h.r,a.aa)||i:i)&&a.h!=b)f(l("Unable to set parent component"));a.h=b;T.d.hb[C](a,b)};F=T[z];F.hb=function(a){if(this.h&&this.h!=a)f(l("Method not supported"));T.d.hb[C](this,a)};F.Fa=function(){return this.w};
+F.m=function(){this.b=this.w.createElement("div")};F.J=function(a){if(this.e)f(l("Component already rendered"));else if(a&&this.X(a)){this.Cb=h;if(!this.w||this.w.G!=Pb(a))this.w=Qb(a);this.Ra(a);this.H()}else f(l("Invalid element to decorate"))};F.X=function(){return h};F.Ra=function(a){this.b=a};F.H=function(){this.e=h;dd(this,function(a){!a.e&&a.c()&&a.H()})};F.Z=function(){dd(this,function(a){a.e&&a.Z()});this.ea&&Rc(this.ea);this.e=k};
+F.f=function(){T.d.f[C](this);this.e&&this.Z();if(this.ea){this.ea.K();delete this.ea}dd(this,function(a){a.K()});!this.Cb&&this.b&&Yb(this.b);this.h=this.ec=this.b=this.r=this.p=i};F.Aa=function(a,b){this.Pa(a,ed(this),b)};
+F.Pa=function(a,b,c){if(a.e&&(c||!this.e))f(l("Component already rendered"));if(b<0||b>ed(this))f(l("Child component index out of bounds"));if(!this.r||!this.p){this.r={};this.p=[]}if(a.h==this){this.r[$c(a)]=a;Xa(this.p,a)}else db(this.r,$c(a),a);cd(a,this);ab(this.p,b,0,a);if(a.e&&this.e&&a.h==this){c=this.N();c.insertBefore(a.c(),c.childNodes[b]||i)}else if(c){this.b||this.m();c=U(this,b+1);b=this.N();c=c?c.b:i;if(a.e)f(l("Component already rendered"));a.b||a.m();b?b.insertBefore(a.b,c||i):a.w.G[Ba][la](a.b);
+if(!a.h||a.h.e)a.H()}else this.e&&!a.e&&a.b&&a.H()};F.N=function(){return this.b};var fd=function(a){if(a.ua==i)a.ua="rtl"==(Tc(a.e?a.b:a.w.G[Ba],"direction")||((a.e?a.b:a.w.G[Ba]).currentStyle?(a.e?a.b:a.w.G[Ba]).currentStyle.direction:i)||(a.e?a.b:a.w.G[Ba])[Aa].direction);return a.ua};T[z].xa=function(a){if(this.e)f(l("Component already rendered"));this.ua=a};
+var ed=function(a){return a.p?a.p[y]:0},U=function(a,b){return a.p?a.p[b]||i:i},dd=function(a,b,c){a.p&&Ua(a.p,b,c)},gd=function(a,b){return a.p&&b?Ta(a.p,b):-1};T[z].removeChild=function(a,b){if(a){var c=H(a)?a:$c(a);a=this.r&&c?eb(this.r,c)||i:i;if(c&&a){cb(this.r,c);Xa(this.p,a);if(b){a.Z();a.b&&Yb(a.b)}cd(a,i)}}if(!a)f(l("Child is not in parent component"));return a};var hd=function(a,b){if(O){a[sa]("role",b);a.mc=b}},id=function(a,b,c){O&&a[sa]("aria-"+b,c)};var kd=function(a,b,c,d,e){if(!N&&!(Ab&&Jb("525")))return h;if(Cb&&e)return jd(a);if(e&&!d)return k;if(N&&!c&&(b==17||b==18))return k;if(N&&d&&b==a)return k;switch(a){case 13:return h;case 27:return!Ab}return jd(a)},jd=function(a){if(a>=48&&a<=57)return h;if(a>=96&&a<=106)return h;if(a>=65&&a<=90)return h;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return h;default:return k}};var V=function(a){a&&ld(this,a)};L(V,Sc);F=V[z];F.b=i;F.Ia=i;F.cb=i;F.Ja=i;F.ta=-1;F.fa=-1;
+var md={"3":13,"12":144,"63232":38,"63233":40,"63234":37,"63235":39,"63236":112,"63237":113,"63238":114,"63239":115,"63240":116,"63241":117,"63242":118,"63243":119,"63244":120,"63245":121,"63246":122,"63247":123,"63248":44,"63272":46,"63273":36,"63275":35,"63276":33,"63277":34,"63289":144,"63302":45},nd={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},od={61:187,
+59:186},pd=N||Ab&&Jb("525");V[z].Tb=function(a){if(pd&&!kd(a[u],this.ta,a.shiftKey,a[ya],a.altKey))this[ta](a);else this.fa=O&&a[u]in od?od[a[u]]:a[u]};V[z].Ub=function(){this.fa=this.ta=-1};
+V[z].handleEvent=function(a){var b=a.M,c,d;if(N&&a[w]=="keypress"){c=this.fa;d=c!=13&&c!=27?b[u]:0}else if(Ab&&a[w]=="keypress"){c=this.fa;d=b[ra]>=0&&b[ra]<63232&&jd(c)?b[ra]:0}else if(zb){c=this.fa;d=jd(c)?b[u]:0}else{c=b[u]||this.fa;d=b[ra]||0;if(Cb&&d==63&&!c)c=191}var e=c,g=b.keyIdentifier;if(c)if(c>=63232&&c in md)e=md[c];else{if(c==25&&a.shiftKey)e=9}else if(g&&g in nd)e=nd[g];a=e==this.ta;this.ta=e;b=new qd(e,d,a,b);try{this[r](b)}finally{b.K()}};V[z].c=function(){return this.b};
+var ld=function(a,b){a.Ja&&a.detach();a.b=b;a.Ia=R(a.b,"keypress",a);a.cb=R(a.b,"keydown",a.Tb,k,a);a.Ja=R(a.b,"keyup",a.Ub,k,a)};V[z].detach=function(){if(this.Ia){Jc(this.Ia);Jc(this.cb);Jc(this.Ja);this.Ja=this.cb=this.Ia=i}this.b=i;this.ta=-1};V[z].f=function(){V.d.f[C](this);this.detach()};var qd=function(a,b,c,d){d&&this.sa(d,void 0);ha(this,"key");ga(this,a);this.charCode=b;this.repeat=c};L(qd,hc);var sd=function(a){for(var b;a;){b=Qa(a);if(b=rd[b])break;a=a.d?a.d.constructor:i}if(b)return K(b.R)?b.R():new b;return i},ud=function(a,b){if(!a)f(l("Invalid class name "+a));if(!K(b))f(l("Invalid decorator function "+b));td[a]=b},rd={},td={};var vd=function(){},wd;Ja(vd);F=vd[z];F.da=function(){};F.m=function(a){return a.Fa().m("div",this.na(a)[Ea](" "),a.Ea)};F.N=function(a){return a};F.ma=function(a,b,c){if(a=a.c?a.c():a)if(N&&!Jb("7")){var d=xd(this,Lb(a),b);d[m](b);Sa(c?Mb:Nb,a)[D](i,d)}else c?Mb(a,b):Nb(a,b)};F.X=function(){return h};
+F.J=function(a,b){b.id&&ad(a,b.id);var c=this.N(b);c&&c[v]?yd(a,c[v][ua]?Za(c.childNodes):c[v]):yd(a,i);var d=0,e=this.o(),g=this.o(),j=k,o=k;c=k;var q=Lb(b);Ua(q,function(x){if(!j&&x==e){j=h;if(g==e)o=h}else if(!o&&x==g)o=h;else d|=zd(this,x)},this);a.l=d;if(!j){q[m](e);if(g==e)o=h}o||q[m](g);(a=a.z)&&q[m][D](q,a);if(N&&!Jb("7")){var s=xd(this,q);if(s[y]>0){q[m][D](q,s);c=h}}if(!j||!o||a||c)ja(b,q[Ea](" "));return b};F.bb=function(a){fd(a)&&this.xa(a.c(),h);a.j()&&this.wa(a,a.O())};
+F.Ma=function(a,b){Wc(a,!b,!N&&!zb)};F.xa=function(a,b){this.ma(a,this.o()+"-rtl",b)};F.ba=function(a){var b;if(a.v&32&&(b=a.n()))return cc(b);return k};F.wa=function(a,b){var c;if(a.v&32&&(c=a.n())){if(!b&&a.l&32){try{c.blur()}catch(d){}a.l&32&&a.oa(i)}cc(c)!=b&&dc(c,b)}};F.ya=function(a,b){Uc(a,b)};F.D=function(a,b,c){var d=a.c();if(d){var e=Ad(this,b);e&&this.ma(a,e,c);if(O){wd||(wd=ib(1,"disabled",4,"pressed",8,"selected",16,"checked",64,"expanded"));(a=wd[b])&&id(d,a,c)}}};F.n=function(a){return a.c()};
+F.o=function(){return"goog-control"};F.na=function(a){var b=this.o(),c=[b],d=this.o();d!=b&&c[m](d);b=a.l;for(d=[];b;){var e=b&-b;d[m](Ad(this,e));b&=~e}c[m][D](c,d);(a=a.z)&&c[m][D](c,a);N&&!Jb("7")&&c[m][D](c,xd(this,c));return c};
+var xd=function(a,b,c){var d=[];if(c)b=b.concat([c]);Ua([],function(e){if(Va(e,Sa(Wa,b))&&(!c||Wa(e,c)))d[m](e[Ea]("_"))});return d},Ad=function(a,b){a.Da||Bd(a);return a.Da[b]},zd=function(a,b){a.zb||Cd(a);a=ca(a.zb[b],10);return isNaN(a)?0:a},Bd=function(a){var b=a.o();a.Da=ib(1,b+"-disabled",2,b+"-hover",4,b+"-active",8,b+"-selected",16,b+"-checked",32,b+"-focused",64,b+"-open")},Cd=function(a){a.Da||Bd(a);a.zb=fb(a.Da)};var W=function(a,b,c){T[C](this,c);this.a=b||sd(this.constructor);this.Ea=a};L(W,T);F=W[z];F.Ea=i;F.l=0;F.v=39;F.Hb=255;F.Na=0;F.q=h;F.z=i;F.ra=h;F.Ba=k;F.n=function(){return this.a.n(this)};F.Ga=function(){return this.t||(this.t=new V)};F.qb=function(){return this.a};var Dd=function(a,b){if(b){if(a.z)Wa(a.z,b)||a.z[m](b);else a.z=[b];a.a.ma(a,b,h)}},Ed=function(a,b){if(b&&a.z){Xa(a.z,b);if(a.z[y]==0)a.z=i;a.a.ma(a,b,k)}};F=W[z];F.ma=function(a,b){b?Dd(this,a):Ed(this,a)};
+F.m=function(){var a=this.a.m(this);this.b=a;if(O){var b=this.a.da();b&&hd(a,b)}this.Ba||this.a.Ma(a,k);this.O()||this.a.ya(a,k)};F.N=function(){return this.a.N(this.c())};F.X=function(a){return this.a.X(a)};F.Ra=function(a){this.b=a=this.a.J(this,a);if(O){var b=this.a.da();b&&hd(a,b)}this.Ba||this.a.Ma(a,k);this.q=a[Aa].display!="none"};
+F.H=function(){W.d.H[C](this);this.a.bb(this);if(this.v&-2){this.ra&&Fd(this,h);if(this.v&32){var a=this.n();if(a){var b=this.Ga();ld(b,a);S(S(S(bd(this),b,"key",this.S),a,"focus",this.pa),a,"blur",this.oa)}}}};var Fd=function(a,b){var c=bd(a),d=a.c();if(b){S(S(S(S(c,d,"mouseover",a.Za),d,"mousedown",a.qa),d,"mouseup",a.$a),d,"mouseout",a.Ya);N&&S(c,d,"dblclick",a.rb)}else{Qc(Qc(Qc(Qc(c,d,"mouseover",a.Za),d,"mousedown",a.qa),d,"mouseup",a.$a),d,"mouseout",a.Ya);N&&Qc(c,d,"dblclick",a.rb)}};
+W[z].Z=function(){W.d.Z[C](this);this.t&&this.t.detach();this.O()&&this.j()&&this.a.wa(this,k)};W[z].f=function(){W.d.f[C](this);if(this.t){this.t.K();delete this.t}delete this.a;this.z=this.Ea=i};var yd=function(a,b){a.Ea=b};F=W[z];F.xa=function(a){W.d.xa[C](this,a);var b=this.c();b&&this.a.xa(b,a)};F.Ma=function(a){this.Ba=a;var b=this.c();b&&this.a.Ma(b,a)};F.O=function(){return this.q};
+F.ya=function(a,b){if(b||this.q!=a&&this[r](a?"show":"hide")){(b=this.c())&&this.a.ya(b,a);this.j()&&this.a.wa(this,a);this.q=a;return h}return k};F.j=function(){return!!!(this.l&1)};F.va=function(a){var b=this.h;if(!(b&&typeof b.j=="function"&&!b.j())&&Gd(this,1,!a)){if(!a){this[va](k);this.C(k)}this.O()&&this.a.wa(this,a);this.D(1,!a)}};F.C=function(a){Gd(this,2,a)&&this.D(2,a)};F.setActive=function(a){Gd(this,4,a)&&this.D(4,a)};
+var Hd=function(a,b){Gd(a,8,b)&&a.D(8,b)},Id=function(a,b){Gd(a,16,b)&&a.D(16,b)},Jd=function(a,b){Gd(a,32,b)&&a.D(32,b)},Kd=function(a,b){Gd(a,64,b)&&a.D(64,b)};W[z].D=function(a,b){if(this.v&a&&b!=!!(this.l&a)){this.a.D(this,a,b);this.l=b?this.l|a:this.l&~a}};
+var Ld=function(a,b,c){if(a.e&&a.l&b&&!c)f(l("Component already rendered"));!c&&a.l&b&&a.D(b,k);a.v=c?a.v|b:a.v&~b},X=function(a,b){return!!(a.Hb&b)&&!!(a.v&b)},Gd=function(a,b,c){return!!(a.v&b)&&!!(a.l&b)!=c&&(!(a.Na&b)||a[r](Zc(b,c)))&&!a.Sa};W[z].Za=function(a){!Md(a,this.c())&&this[r]("enter")&&this.j()&&X(this,2)&&this.C(h)};W[z].Ya=function(a){if(!Md(a,this.c())&&this[r]("leave")){X(this,4)&&this[va](k);X(this,2)&&this.C(k)}};var Md=function(a,b){return!!a.relatedTarget&&Zb(b,a.relatedTarget)};
+W[z].qa=function(a){if(this.j()){X(this,2)&&this.C(h);if(jc(a,0)){X(this,4)&&this[va](h);this.a.ba(this)&&this.n().focus()}}!this.Ba&&jc(a,0)&&a[oa]()};W[z].$a=function(a){if(this.j()){X(this,2)&&this.C(h);this.l&4&&Nd(this,a)&&X(this,4)&&this[va](k)}};W[z].rb=function(a){this.j()&&Nd(this,a)};var Nd=function(a,b){X(a,16)&&Id(a,!!!(a.l&16));X(a,8)&&Hd(a,h);X(a,64)&&Kd(a,!!!(a.l&64));var c=new gc("action",a);if(b)for(var d=["altKey","ctrlKey","metaKey","shiftKey"],e,g=0;e=d[g];g++)c[e]=b[e];return a[r](c)};
+W[z].pa=function(){X(this,32)&&Jd(this,h)};W[z].oa=function(){X(this,4)&&this[va](k);X(this,32)&&Jd(this,k)};W[z].S=function(a){if(this.O()&&this.j()&&this.Xa(a)){a[oa]();a[za]();return h}return k};W[z].Xa=function(a){return a[u]==13&&Nd(this,a)};if(!K(W))f(l("Invalid component class "+W));if(!K(vd))f(l("Invalid renderer class "+vd));var Od=Qa(W);rd[Od]=vd;ud("goog-control",function(){return new W(i)});var Pd=function(){};L(Pd,vd);Ja(Pd);Pd[z].m=function(a){return a.Fa().m("div",this.o())};Pd[z].J=function(a,b){if(b.tagName=="HR"){var c=b;b=this.m(a);c[E]&&c[E].insertBefore(b,c);Yb(c)}else Mb(b,this.o());return b};Pd[z].o=function(){return"goog-menuseparator"};var Qd=function(a,b){W[C](this,i,a||Pd.R(),b);Ld(this,1,k);Ld(this,2,k);Ld(this,4,k);Ld(this,32,k);this.l=1};L(Qd,W);Qd[z].H=function(){Qd.d.H[C](this);hd(this.c(),"separator")};ud("goog-menuseparator",function(){return new Qd});var Rd=function(){};Ja(Rd);Rd[z].da=function(){};var Sd=function(a,b,c){if(b)b.tabIndex=c?0:-1};F=Rd[z];F.m=function(a){return a.Fa().m("div",this.na(a)[Ea](" "))};F.N=function(a){return a};F.X=function(a){return a.tagName=="DIV"};F.J=function(a,b){b.id&&ad(a,b.id);var c=this.o(),d=k,e=Lb(b);e&&Ua(e,function(g){if(g==c)d=h;else g&&this.ib(a,g,c)},this);d||Mb(b,c);Td(this,a,b);return b};
+F.ib=function(a,b,c){if(b==c+"-disabled")a.va(k);else if(b==c+"-horizontal")Ud(a,"horizontal");else b==c+"-vertical"&&Ud(a,"vertical")};var Td=function(a,b,c){if(c){a=c[v];for(var d;a&&a[E]==c;){d=a[ua];if(a[na]==1){var e;a:{e=void 0;for(var g=Lb(a),j=0,o=g[y];j<o;j++)if(e=g[j]in td?td[g[j]]():i){e=e;break a}e=i}if(e){e.b=a;b.j()||e.va(k);b.Aa(e);e.J(a)}}else if(!a[Fa]||jb(a[Fa])=="")c[Ca](a);a=d}}};Rd[z].bb=function(a){a=a.c();Wc(a,h,O);if(N)a.hideFocus=h;var b=this.da();b&&hd(a,b)};Rd[z].n=function(a){return a.c()};
+Rd[z].o=function(){return"goog-container"};Rd[z].na=function(a){var b=this.o(),c=[b,a.U=="horizontal"?b+"-horizontal":b+"-vertical"];a.j()||c[m](b+"-disabled");return c};var Y=function(a,b,c){T[C](this,c);this.a=b||Rd.R();this.U=a||"vertical"};L(Y,T);F=Y[z];F.cc=i;F.t=i;F.a=i;F.U=i;F.q=h;F.Y=h;F.Va=h;F.k=-1;F.g=i;F.ia=k;F.Fb=k;F.P=i;F.n=function(){return this.cc||this.a.n(this)};F.Ga=function(){return this.t||(this.t=new V(this.n()))};F.qb=function(){return this.a};F.m=function(){this.b=this.a.m(this)};F.N=function(){return this.a.N(this.c())};F.X=function(a){return this.a.X(a)};F.Ra=function(a){this.b=this.a.J(this,a);if(a[Aa].display=="none")this.q=k};
+F.H=function(){Y.d.H[C](this);dd(this,function(b){b.e&&Vd(this,b)},this);var a=this.c();this.a.bb(this);this.ya(this.q,h);S(S(S(S(S(S(S(S(bd(this),this,"enter",this.Rb),this,"highlight",this.Sb),this,"unhighlight",this.$b),this,"open",this.Vb),this,"close",this.Pb),a,"mousedown",this.qa),Pb(a),"mouseup",this.Qb),a,["mousedown","mouseup","mouseover","mouseout"],this.Ob);this.ba()&&Wd(this,h)};
+var Wd=function(a,b){var c=bd(a),d=a.n();b?S(S(S(c,d,"focus",a.pa),d,"blur",a.oa),a.Ga(),"key",a.S):Qc(Qc(Qc(c,d,"focus",a.pa),d,"blur",a.oa),a.Ga(),"key",a.S)};F=Y[z];F.Z=function(){Xd(this,-1);this.g&&Kd(this.g,k);this.ia=k;Y.d.Z[C](this)};F.f=function(){Y.d.f[C](this);if(this.t){this.t.K();this.t=i}this.a=this.g=this.P=i};F.Rb=function(){return h};
+F.Sb=function(a){var b=gd(this,a[B]);if(b>-1&&b!=this.k){var c=U(this,this.k);c&&c.C(k);this.k=b;c=U(this,this.k);this.ia&&c[va](h);if(this.g&&c!=this.g)c.v&64?Kd(c,h):Kd(this.g,k)}id(this.c(),"activedescendant",a[B].c().id)};F.$b=function(a){if(a[B]==U(this,this.k))this.k=-1;id(this.c(),"activedescendant","")};F.Vb=function(a){if((a=a[B])&&a!=this.g&&a.h==this){this.g&&Kd(this.g,k);this.g=a}};F.Pb=function(a){if(a[B]==this.g)this.g=i};
+F.qa=function(a){this.Y&&Yd(this,h);var b=this.n(),c;a:{if(b)if((c=b.getAttributeNode("tabindex"))&&c.specified){c=b.tabIndex;c=typeof c=="number"&&c>=0;break a}c=k}c?b.focus():a[oa]()};F.Qb=function(){this.ia=k};F.Ob=function(a){var b;a:{b=a[B];if(this.P)for(var c=this.c();b&&b[E]&&b!=c;){var d=b.id;if(d in this.P){b=this.P[d];break a}b=b[E]}b=i}if(b)switch(a[w]){case "mousedown":b.qa(a);break;case "mouseup":b.$a(a);break;case "mouseover":b.Za(a);break;case "mouseout":b.Ya(a);break}};F.pa=function(){};
+F.oa=function(){Xd(this,-1);this.ia=k;this.g&&Kd(this.g,k)};F.S=function(a){if(this.j()&&ed(this)!=0&&this.Xa(a)){a[oa]();a[za]();return h}return k};
+F.Xa=function(a){var b=U(this,this.k);if(b&&typeof b.S=="function"&&b.S(a))return h;if(this.g&&this.g!=b&&typeof this.g.S=="function"&&this.g.S(a))return h;switch(a[u]){case 27:if(this.ba())this.n().blur();else return k;break;case 36:Zd(this);break;case 35:$d(this);break;case 38:if(this.U=="vertical")be(this);else return k;break;case 37:if(this.U=="horizontal")fd(this)?de(this):be(this);else return k;break;case 40:if(this.U=="vertical")de(this);else return k;break;case 39:if(this.U=="horizontal")fd(this)?
+be(this):de(this);else return k;break;default:return k}return h};var Vd=function(a,b){var c=b.c();c=c.id||(c.id=$c(b));if(!a.P)a.P={};a.P[c]=b};Y[z].Aa=function(a,b){Y.d.Aa[C](this,a,b)};Y[z].Pa=function(a,b,c){a.Na|=2;a.Na|=64;if(this.ba()||!this.Fb)Ld(a,32,k);a.e&&k!=a.ra&&Fd(a,k);a.ra=k;Y.d.Pa[C](this,a,b,c);c&&this.e&&Vd(this,a);b<=this.k&&this.k++};
+Y[z].removeChild=function(a,b){var c=gd(this,a);if(c!=-1)if(c==this.k)a.C(k);else c<this.k&&this.k--;(c=a.c())&&c.id&&cb(this.P,c.id);b=a=Y.d[Ca][C](this,a,b);b.e&&h!=b.ra&&Fd(b,h);b.ra=h;return a};var Ud=function(a,b){if(a.c())f(l("Component already rendered"));a.U=b};F=Y[z];F.O=function(){return this.q};F.ya=function(a,b){if(b||this.q!=a&&this[r](a?"show":"hide")){this.q=a;var c=this.c();if(c){Uc(c,a);this.ba()&&Sd(this.a,this.n(),this.Y&&this.q);this.q&&!b&&this[r]("aftershow")}return h}return k};
+F.j=function(){return this.Y};F.va=function(a){if(this.Y!=a&&this[r](a?"enable":"disable")){if(a){this.Y=h;dd(this,function(b){if(b.Db)delete b.Db;else b.va(h)})}else{dd(this,function(b){if(b.j())b.va(k);else b.Db=h});this.ia=this.Y=k}this.ba()&&Sd(this.a,this.n(),a&&this.q)}};F.ba=function(){return this.Va};F.wa=function(a){a!=this.Va&&this.e&&Wd(this,a);this.Va=a;this.Y&&this.q&&Sd(this.a,this.n(),a)};var Xd=function(a,b){if(b=U(a,b))b.C(h);else a.k>-1&&U(a,a.k).C(k)};
+Y[z].C=function(a){Xd(this,gd(this,a))};var Zd=function(a){ee(a,function(b,c){return(b+1)%c},ed(a)-1)},$d=function(a){ee(a,function(b,c){b--;return b<0?c-1:b},0)},de=function(a){ee(a,function(b,c){return(b+1)%c},a.k)},be=function(a){ee(a,function(b,c){b--;return b<0?c-1:b},a.k)},ee=function(a,b,c){c=c<0?gd(a,a.g):c;var d=ed(a);c=b(c,d);for(var e=0;e<=d;){var g=U(a,c);if(g&&g.O()&&g.j()&&g.v&2){a.gb(c);return h}e++;c=b(c,d)}return k};Y[z].gb=function(a){Xd(this,a)};var Yd=function(a,b){a.ia=b};var fe=function(){};L(fe,vd);Ja(fe);F=fe[z];F.o=function(){return"goog-tab"};F.da=function(){return"tab"};F.m=function(a){var b=fe.d.m[C](this,a);(a=a.Wa())&&this.jb(b,a);return b};F.J=function(a,b){b=fe.d.J[C](this,a,b);var c=this.Wa(b);c&&ge(a,c);if(a.l&8)if((c=a.h)&&K(c.ca)){a.D(8,k);c.ca(a)}return b};F.Wa=function(a){return a.title||""};F.jb=function(a,b){if(a)a.title=b||""};var he=function(a,b,c){W[C](this,a,b||fe.R(),c);Ld(this,8,h);this.Na|=9};L(he,W);he[z].Wa=function(){return this.Bb};he[z].jb=function(a){this.qb().jb(this.c(),a);this.Bb=a};var ge=function(a,b){a.Bb=b};ud("goog-tab",function(){return new he(i)});var ie=function(){};L(ie,Rd);Ja(ie);ie[z].o=function(){return"goog-tab-bar"};ie[z].da=function(){return"tablist"};ie[z].ib=function(a,b,c){this.vb||je(this);var d=this.vb[b];d?ke(a,d):ie.d.ib[C](this,a,b,c)};ie[z].na=function(a){var b=ie.d.na[C](this,a);this.Ca||le(this);b[m](this.Ca[a.dc]);return b};var le=function(a){var b=a.o();a.Ca=ib("top",b+"-top","bottom",b+"-bottom","start",b+"-start","end",b+"-end")},je=function(a){a.Ca||le(a);a.vb=fb(a.Ca)};var Z=function(a,b,c){ke(this,a||"top");Y[C](this,this.U,b||ie.R(),c);a=bd(this);S(a,this,"select",this.Yb);S(a,this,"unselect",this.Zb);S(a,this,"disable",this.Wb);S(a,this,"hide",this.Xb)};L(Z,Y);Z[z].Gb=h;Z[z].B=i;Z[z].f=function(){Z.d.f[C](this);this.B=i};Z[z].removeChild=function(a,b){me(this,a);return Z.d[Ca][C](this,a,b)};var ke=function(a,b){Ud(a,b=="start"||b=="end"?"vertical":"horizontal");a.dc=b};Z[z].gb=function(a){Z.d.gb[C](this,a);this.Gb&&ne(this,a)};
+Z[z].ca=function(a){if(a)Hd(a,h);else this.B&&Hd(this.B,k)};var ne=function(a,b){a.ca(U(a,b))},me=function(a,b){if(b&&b==a.B){for(var c=gd(a,b),d=c-1;b=U(a,d);d--)if(b.O()&&b.j()){a.ca(b);return}for(c=c+1;b=U(a,c);c++)if(b.O()&&b.j()){a.ca(b);return}a.ca(i)}};F=Z[z];F.Yb=function(a){this.B&&this.B!=a[B]&&Hd(this.B,k);this.B=a[B]};F.Zb=function(a){if(a[B]==this.B)this.B=i};F.Wb=function(a){me(this,a[B])};F.Xb=function(a){me(this,a[B])};F.pa=function(){U(this,this.k)||this.C(this.B||U(this,0))};
+ud("goog-tab-bar",function(){return new Z});var oe=function(a,b,c,d){function e(j){if(j){j.tabIndex=0;R(j,"click",g.gc,k,g);R(j,"keydown",g.hc,k,g)}}this.L=Rb(a)||i;this.la=Rb(d||i);this.Ta=(this.db=K(b)?b:i)||!b?i:Rb(b);this.i=c==h;var g=this;e(this.L);e(this.la);this.W(this.i)};L(oe,Sc);oe[z].f=function(){this.L&&Lc(this.L);this.la&&Lc(this.la);oe.d.f[C](this)};
+oe[z].W=function(a){if(this.Ta)Uc(this.Ta,a);else if(a&&this.db)this.Ta=this.db();if(this.la){Uc(this.L,!a);Uc(this.la,a)}else if(this.L)if(a){Nb(this.L,"goog-zippy-collapsed");Mb(this.L,"goog-zippy-expanded")}else{Nb(this.L,"goog-zippy-expanded");Mb(this.L,"goog-zippy-collapsed")}this.i=a;this[r](new pe("toggle",this,this.i))};oe[z].hc=function(a){if(a[u]==13||a[u]==32){this.W(!this.i);a[oa]();a[za]()}};oe[z].gc=function(){this.W(!this.i)};var pe=function(a,b,c){gc[C](this,a,b);this.lc=c};L(pe,gc);var re=function(a,b){this.kb=[];a=Rb(a);a=Sb(da,"span","ae-zippy",a);for(var c=0,d;d=a[c];c++){for(var e=d[E][E][E][ua];e&&e[na]!=1;)e=e[ua];this.kb[m](new oe(d,e,k))}this.Lb=new qe(this.kb,Rb(b))};re[z].Mb=function(){return this.Lb};re[z].Nb=function(){return this.kb};
+var qe=function(a,b){this.za=a;if(this.za[y]){a=0;for(var c;c=this.za[a];a++)R(c,"toggle",this.kc,k,this)}this.eb=0;this.i=k;a="ae-toggle ae-plus ae-action";this.za[y]||(a+=" ae-disabled");this.Q=Wb("span",{className:a},"Expand All");R(this.Q,"click",this.Ib,k,this);b[la](this.Q)};qe[z].Ib=function(){this.za[y]&&this.W(!this.i)};qe[z].kc=function(a){a=a.currentTarget;if(a.i)this.eb+=1;else this.eb-=1;if(a.i!=this.i)if(a.i){this.i=h;se(this,h)}else if(this.eb==0){this.i=k;se(this,k)}};
+qe[z].W=function(a){this.i=a;a=0;for(var b;b=this.za[a];a++)b.i!=this.i&&b.W(this.i);se(this)};
+var se=function(a,b){if(b!==ba?b:a.i){Nb(a.Q,"ae-plus");Mb(a.Q,"ae-minus");$b(a.Q,"Collapse All")}else{Nb(a.Q,"ae-minus");Mb(a.Q,"ae-plus");$b(a.Q,"Expand All")}},te=function(a){this.jc=a;this.Ab={};var b,c=Wb("div",{},b=Wb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),Wb("div",{className:"goog-tab-bar-clear"}),a=Wb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new Z;d.J(b);R(d,"select",this.mb,k,this);R(d,"unselect",this.mb,k,this);b=
+0;for(var e;e=this.jc[b];b++)if(e=Rb("ae-stats-details-"+e)){var g=Sb(da,"h2",i,e)[0],j;j=void 0;if(N&&"innerText"in g)j=g.innerText[n](/(\r\n|\r|\n)/g,"\n");else{j=[];ec(g,j,h);j=j[Ea]("")}j=j[n](/\xAD/g,"");j=j[n](/ +/g," ");if(j!=" ")j=j[n](/^\s*/,"");j=j;Yb(g);g=new he(j);this.Ab[Qa(g)]=e;d.Aa(g,h);a[la](e);b==0?d.ca(g):Uc(e,k)}Rb("bd")[la](c)};te[z].mb=function(a){var b=this.Ab[Qa(a[B])];Uc(b,a[w]=="select")};Ga("ae.Stats.Details.Tabs",te,void 0);Ga("goog.ui.Zippy",oe,void 0);
+oe[z].setExpanded=oe[z].W;Ga("ae.Stats.MakeZippys",re,void 0);re[z].getExpandCollapse=re[z].Mb;re[z].getZippys=re[z].Nb;qe[z].setExpanded=qe[z].W;var $=function(){this.Qa=[];this.fb=[]},ue=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],ve=function(a){if(a<=0)return[2,0.5,1];for(var b=1;a<1;){a*=10;b/=10}for(;a>=10;){a/=10;b*=10}for(var c=0;c<ue[y];c++)if(a<=ue[c][2])return[ue[c][0],ue[c][1]*b,ue[c][2]*b];return[5,2*b,10*b]};$[z].Oa="stats/static/pix.gif";$[z].s="ae-stats-gantt-";$[z].ab=0;$[z].write=function(a){this.fb[m](a)};
+var we=function(a,b,c,d){a[t]('<tr class="'+a.s+'axisrow"><td width="20%"></td><td>');a[t]('<div class="'+a.s+'axis">');for(var e=0;e<=b;e++){a[t]('<img class="'+a.s+'tick" src="'+a.Oa+'" alt="" ');a[t]('style="left:'+e*c*d+'%"\n>');a[t]('<span class="'+a.s+'scale" style="left:'+e*c*d+'%">');a[t]("&nbsp;"+e*c+"</span>")}a[t]("</div></td></tr>\n")};
+$[z].Kb=function(){this.fb=[];var a=ve(this.ab),b=a[0],c=a[1];a=100/a[2];this[t]('<table class="'+this.s+'table">\n');we(this,b,c,a);for(var d=0;d<this.Qa[y];d++){var e=this.Qa[d];this[t]('<tr class="'+this.s+'datarow"><td width="20%">');if(e.label[y]>0){e.ga[y]>0&&this[t]('<a class="'+this.s+'link" href="'+e.ga+'">');this[t](e.label);e.ga[y]>0&&this[t]("</a>")}this[t]("</td>\n<td>");this[t]('<div class="'+this.s+'container">');e.ga[y]>0&&this[t]('<a class="'+this.s+'link" href="'+e.ga+'"\n>');this[t]('<img class="'+
+this.s+'bar" src="'+this.Oa+'" alt="" ');this[t]('style="left:'+e.start*a+"%;width:"+e.pb*a+'%;min-width:1px"\n>');if(e.Ua>0){this[t]('<img class="'+this.s+'extra" src="'+this.Oa+'" alt="" ');this[t]('style="left:'+e.start*a+"%;width:"+e.Ua*a+'%"\n>')}if(e.tb[y]>0){this[t]('<span class="'+this.s+'inline" style="left:'+(e.start+ea.max(e.pb,e.Ua))*a+'%">&nbsp;');this[t](e.tb);this[t]("</span>")}e.ga[y]>0&&this[t]("</a>");this[t]("</div></td></tr>\n")}we(this,b,c,a);this[t]("</table>\n");return this.fb[Ea]("")};
+$[z].Eb=function(a,b,c,d,e,g){this.ab=ea.max(this.ab,ea.max(b+c,b+d));this.Qa[m]({label:a,start:b,pb:c,Ua:d,tb:e,ga:g})};Ga("Gantt",$,void 0);$[z].add_bar=$[z].Eb;$[z].draw=$[z].Kb;})();
diff --git a/google-appengine/google/appengine/ext/appstats/static/gantt.js b/google-appengine/google/appengine/ext/appstats/static/gantt.js
index 4bc8a93..4c1d651 100644
--- a/google-appengine/google/appengine/ext/appstats/static/gantt.js
+++ b/google-appengine/google/appengine/ext/appstats/static/gantt.js
@@ -7,23 +7,28 @@
* @author schefflerjens@google.com (Jens Scheffler)
*/
-function Gantt() {
-
- // Overridable configuration constants.
- this.PIX = 'stats/static/pix.gif'; // URL of a transparent 1x1 GIF.
- this.PREFIX = 'ae-stats-gantt-'; // Class name this.PREFIX.
- this.HEIGHT = '1em'; // Height of one bar.
- this.EXTRA_HEIGHT = '0.5em'; // Height of the extra bar.
- this.BG_COLOR = '#eeeeff'; // Background color for the bar.
- this.COLOR = '#7777ff'; // Color of the main bar.
- this.EXTRA_COLOR = '#ff6666'; // Color of the extra bar.
- this.INLINE_FONT_SIZE = '80%'; // Font size of inline_label.
- this.INLINE_TOP = '0.1em'; // Top of inline label text.
- this.TICK_COLOR = 'grey'; // Color for ticks.
-
- // Internal fields used to render the chart
- // Should not be modified
- var SCALES = [[5, 0.2, 1.0],
+/**
+ * @constructor
+ */
+var Gantt = function() {
+ /**
+ * @type {Array}
+ */
+ this.bars = [];
+
+ /**
+ * @type {Array}
+ */
+ this.output = [];
+};
+
+
+/**
+ * Internal fields used to render the chart.
+ * Should not be modified.
+ * @type {Array.<Array>}
+ */
+Gantt.SCALES = [[5, 0.2, 1.0],
[6, 0.2, 1.2],
[5, 0.25, 1.25],
[6, 0.25, 1.5],
@@ -35,153 +40,243 @@ function Gantt() {
[6, 1.0, 6.0],
[4, 2.0, 8.0],
[5, 2.0, 10.0]];
- var bars = [];
- var highest_duration = 0;
- var output = [];
- /*
- * Appends text to the output array
- */
- var write = function(text) {
- output.push(text);
- }
- /*
- * Internal helper to draw a table row showing the scale.
- */
- var draw_scale = function(gantt, howmany, spacing, scale) {
- write('<tr class="' + gantt.PREFIX + 'axisrow"><td width="20%"></td><td>');
- write('<div class="' + gantt.PREFIX + 'axis">');
- for (var i = 0; i <= howmany; i++) {
- write('<img class="' + gantt.PREFIX + 'tick" src="' +
- gantt.PIX + '" alt="" ');
- write('style="left:' + (i * spacing * scale) + '%"\n>');
- write('<span class="' + gantt.PREFIX + 'scale" style="left:' +
- (i * spacing * scale) + '%">');
- write('&nbsp;' + (i * spacing) + '</span>'); // TODO: number format %4g
+/**
+ * Helper to compute the proper X axis scale.
+ * Args:
+ * highest: the highest value in the data series.
+ *
+ * Returns:
+ * A tuple (howmany, spacing, limit) where howmany is the number of
+ * increments, spacing is the increment to be used between successive
+ * axis labels, and limit is the rounded-up highest value of the
+ * axis. Within float precision, howmany * spacing == limit will
+ * hold.
+ *
+ * The axis is assumed to always start at zero.
+ */
+Gantt.compute_scale = function(highest) {
+ if (highest <= 0) {
+ return [2, 0.5, 1.0]; // Special-case if there's no data.
+ }
+ var scale = 1.0;
+ while (highest < 1.0) {
+ highest *= 10.0;
+ scale /= 10.0;
+ }
+ while (highest >= 10.0) {
+ highest /= 10.0;
+ scale *= 10.0;
+ }
+ // Now 1 <= highest < 10
+ for (var i = 0; i < Gantt.SCALES.length; i++) {
+ if (highest <= Gantt.SCALES[i][2]) {
+ return [Gantt.SCALES[i][0], Gantt.SCALES[i][1] * scale,
+ Gantt.SCALES[i][2] * scale];
}
- write('</div></td></tr>\n');
}
+ // Avoid the need for "assert False". Not actually reachable.
+ return [5, 2.0 * scale, 10.0 * scale];
+};
- /*
- * Helper to compute the proper X axis scale.
- * Args:
- * highest: the highest value in the data series.
- *
- * Returns:
- * A tuple (howmany, spacing, limit) where howmany is the number of
- * increments, spacing is the increment to be used between successive
- * axis labels, and limit is the rounded-up highest value of the
- * axis. Within float precision, howmany * spacing == highest will
- * hold.
- *
- * The axis is assumed to always start at zero.
- */
- var compute_scale = function(highest) {
- if (highest <= 0) {
- return [2, 0.5, 1.0] // Special-case if there's no data.
- }
- var scale = 1.0
- while (highest < 1.0) {
- highest *= 10.0
- scale /= 10.0
- }
- while (highest >= 10.0) {
- highest /= 10.0
- scale *= 10.0
- }
- // Now 1 <= highest < 10
- for (var i = 0; i < SCALES.length; i++) {
- if (highest <= SCALES[i][2]) {
- return [SCALES[i][0], SCALES[i][1] * scale, SCALES[i][2] * scale];
- }
- }
- // Avoid the need for "assert False". Not actually reachable.
- return [5, 2.0 * scale, 10.0 * scale];
+
+/**
+ * URL of a transparent 1x1 GIF.
+ * @type {string}
+ */
+Gantt.prototype.PIX = 'stats/static/pix.gif';
+
+
+/**
+ * CSS class name prefix.
+ * @type {string}
+ */
+Gantt.prototype.PREFIX = 'ae-stats-gantt-';
+
+
+/**
+ * Height of one bar.
+ * @type {string}
+ */
+Gantt.prototype.HEIGHT = '1em';
+
+
+/**
+ * Height of the extra bar.
+ * @type {string}
+ */
+Gantt.prototype.EXTRA_HEIGHT = '0.5em';
+
+
+/**
+ * Background color for the bar.
+ * @type {string}
+ */
+Gantt.prototype.BG_COLOR = '#eeeeff';
+
+
+/**
+ * Color of the main bar.
+ * @type {string}
+ */
+Gantt.prototype.COLOR = '#7777ff';
+
+
+/**
+ * Color of the extra bar.
+ * @type {string}
+ */
+Gantt.prototype.EXTRA_COLOR = '#ff6666';
+
+
+/**
+ * Font size of inline_label.
+ * @type {string}
+ */
+Gantt.prototype.INLINE_FONT_SIZE = '80%';
+
+
+/**
+ * Top of inline label text.
+ * @type {string}
+ */
+Gantt.prototype.INLINE_TOP = '0.1em';
+
+
+/**
+ * Color for ticks.
+ * @type {string}
+ */
+Gantt.prototype.TICK_COLOR = 'grey';
+
+
+/**
+ * @type {number}
+ */
+Gantt.prototype.highest_duration = 0;
+
+
+/**
+ * Appends text to the output array.
+ * @param {string} text The text to append to the output.
+ */
+Gantt.prototype.write = function(text) {
+ this.output.push(text);
+};
+
+
+/**
+ * Internal helper to draw a table row showing the scale.
+ * @param {number} howmany
+ * @param {number} spacing
+ * @param {number} scale
+ */
+Gantt.prototype.draw_scale = function(howmany, spacing, scale) {
+ this.write('<tr class="' + this.PREFIX + 'axisrow">' +
+ '<td width="20%"></td><td>');
+ this.write('<div class="' + this.PREFIX + 'axis">');
+ for (var i = 0; i <= howmany; i++) {
+ this.write('<img class="' + this.PREFIX + 'tick" src="' +
+ this.PIX + '" alt="" ');
+ this.write('style="left:' + (i * spacing * scale) + '%"\n>');
+ this.write('<span class="' + this.PREFIX + 'scale" style="left:' +
+ (i * spacing * scale) + '%">');
+ this.write('&nbsp;' + (i * spacing) + '</span>'); // TODO: number format %4g
}
+ this.write('</div></td></tr>\n');
+};
- /*
- * Add a bar to the chart.
- * Args:
- * label: Valid HTML or HTML-escaped text for the left column.
- * start: Start time for the event.
- * duration: Duration for the event.
- * extra_duration: Duration for the second bar; use 0 to suppress.
- * inline_label: Valid HTML or HTML-escaped text drawn after the bars;
- * use '' to suppress.
- * link_target: HTML-escaped link where clicking on any element
- * will take you; use '' for no linking.
- * All arguments representing times or durations should be integers
- * or floats expressed in seconds. The scale drawn is always
- * expressed in seconds (with limited precision).
- */
- this.add_bar = function(label, start, duration, extra_duration,
- inline_label, link_target) {
- highest_duration = Math.max(
- highest_duration, Math.max(start + duration, start + extra_duration));
- bars.push({label: label, start: start, duration: duration,
- extra_duration: extra_duration, inline_label: inline_label,
- link_target: link_target});
- return this;
- };
-
- /*
- * Draw the bar chart as HTML.
- */
- this.draw = function() {
- output = [];
- var scale = compute_scale(highest_duration);
- var howmany = scale[0];
- var spacing = scale[1];
- var limit = scale[2];
- scale = 100.0 / limit;
- write('<table class="' + this.PREFIX + 'table">\n');
- draw_scale(this, howmany, spacing, scale);
- for (var i = 0; i < bars.length; i++) {
- var bar = bars[i];
- write('<tr class="' + this.PREFIX + 'datarow"><td width="20%">');
- if (bar.label.length > 0) {
- if (bar.link_target.length > 0) {
- write('<a class="' + this.PREFIX + 'link" href="' +
- bar.link_target + '">');
- }
- write(bar.label);
- if (bar.link_target.length > 0) {
- write('</a>');
- }
- }
- write('</td>\n<td>');
- write('<div class="' + this.PREFIX + 'container">');
+
+/**
+ * Draw the bar chart as HTML.
+ */
+Gantt.prototype.draw = function() {
+ this.output = [];
+ var scale = Gantt.compute_scale(this.highest_duration);
+ var howmany = scale[0];
+ var spacing = scale[1];
+ var limit = scale[2];
+ scale = 100.0 / limit;
+ this.write('<table class="' + this.PREFIX + 'table">\n');
+ this.draw_scale(howmany, spacing, scale);
+ for (var i = 0; i < this.bars.length; i++) {
+ var bar = this.bars[i];
+ this.write('<tr class="' + this.PREFIX + 'datarow"><td width="20%">');
+ if (bar.label.length > 0) {
if (bar.link_target.length > 0) {
- write('<a class="' + this.PREFIX + 'link" href="' +
- bar.link_target + '"\n>');
- }
- write('<img class="' + this.PREFIX + 'bar" src="' +
- this.PIX + '" alt="" ');
- write('style="left:' + (bar.start * scale) + '%;width:' +
- (bar.duration * scale) + '%;min-width:1px"\n>');
- if (bar.extra_duration > 0) {
- write('<img class="' + this.PREFIX + 'extra" src="' +
- this.PIX + '" alt="" ');
- write('style="left:' + (bar.start * scale) + '%;width:' +
- (bar.extra_duration * scale) + '%"\n>');
- }
- if (bar.inline_label.length > 0) {
- write('<span class="' + this.PREFIX + 'inline" style="left:' +
- ((bar.start +
- Math.max(bar.duration, bar.extra_duration)) * scale) +
- '%">&nbsp;');
- write(bar.inline_label);
- write('</span>');
+ this.write('<a class="' + this.PREFIX + 'link" href="' +
+ bar.link_target + '">');
}
+ this.write(bar.label);
if (bar.link_target.length > 0) {
- write('</a>');
+ this.write('</a>');
}
- write('</div></td></tr>\n');
-
}
- draw_scale(this, howmany, spacing, scale);
- write('</table>\n');
- return output.join('');
- };
-}
+ this.write('</td>\n<td>');
+ this.write('<div class="' + this.PREFIX + 'container">');
+ if (bar.link_target.length > 0) {
+ this.write('<a class="' + this.PREFIX + 'link" href="' +
+ bar.link_target + '"\n>');
+ }
+ this.write('<img class="' + this.PREFIX + 'bar" src="' +
+ this.PIX + '" alt="" ');
+ this.write('style="left:' + (bar.start * scale) + '%;width:' +
+ (bar.duration * scale) + '%;min-width:1px"\n>');
+ if (bar.extra_duration > 0) {
+ this.write('<img class="' + this.PREFIX + 'extra" src="' +
+ this.PIX + '" alt="" ');
+ this.write('style="left:' + (bar.start * scale) + '%;width:' +
+ (bar.extra_duration * scale) + '%"\n>');
+ }
+ if (bar.inline_label.length > 0) {
+ this.write('<span class="' + this.PREFIX + 'inline" style="left:' +
+ ((bar.start +
+ Math.max(bar.duration, bar.extra_duration)) * scale) +
+ '%">&nbsp;');
+ this.write(bar.inline_label);
+ this.write('</span>');
+ }
+ if (bar.link_target.length > 0) {
+ this.write('</a>');
+ }
+ this.write('</div></td></tr>\n');
+
+ }
+ this.draw_scale(howmany, spacing, scale);
+ this.write('</table>\n');
+
+ var html = this.output.join('');
+ return html;
+};
+
+
+/**
+ * Add a bar to the chart.
+ * All arguments representing times or durations should be integers
+ * or floats expressed in seconds. The scale drawn is always
+ * expressed in seconds (with limited precision).
+ * @param {string} label Valid HTML or HTML-escaped text for the left column.
+ * @param {number} start Start time for the event.
+ * @param {number} duration Duration for the event.
+ * @param {number} extra_duration Duration for the second bar; use 0 to
+ * suppress.
+ * @param {string} inline_label Valid HTML or HTML-escaped text drawn after the
+ * bars; use '' to suppress.
+ * @param {string} link_target HTML-escaped link where clicking on any element
+ * will take you; use '' for no linking.
+ */
+Gantt.prototype.add_bar = function(label, start, duration, extra_duration,
+ inline_label, link_target) {
+ this.highest_duration = Math.max(
+ this.highest_duration, Math.max(start + duration,
+ start + extra_duration));
+ this.bars.push({label: label, start: start, duration: duration,
+ extra_duration: extra_duration, inline_label: inline_label,
+ link_target: link_target});
+};
+
+
+goog.exportSymbol('Gantt', Gantt);
+goog.exportProperty(Gantt.prototype, 'add_bar', Gantt.prototype.add_bar);
+goog.exportProperty(Gantt.prototype, 'draw', Gantt.prototype.draw);
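Gantt.compute_scale normalizes the highest data value into the range [1, 10),
picks the first SCALES row whose limit covers it, and scales the row back up;
the resulting (howmany, spacing, limit) drives draw_scale and the percentage
positions written by draw. A rough Python re-expression of the same algorithm,
for illustration only (not part of the SDK):

SCALES = [(5, 0.2, 1.0), (6, 0.2, 1.2), (5, 0.25, 1.25), (6, 0.25, 1.5),
          (4, 0.5, 2.0), (5, 0.5, 2.5), (6, 0.5, 3.0), (4, 1.0, 4.0),
          (5, 1.0, 5.0), (6, 1.0, 6.0), (4, 2.0, 8.0), (5, 2.0, 10.0)]

def compute_scale(highest):
  """Return (howmany, spacing, limit) for an axis that starts at zero."""
  if highest <= 0:
    return (2, 0.5, 1.0)   # Special-case if there's no data.
  scale = 1.0
  while highest < 1.0:     # Normalize highest into [1, 10).
    highest *= 10.0
    scale /= 10.0
  while highest >= 10.0:
    highest /= 10.0
    scale *= 10.0
  for howmany, spacing, limit in SCALES:
    if highest <= limit:
      return (howmany, spacing * scale, limit * scale)
  return (5, 2.0 * scale, 10.0 * scale)  # Not actually reachable.

For example, compute_scale(37) normalizes 37 to 3.7 with scale 10, matches the
(4, 1.0, 4.0) row, and returns (4, 10.0, 40.0): four 10-unit increments up to
an axis limit of 40.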
diff --git a/google-appengine/google/appengine/ext/appstats/templates/details.html b/google-appengine/google/appengine/ext/appstats/templates/details.html
index 9dd44a6..20710af 100644
--- a/google-appengine/google/appengine/ext/appstats/templates/details.html
+++ b/google-appengine/google/appengine/ext/appstats/templates/details.html
@@ -163,7 +163,6 @@
{% endblock %}
{% block tailstuff %}
-<script src="static/gantt.js"></script>
<script>
var rpcZippyMaker = new ae.Stats.MakeZippys('ae-table-rpc',
'ae-rpc-expand-all');
@@ -184,9 +183,7 @@ var detailsTabs_ = new ae.Stats.Details.Tabs(['timeline', 'rpcstats',
'cgienv', 'syspath']);
</script>
<script>
-
function timelineClickHandler(zippyIndex, hash) {
-
rpcZippyMaker.getExpandCollapse().setExpanded(false);
rpcZippys[zippyIndex].setExpanded(true);
@@ -197,11 +194,19 @@ function timelineClickHandler(zippyIndex, hash) {
function renderChart() {
var chart = new Gantt();
{% for t in record.individual_stats_list %}
- chart.add_bar('{{t.service_call_name|escape}}', {{t.start_offset_milliseconds}}, {{t.duration_milliseconds}}, {{t.api_milliseconds}}, '{{t.duration_milliseconds}}ms{% if t.api_milliseconds %} ({{t.api_milliseconds}}ms api){% endif %}',
- 'javascript:timelineClickHandler(\'{{forloop.counter0}}\');');{% endfor %}
+ chart.add_bar('{{t.service_call_name|escape}}',
+ {{t.start_offset_milliseconds}}, {{t.duration_milliseconds}},
+ {{t.api_milliseconds}},
+ '{{t.duration_milliseconds}}ms{% if t.api_milliseconds %} ({{t.api_milliseconds}}ms api){% endif %}',
+ 'javascript:timelineClickHandler(\'{{forloop.counter0}}\');');
+ {% endfor %}
- chart.add_bar('<b>RPC Total</b>', 0, {{real_total}}, {{api_total}}, '{{real_total}}ms{% if api_total %} ({{api_total}}ms api){% endif %}', '');
- chart.add_bar('<b>Grand Total</b>', 0, {{record.duration_milliseconds}}, {{charged_total}}, '{{record.duration_milliseconds}}ms{% if charged_total %} ({{charged_total}}ms cpu+api){% endif %}', '');
+ chart.add_bar('<b>RPC Total</b>', 0, {{real_total}}, {{api_total}},
+ '{{real_total}}ms{% if api_total %} ({{api_total}}ms api){% endif %}',
+ '');
+ chart.add_bar('<b>Grand Total</b>', 0, {{record.duration_milliseconds}},
+ {{charged_total}},
+ '{{record.duration_milliseconds}}ms{% if charged_total %} ({{charged_total}}ms cpu+api){% endif %}', '');
document.getElementById('ae-rpc-chart').innerHTML = chart.draw();
}
renderChart();
diff --git a/google-appengine/google/appengine/ext/blobstore/blobstore.py b/google-appengine/google/appengine/ext/blobstore/blobstore.py
index 7fdef6a..2ea4228 100755
--- a/google-appengine/google/appengine/ext/blobstore/blobstore.py
+++ b/google-appengine/google/appengine/ext/blobstore/blobstore.py
@@ -27,30 +27,38 @@ class representing a blob-key.
import cgi
import email
-from google.appengine.api import blobstore
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
+from google.appengine.api.blobstore import blobstore
from google.appengine.ext import db
__all__ = ['BLOB_INFO_KIND',
'BLOB_KEY_HEADER',
+ 'BLOB_RANGE_HEADER',
+ 'BlobFetchSizeTooLargeError',
'BlobInfo',
'BlobInfoParseError',
'BlobKey',
+ 'BlobNotFoundError',
'BlobReferenceProperty',
- 'CreationFormatError',
+ 'DataIndexOutOfRangeError',
'Error',
'InternalError',
+ 'MAX_BLOB_FETCH_SIZE',
'UPLOAD_INFO_CREATION_HEADER',
'create_upload_url',
'delete',
+ 'fetch_data',
'get',
'parse_blob_info']
Error = blobstore.Error
InternalError = blobstore.InternalError
-CreationFormatError = blobstore.CreationFormatError
+BlobFetchSizeTooLargeError = blobstore.BlobFetchSizeTooLargeError
+BlobNotFoundError = blobstore.BlobNotFoundError
+_CreationFormatError = blobstore._CreationFormatError
+DataIndexOutOfRangeError = blobstore.DataIndexOutOfRangeError
BlobKey = blobstore.BlobKey
create_upload_url = blobstore.create_upload_url
@@ -63,9 +71,10 @@ class BlobInfoParseError(Error):
BLOB_INFO_KIND = blobstore.BLOB_INFO_KIND
BLOB_KEY_HEADER = blobstore.BLOB_KEY_HEADER
+BLOB_RANGE_HEADER = blobstore.BLOB_RANGE_HEADER
+MAX_BLOB_FETCH_SIZE = blobstore.MAX_BLOB_FETCH_SIZE
UPLOAD_INFO_CREATION_HEADER = blobstore.UPLOAD_INFO_CREATION_HEADER
-
class _GqlQuery(db.GqlQuery):
"""GqlQuery class that explicitly sets model-class.
@@ -361,10 +370,9 @@ def parse_blob_info(field_storage):
'%s is not a valid value for %s size.' % (size, field_name))
try:
- creation = blobstore.parse_creation(creation_string)
- except CreationFormatError, e:
- raise BlobInfoParseError('Could not parse creation for %s: %s' % (
- field_name, str(e)))
+ creation = blobstore._parse_creation(creation_string, field_name)
+ except blobstore._CreationFormatError, err:
+ raise BlobInfoParseError(str(err))
return BlobInfo(blob_key,
{'content_type': content_type,
@@ -423,3 +431,37 @@ class BlobReferenceProperty(db.Property):
elif isinstance(value, BlobKey):
value = BlobInfo(value)
return super(BlobReferenceProperty, self).validate(value)
+
+
+def fetch_data(blob, start_index, end_index):
+ """Fetch data for blob.
+
+ Fetches a fragment of a blob up to MAX_BLOB_FETCH_SIZE in length. Attempting
+ to fetch a fragment that extends beyond the boundaries of the blob will return
+ the amount of data from start_index until the end of the blob, which will be
+ a smaller size than requested. Requesting a fragment that is entirely
+ outside the boundaries of the blob will return an empty string. Attempting
+ to fetch a negative index will raise an exception.
+
+ Args:
+ blob: BlobInfo, BlobKey, str or unicode representation of BlobKey of
+ blob to fetch data from.
+ start_index: Start index of blob data to fetch. May not be negative.
+ end_index: End index (exclusive) of blob data to fetch. Must be
+ >= start_index.
+
+ Returns:
+ str containing partial data of blob. If the indexes are legal but outside
+ the boundaries of the blob, an empty string is returned.
+
+ Raises:
+ TypeError if start_index or end_index are not indexes. Also when blob
+ is not a string, BlobKey or BlobInfo.
+ DataIndexOutOfRangeError when start_index < 0 or end_index < start_index.
+ BlobFetchSizeTooLargeError when the requested blob fragment is larger than
+ MAX_BLOB_FETCH_SIZE.
+ BlobNotFoundError when blob does not exist.
+ """
+ if isinstance(blob, BlobInfo):
+ blob = blob.key()
+ return blobstore.fetch_data(blob, start_index, end_index)
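A minimal usage sketch for the new fetch_data helper (the function name
read_prefix is made up for illustration; per the docstring above, end_index is
exclusive here and a fragment may not exceed MAX_BLOB_FETCH_SIZE bytes):

from google.appengine.ext import blobstore

def read_prefix(blob_info):
  # blob_info: a blobstore.BlobInfo. Returns the first chunk of the blob,
  # capped at MAX_BLOB_FETCH_SIZE so the call cannot raise
  # BlobFetchSizeTooLargeError.
  size = min(blob_info.size, blobstore.MAX_BLOB_FETCH_SIZE)
  return blobstore.fetch_data(blob_info, 0, size)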
diff --git a/google-appengine/google/appengine/ext/bulkload/__init__.py b/google-appengine/google/appengine/ext/bulkload/__init__.py
index 75a899a..ecec2ea 100755
--- a/google-appengine/google/appengine/ext/bulkload/__init__.py
+++ b/google-appengine/google/appengine/ext/bulkload/__init__.py
@@ -15,421 +15,23 @@
# limitations under the License.
#
-"""A mix-in handler for bulk loading data into an application.
+"""Bulkload package: Helpers for both bulkloader and bulkload_client.
For complete documentation, see the Tools and Libraries section of the
documentation.
-To use this in your app, first write a script, e.g. bulkload.py, that
-instantiates a Loader for each entity kind you want to import and call
-bulkload.main(instance). For example:
-
-person = bulkload.Loader(
- 'Person',
- [('name', str),
- ('email', datastore_types.Email),
- ('cool', bool), # ('0', 'False', 'No', '')=False, otherwise bool(value)
- ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
- ])
-
-if __name__ == '__main__':
- bulkload.main(person)
-
-See the Loader class for more information. Then, add a handler for it in your
-app.yaml, e.g.:
-
- handlers:
- - url: /load
- script: bulkload.py
- login: admin
-
-Finally, deploy your app and run bulkloader.py. For example, to load the
-file people.csv into a dev_appserver running on your local machine:
-
-./bulkloader.py --filename people.csv --kind Person --cookie ... \
- --url http://localhost:8080/load
-
-The kind parameter is used to look up the Loader instance that will be used.
-The bulkload handler should usually be admin_only, so that non-admins can't use
-the shell to modify your app's data. The bulkload client uses the cookie
-parameter to piggyback its HTTP requests on your login session. A GET request
-to the URL specified for your bulkload script will give you a cookie parameter
-you can use (/load in the example above). If your bulkload handler is not
-admin_only, you may omit the cookie parameter.
-
-If you want to do extra processing before the entities are stored, you can
-subclass Loader and override HandleEntity. HandleEntity is called once with
-each entity that is imported from the CSV data. You can return one or more
-entities from HandleEntity to be stored in its place, or None if nothing
-should be stored.
-
-For example, this loads calendar events and stores them as
-datastore_entities.Event entities. It also populates their author field with a
-reference to the corresponding datastore_entites.Contact entity. If no Contact
-entity exists yet for the given author, it creates one and stores it first.
-
-class EventLoader(bulkload.Loader):
- def __init__(self):
- EventLoader.__init__(self, 'Event',
- [('title', str),
- ('creator', str),
- ('where', str),
- ('startTime', lambda x:
- datetime.datetime.fromtimestamp(float(x))),
- ])
-
- def HandleEntity(self, entity):
- event = datastore_entities.Event(entity.title)
- event.update(entity)
-
- creator = event['creator']
- if creator:
- contact = datastore.Query('Contact', {'title': creator}).Get(1)
- if not contact:
- contact = [datastore_entities.Contact(creator)]
- datastore.Put(contact[0])
- event['author'] = contact[0].key()
-
- return event
-
-if __name__ == '__main__':
- bulkload.main(EventLoader())
+This package contains two separate systems:
+ * The historical and deprecated bulkload/bulkload_client server mix-in,
+ in the 'bulkload.bulkload' module; exposed here for backwards compatibility.
+ * New helpers for the bulkloader client (appengine/tools/bulkloader.py).
+ Many of these helpers can also run on the server, though there is not
+ (as of January 2010) any support for using them there.
"""
+import bulkload_deprecated
+Validate = bulkload_deprecated.Validate
+Loader = bulkload_deprecated.Loader
+BulkLoad = bulkload_deprecated.BulkLoad
+main = bulkload_deprecated.main
-
-
-import Cookie
-import StringIO
-import csv
-import httplib
-import os
-import traceback
-
-import google
-import wsgiref.handlers
-
-from google.appengine.api import datastore
-from google.appengine.ext import webapp
-from google.appengine.ext.bulkload import constants
-
-
-def Validate(value, type):
- """ Checks that value is non-empty and of the right type.
-
- Raises ValueError if value is None or empty, TypeError if it's not the given
- type.
-
- Args:
- value: any value
- type: a type or tuple of types
- """
- if not value:
- raise ValueError('Value should not be empty; received %s.' % value)
- elif not isinstance(value, type):
- raise TypeError('Expected a %s, but received %s (a %s).' %
- (type, value, value.__class__))
-
-
-class Loader(object):
- """A base class for creating datastore entities from input data.
-
- To add a handler for bulk loading a new entity kind into your datastore,
- write a subclass of this class that calls Loader.__init__ from your
- class's __init__.
-
- If you need to run extra code to convert entities from the input
- data, create new properties, or otherwise modify the entities before
- they're inserted, override HandleEntity.
-
- See the CreateEntity method for the creation of entities from the
- (parsed) input data.
- """
-
- __loaders = {}
- __kind = None
- __properties = None
-
- def __init__(self, kind, properties):
- """ Constructor.
-
- Populates this Loader's kind and properties map. Also registers it with
- the bulk loader, so that all you need to do is instantiate your Loader,
- and the bulkload handler will automatically use it.
-
- Args:
- kind: a string containing the entity kind that this loader handles
-
- properties: list of (name, converter) tuples.
-
- This is used to automatically convert the CSV columns into properties.
- The converter should be a function that takes one argument, a string
- value from the CSV file, and returns a correctly typed property value
- that should be inserted. The tuples in this list should match the
- columns in your CSV file, in order.
-
- For example:
- [('name', str),
- ('id_number', int),
- ('email', datastore_types.Email),
- ('user', users.User),
- ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
- ('description', datastore_types.Text),
- ]
- """
- Validate(kind, basestring)
- self.__kind = kind
-
- Validate(properties, list)
- for name, fn in properties:
- Validate(name, basestring)
- assert callable(fn), (
- 'Conversion function %s for property %s is not callable.' % (fn, name))
-
- self.__properties = properties
-
- Loader.__loaders[kind] = self
-
-
- def kind(self):
- """ Return the entity kind that this Loader handes.
- """
- return self.__kind
-
- def CreateEntity(self, values, key_name=None):
- """ Creates an entity from a list of property values.
-
- Args:
- values: list/tuple of str
- key_name: if provided, the name for the (single) resulting Entity
-
- Returns:
- list of datastore.Entity
-
- The returned entities are populated with the property values from the
- argument, converted to native types using the properties map given in
- the constructor, and passed through HandleEntity. They're ready to be
- inserted.
-
- Raises:
- AssertionError if the number of values doesn't match the number
- of properties in the properties map.
- """
- Validate(values, (list, tuple))
- assert len(values) == len(self.__properties), (
- 'Expected %d CSV columns, found %d.' %
- (len(self.__properties), len(values)))
-
- entity = datastore.Entity(self.__kind, name=key_name)
- for (name, converter), val in zip(self.__properties, values):
- if converter is bool and val.lower() in ('0', 'false', 'no'):
- val = False
- entity[name] = converter(val)
-
- entities = self.HandleEntity(entity)
-
- if entities is not None:
- if not isinstance(entities, (list, tuple)):
- entities = [entities]
-
- for entity in entities:
- if not isinstance(entity, datastore.Entity):
- raise TypeError('Expected a datastore.Entity, received %s (a %s).' %
- (entity, entity.__class__))
-
- return entities
-
-
- def HandleEntity(self, entity):
- """ Subclasses can override this to add custom entity conversion code.
-
- This is called for each entity, after its properties are populated from
- CSV but before it is stored. Subclasses can override this to add custom
- entity handling code.
-
- The entity to be inserted should be returned. If multiple entities should
- be inserted, return a list of entities. If no entities should be inserted,
- return None or [].
-
- Args:
- entity: datastore.Entity
-
- Returns:
- datastore.Entity or list of datastore.Entity
- """
- return entity
-
-
- @staticmethod
- def RegisteredLoaders():
- """ Returns a list of the Loader instances that have been created.
- """
- return dict(Loader.__loaders)
-
-
-class BulkLoad(webapp.RequestHandler):
- """A handler for bulk load requests.
-
- This class contains handlers for the bulkloading process. One for
- GET to provide cookie information for the upload script, and one
- handler for a POST request to upload the entities.
-
- In the POST request, the body contains the data representing the
- entities' property values. The original format was a sequences of
- lines of comma-separated values (and is handled by the Load
- method). The current (version 1) format is a binary format described
- in the Tools and Libraries section of the documentation, and is
- handled by the LoadV1 method).
- """
-
- def get(self):
- """ Handle a GET. Just show an info page.
- """
- page = self.InfoPage(self.request.uri)
- self.response.out.write(page)
-
-
- def post(self):
- """ Handle a POST. Reads CSV data, converts to entities, and stores them.
- """
- self.response.headers['Content-Type'] = 'text/plain'
- response, output = self.Load(self.request.get(constants.KIND_PARAM),
- self.request.get(constants.CSV_PARAM))
- self.response.set_status(response)
- self.response.out.write(output)
-
-
- def InfoPage(self, uri):
- """ Renders an information page with the POST endpoint and cookie flag.
-
- Args:
- uri: a string containing the request URI
- Returns:
- A string with the contents of the info page to be displayed
- """
- page = """
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html><head>
-<title>Bulk Loader</title>
-</head><body>"""
-
- page += ('The bulk load endpoint is: <a href="%s">%s</a><br />\n' %
- (uri, uri))
-
- cookies = os.environ.get('HTTP_COOKIE', None)
- if cookies:
- cookie = Cookie.BaseCookie(cookies)
- for param in ['ACSID', 'dev_appserver_login']:
- value = cookie.get(param)
- if value:
- page += ("Pass this flag to the client: --cookie='%s=%s'\n" %
- (param, value.value))
- break
-
- else:
- page += 'No cookie found!\n'
-
- page += '</body></html>'
- return page
-
- def IterRows(self, reader):
- """ Yields a tuple of a line number and row for each row of the CSV data.
-
- Args:
- reader: a csv reader for the input data.
- """
- line_num = 1
- for columns in reader:
- yield (line_num, columns)
- line_num += 1
-
- def LoadEntities(self, iter, loader, key_format=None):
- """Generates entities and loads them into the datastore. Returns
- a tuple of HTTP code and string reply.
-
- Args:
- iter: an iterator yielding pairs of a line number and row contents.
- key_format: a format string to convert a line number into an
- entity id. If None, then entity ID's are automatically generated.
- """
- entities = []
- output = []
- for line_num, columns in iter:
- key_name = None
- if key_format is not None:
- key_name = key_format % line_num
- if columns:
- try:
- output.append('\nLoading from line %d...' % line_num)
- new_entities = loader.CreateEntity(columns, key_name=key_name)
- if new_entities:
- entities.extend(new_entities)
- output.append('done.')
- except:
- stacktrace = traceback.format_exc()
- output.append('error:\n%s' % stacktrace)
- return (httplib.BAD_REQUEST, ''.join(output))
-
- datastore.Put(entities)
-
- return (httplib.OK, ''.join(output))
-
- def Load(self, kind, data):
- """Parses CSV data, uses a Loader to convert to entities, and stores them.
-
- On error, fails fast. Returns a "bad request" HTTP response code and
- includes the traceback in the output.
-
- Args:
- kind: a string containing the entity kind that this loader handles
- data: a string containing the CSV data to load
-
- Returns:
- tuple (response code, output) where:
- response code: integer HTTP response code to return
- output: string containing the HTTP response body
- """
- data = data.encode('utf-8')
- Validate(kind, basestring)
- Validate(data, basestring)
- output = []
-
- try:
- loader = Loader.RegisteredLoaders()[kind]
- except KeyError:
- output.append('Error: no Loader defined for kind %s.' % kind)
- return (httplib.BAD_REQUEST, ''.join(output))
-
- buffer = StringIO.StringIO(data)
- reader = csv.reader(buffer, skipinitialspace=True)
-
- try:
- csv.field_size_limit(800000)
- except AttributeError:
- pass
-
- return self.LoadEntities(self.IterRows(reader), loader)
-
-
-def main(*loaders):
- """Starts bulk upload.
-
- Raises TypeError if not, at least one Loader instance is given.
-
- Args:
- loaders: One or more Loader instance.
- """
- if not loaders:
- raise TypeError('Expected at least one argument.')
-
- for loader in loaders:
- if not isinstance(loader, Loader):
- raise TypeError('Expected a Loader instance; received %r' % loader)
-
- application = webapp.WSGIApplication([('.*', BulkLoad)])
- wsgiref.handlers.CGIHandler().run(application)
-
-if __name__ == '__main__':
- main()
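Since the package __init__ now just re-exports the deprecated symbols,
existing bulkload scripts keep working unchanged. A sketch of the
backwards-compatible import path (the 'Person' kind is hypothetical):

from google.appengine.ext import bulkload

# Resolves to bulkload_deprecated.Loader via the re-exports above.
person = bulkload.Loader('Person', [('name', str)])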
diff --git a/google-appengine/google/appengine/ext/bulkload/bulkload_deprecated.py b/google-appengine/google/appengine/ext/bulkload/bulkload_deprecated.py
new file mode 100755
index 0000000..5eeacae
--- /dev/null
+++ b/google-appengine/google/appengine/ext/bulkload/bulkload_deprecated.py
@@ -0,0 +1,359 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""DEPRECATED mix-in handler for bulk loading data into an application.
+
+Please use the new bulkloader.
+"""
+
+
+
+
+
+import Cookie
+import StringIO
+import csv
+import httplib
+import os
+import traceback
+
+import google
+import wsgiref.handlers
+
+from google.appengine.api import datastore
+from google.appengine.ext import webapp
+from google.appengine.ext.bulkload import constants
+
+
+def Validate(value, type):
+ """ Checks that value is non-empty and of the right type.
+
+ Raises ValueError if value is None or empty, TypeError if it's not the given
+ type.
+
+ Args:
+ value: any value
+ type: a type or tuple of types
+ """
+ if not value:
+ raise ValueError('Value should not be empty; received %s.' % value)
+ elif not isinstance(value, type):
+ raise TypeError('Expected a %s, but received %s (a %s).' %
+ (type, value, value.__class__))
+
+
+class Loader(object):
+ """A base class for creating datastore entities from input data.
+
+ To add a handler for bulk loading a new entity kind into your datastore,
+ write a subclass of this class that calls Loader.__init__ from your
+ class's __init__.
+
+ If you need to run extra code to convert entities from the input
+ data, create new properties, or otherwise modify the entities before
+ they're inserted, override HandleEntity.
+
+ See the CreateEntity method for the creation of entities from the
+ (parsed) input data.
+ """
+
+ __loaders = {}
+ __kind = None
+ __properties = None
+
+ def __init__(self, kind, properties):
+ """ Constructor.
+
+ Populates this Loader's kind and properties map. Also registers it with
+ the bulk loader, so that all you need to do is instantiate your Loader,
+ and the bulkload handler will automatically use it.
+
+ Args:
+ kind: a string containing the entity kind that this loader handles
+
+ properties: list of (name, converter) tuples.
+
+ This is used to automatically convert the CSV columns into properties.
+ The converter should be a function that takes one argument, a string
+ value from the CSV file, and returns a correctly typed property value
+ that should be inserted. The tuples in this list should match the
+ columns in your CSV file, in order.
+
+ For example:
+ [('name', str),
+ ('id_number', int),
+ ('email', datastore_types.Email),
+ ('user', users.User),
+ ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
+ ('description', datastore_types.Text),
+ ]
+ """
+ Validate(kind, basestring)
+ self.__kind = kind
+
+ Validate(properties, list)
+ for name, fn in properties:
+ Validate(name, basestring)
+ assert callable(fn), (
+ 'Conversion function %s for property %s is not callable.' % (fn, name))
+
+ self.__properties = properties
+
+ Loader.__loaders[kind] = self
+
+
+ def kind(self):
+ """ Return the entity kind that this Loader handes.
+ """
+ return self.__kind
+
+ def CreateEntity(self, values, key_name=None):
+ """ Creates an entity from a list of property values.
+
+ Args:
+ values: list/tuple of str
+ key_name: if provided, the name for the (single) resulting Entity
+
+ Returns:
+ list of datastore.Entity
+
+ The returned entities are populated with the property values from the
+ argument, converted to native types using the properties map given in
+ the constructor, and passed through HandleEntity. They're ready to be
+ inserted.
+
+ Raises:
+ AssertionError if the number of values doesn't match the number
+ of properties in the properties map.
+ """
+ Validate(values, (list, tuple))
+ assert len(values) == len(self.__properties), (
+ 'Expected %d CSV columns, found %d.' %
+ (len(self.__properties), len(values)))
+
+ entity = datastore.Entity(self.__kind, name=key_name)
+ for (name, converter), val in zip(self.__properties, values):
+ if converter is bool and val.lower() in ('0', 'false', 'no'):
+ val = False
+ entity[name] = converter(val)
+
+ entities = self.HandleEntity(entity)
+
+ if entities is not None:
+ if not isinstance(entities, (list, tuple)):
+ entities = [entities]
+
+ for entity in entities:
+ if not isinstance(entity, datastore.Entity):
+ raise TypeError('Expected a datastore.Entity, received %s (a %s).' %
+ (entity, entity.__class__))
+
+ return entities
+
+
+ def HandleEntity(self, entity):
+ """ Subclasses can override this to add custom entity conversion code.
+
+ This is called for each entity, after its properties are populated from
+ CSV but before it is stored. Subclasses can override this to add custom
+ entity handling code.
+
+ The entity to be inserted should be returned. If multiple entities should
+ be inserted, return a list of entities. If no entities should be inserted,
+ return None or [].
+
+ Args:
+ entity: datastore.Entity
+
+ Returns:
+ datastore.Entity or list of datastore.Entity
+ """
+ return entity
+
+
+ @staticmethod
+ def RegisteredLoaders():
+ """ Returns a list of the Loader instances that have been created.
+ """
+ return dict(Loader.__loaders)
+
+
+class BulkLoad(webapp.RequestHandler):
+ """A handler for bulk load requests.
+
+ This class contains handlers for the bulkloading process. One for
+ GET to provide cookie information for the upload script, and one
+ handler for a POST request to upload the entities.
+
+ In the POST request, the body contains the data representing the
+ entities' property values. The original format was a sequence of
+ lines of comma-separated values (and is handled by the Load
+ method). The current (version 1) format is a binary format described
+ in the Tools and Libraries section of the documentation, and is
+ handled by the LoadV1 method.
+ """
+
+ def get(self):
+ """ Handle a GET. Just show an info page.
+ """
+ page = self.InfoPage(self.request.uri)
+ self.response.out.write(page)
+
+
+ def post(self):
+ """ Handle a POST. Reads CSV data, converts to entities, and stores them.
+ """
+ self.response.headers['Content-Type'] = 'text/plain'
+ response, output = self.Load(self.request.get(constants.KIND_PARAM),
+ self.request.get(constants.CSV_PARAM))
+ self.response.set_status(response)
+ self.response.out.write(output)
+
+
+ def InfoPage(self, uri):
+ """ Renders an information page with the POST endpoint and cookie flag.
+
+ Args:
+ uri: a string containing the request URI
+ Returns:
+ A string with the contents of the info page to be displayed
+ """
+ page = """
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+ "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html><head>
+<title>Bulk Loader</title>
+</head><body>"""
+
+ page += ('The bulk load endpoint is: <a href="%s">%s</a><br />\n' %
+ (uri, uri))
+
+ cookies = os.environ.get('HTTP_COOKIE', None)
+ if cookies:
+ cookie = Cookie.BaseCookie(cookies)
+ for param in ['ACSID', 'dev_appserver_login']:
+ value = cookie.get(param)
+ if value:
+ page += ("Pass this flag to the client: --cookie='%s=%s'\n" %
+ (param, value.value))
+ break
+
+ else:
+ page += 'No cookie found!\n'
+
+ page += '</body></html>'
+ return page
+
+ def IterRows(self, reader):
+ """ Yields a tuple of a line number and row for each row of the CSV data.
+
+ Args:
+ reader: a csv reader for the input data.
+ """
+ line_num = 1
+ for columns in reader:
+ yield (line_num, columns)
+ line_num += 1
+
+ def LoadEntities(self, iter, loader, key_format=None):
+ """Generates entities and loads them into the datastore. Returns
+ a tuple of HTTP code and string reply.
+
+ Args:
+ iter: an iterator yielding pairs of a line number and row contents.
+ loader: the Loader instance used to convert each row into entities.
+ key_format: a format string to convert a line number into an
+ entity id. If None, then entity IDs are automatically generated.
+ """
+ entities = []
+ output = []
+ for line_num, columns in iter:
+ key_name = None
+ if key_format is not None:
+ key_name = key_format % line_num
+ if columns:
+ try:
+ output.append('\nLoading from line %d...' % line_num)
+ new_entities = loader.CreateEntity(columns, key_name=key_name)
+ if new_entities:
+ entities.extend(new_entities)
+ output.append('done.')
+ except:
+ stacktrace = traceback.format_exc()
+ output.append('error:\n%s' % stacktrace)
+ return (httplib.BAD_REQUEST, ''.join(output))
+
+ datastore.Put(entities)
+
+ return (httplib.OK, ''.join(output))
+
+ def Load(self, kind, data):
+ """Parses CSV data, uses a Loader to convert to entities, and stores them.
+
+ On error, fails fast. Returns a "bad request" HTTP response code and
+ includes the traceback in the output.
+
+ Args:
+ kind: a string containing the entity kind that this loader handles
+ data: a string containing the CSV data to load
+
+ Returns:
+ tuple (response code, output) where:
+ response code: integer HTTP response code to return
+ output: string containing the HTTP response body
+ """
+ data = data.encode('utf-8')
+ Validate(kind, basestring)
+ Validate(data, basestring)
+ output = []
+
+ try:
+ loader = Loader.RegisteredLoaders()[kind]
+ except KeyError:
+ output.append('Error: no Loader defined for kind %s.' % kind)
+ return (httplib.BAD_REQUEST, ''.join(output))
+
+ buffer = StringIO.StringIO(data)
+ reader = csv.reader(buffer, skipinitialspace=True)
+
+ try:
+ csv.field_size_limit(800000)
+ except AttributeError:
+ pass
+
+ return self.LoadEntities(self.IterRows(reader), loader)
+
+
+def main(*loaders):
+ """Starts bulk upload.
+
+ Raises TypeError unless at least one Loader instance is given.
+
+ Args:
+ loaders: One or more Loader instances.
+ """
+ if not loaders:
+ raise TypeError('Expected at least one argument.')
+
+ for loader in loaders:
+ if not isinstance(loader, Loader):
+ raise TypeError('Expected a Loader instance; received %r' % loader)
+
+ application = webapp.WSGIApplication([('.*', BulkLoad)])
+ wsgiref.handlers.CGIHandler().run(application)
+
+if __name__ == '__main__':
+ main()
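For reference, a minimal sketch of subclassing the deprecated Loader to
post-process rows before they are stored (the 'Person' kind and the
name-filtering rule are hypothetical):

import datetime

from google.appengine.ext import bulkload


class PersonLoader(bulkload.Loader):
  def __init__(self):
    bulkload.Loader.__init__(self, 'Person',
        [('name', str),
         ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x)))])

  def HandleEntity(self, entity):
    # Skip rows with an empty name instead of storing them.
    if not entity['name']:
      return None
    return entity


if __name__ == '__main__':
  bulkload.main(PersonLoader())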
diff --git a/google-appengine/google/appengine/ext/db/__init__.py b/google-appengine/google/appengine/ext/db/__init__.py
index aea630a..7ecce9d 100755
--- a/google-appengine/google/appengine/ext/db/__init__.py
+++ b/google-appengine/google/appengine/ext/db/__init__.py
@@ -109,6 +109,7 @@ BadKeyError = datastore_errors.BadKeyError
InternalError = datastore_errors.InternalError
NeedIndexError = datastore_errors.NeedIndexError
Timeout = datastore_errors.Timeout
+CommittedButStillApplying = datastore_errors.CommittedButStillApplying
ValidationError = BadValueError
@@ -129,6 +130,9 @@ BlobKey = datastore_types.BlobKey
READ_CAPABILITY = datastore.READ_CAPABILITY
WRITE_CAPABILITY = datastore.WRITE_CAPABILITY
+STRONG_CONSISTENCY = datastore.STRONG_CONSISTENCY
+EVENTUAL_CONSISTENCY = datastore.EVENTUAL_CONSISTENCY
+
_kind_map = {}
@@ -338,6 +342,33 @@ def _initialize_properties(model_class, name, bases, dct):
name for name, prop in model_class._properties.items() if not prop.indexed)
+def _coerce_to_key(value):
+ """Returns the value's key.
+
+ Args:
+ value: a Model or Key instance or string encoded key or None
+
+ Returns:
+ The corresponding key, or None if value is None.
+ """
+ if value is None:
+ return None
+
+ value, multiple = datastore.NormalizeAndTypeCheck(
+ value, (Model, Key, basestring))
+
+ if len(value) > 1:
+ raise datastore_errors.BadArgumentError('Expected only one model or key')
+ value = value[0]
+
+ if isinstance(value, Model):
+ return value.key()
+ elif isinstance(value, basestring):
+ return Key(value)
+ else:
+ return value
+
+
class PropertiedClass(type):
"""Meta-class for initializing Model classes properties.
@@ -847,6 +878,9 @@ class Model(object):
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
datastore.Delete(self.key(), rpc=rpc)
+ self._key = self.key()
+ self._key_name = None
+ self._parent_key = None
self._entity = None
@@ -979,9 +1013,12 @@ class Model(object):
key_names: A single key-name or a list of key-names.
parent: Parent of instances to get. Can be a model or key.
"""
+ try:
+ parent = _coerce_to_key(parent)
+ except BadKeyError, e:
+ raise BadArgumentError(str(e))
+
rpc = datastore.GetRpcFromKwargs(kwargs)
- if isinstance(parent, Model):
- parent = parent.key()
key_names, multiple = datastore.NormalizeAndTypeCheck(key_names, basestring)
keys = [datastore.Key.from_path(cls.kind(), name, parent=parent)
for name in key_names]
@@ -1170,18 +1207,21 @@ class Model(object):
return cls.properties()
-def create_rpc(deadline=None, callback=None):
+def create_rpc(deadline=None, callback=None, read_policy=STRONG_CONSISTENCY):
"""Create an rpc for use in configuring datastore calls.
Args:
deadline: float, deadline for calls in seconds.
callback: callable, a callback triggered when this rpc completes,
accepts one argument: the returned rpc.
+ read_policy: flag, set to EVENTUAL_CONSISTENCY to enable eventually
+ consistent reads.
Returns:
A datastore.DatastoreRPC instance.
"""
- return datastore.CreateRPC(deadline=deadline, callback=callback)
+ return datastore.CreateRPC(
+ deadline=deadline, callback=callback, read_policy=read_policy)
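A short sketch of the new read_policy option (fetch_eventually is a made-up
name, and this assumes db.get accepts the rpc keyword the same way delete
does above):

from google.appengine.ext import db

def fetch_eventually(keys):
  # keys: a list of db.Key. An eventually consistent read may return
  # slightly stale entities in exchange for lower latency.
  rpc = db.create_rpc(deadline=5, read_policy=db.EVENTUAL_CONSISTENCY)
  return db.get(keys, rpc=rpc)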
def get(keys, **kwargs):
"""Fetch the specific Model instance with the given key from the datastore.
@@ -1254,21 +1294,16 @@ def delete(models, **kwargs):
TransactionFailedError if the data could not be committed.
"""
rpc = datastore.GetRpcFromKwargs(kwargs)
- models_or_keys, multiple = datastore.NormalizeAndTypeCheck(
- models, (Model, Key, basestring))
- keys = []
- for model_or_key in models_or_keys:
- if isinstance(model_or_key, Model):
- key = model_or_key = model_or_key.key()
- elif isinstance(model_or_key, basestring):
- key = model_or_key = Key(model_or_key)
- else:
- key = model_or_key
- keys.append(key)
+
+ if not isinstance(models, (list, tuple)):
+ models = [models]
+ keys = [_coerce_to_key(v) for v in models]
+
datastore.Delete(keys, rpc=rpc)
+
def allocate_ids(model, size, **kwargs):
- """Allocates a range of IDs of size for the model_key defined by model
+ """Allocates a range of IDs of size for the model_key defined by model.
Allocates a range of IDs in the datastore such that those IDs will not
be automatically assigned to new entities. You can only allocate IDs
@@ -1276,25 +1311,15 @@ def allocate_ids(model, size, **kwargs):
datastore_errors.Error.
Args:
- model: Model instance, Key or string to serve as a model specifying the
- ID sequence in which to allocate IDs.
+ model: Model instance, Key or string to serve as a template specifying the
+      ID sequence in which to allocate IDs. Returned IDs should only be used
+ in entities with the same parent (if any) and kind as this key.
Returns:
(start, end) of the allocated range, inclusive.
"""
- rpc = datastore.GetRpcFromKwargs(kwargs)
- models_or_keys, multiple = datastore.NormalizeAndTypeCheck(
- model, (Model, Key, basestring))
- keys = []
- for model_or_key in models_or_keys:
- if isinstance(model_or_key, Model):
- key = model_or_key = model_or_key.key()
- elif isinstance(model_or_key, basestring):
- key = model_or_key = Key(model_or_key)
- else:
- key = model_or_key
- keys.append(key)
- return datastore.AllocateIds(keys, size, rpc=rpc)
+ return datastore.AllocateIds(_coerce_to_key(model), size, **kwargs)
+
class Expando(Model):
"""Dynamically expandable model.
@@ -1392,7 +1417,8 @@ class Expando(Model):
ValueError on attempt to assign empty list.
"""
check_reserved_word(key)
- if key[:1] != '_' and key not in self.properties():
+    if (key[:1] != '_' and
+        not hasattr(getattr(type(self), key, None), '__set__')):
if value == []:
raise ValueError('Cannot store empty list to dynamic property %s' %
key)
@@ -1405,6 +1434,31 @@ class Expando(Model):
else:
super(Expando, self).__setattr__(key, value)
+ def __getattribute__(self, key):
+ """Get attribute from expando.
+
+ Must be overridden to allow dynamic properties to obscure class attributes.
+    Dynamic property values live in self._dynamic_properties, which the
+    default __getattribute__ never consults: it finds and returns the class
+    attribute first, so __getattr__ (which does consult
+    self._dynamic_properties) is never reached.
+
+ This method short circuits the usual __getattribute__ call when finding a
+ dynamic property and returns it to the user via __getattr__. __getattr__
+ is called to preserve backward compatibility with older Expando models
+ that may have overridden the original __getattr__.
+
+    NOTE: Access to properties defined by Python descriptors is not obscured
+    because setting those attributes is done through the descriptor and does
+    not place those attributes in self._dynamic_properties.
+ """
+ if not key.startswith('_'):
+ dynamic_properties = self._dynamic_properties
+ if dynamic_properties is not None and key in dynamic_properties:
+ return self.__getattr__(key)
+
+ return super(Expando, self).__getattribute__(key)
+
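A sketch of the shadowing behavior the new __getattribute__ enables; MyExpando and its class attribute are hypothetical:

    class MyExpando(Expando):
      greeting = 'class attribute'    # plain attribute, not a descriptor

    e = MyExpando()
    e.greeting = 'dynamic'            # stored in _dynamic_properties
    assert e.greeting == 'dynamic'    # dynamic value obscures the class attribute
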
def __getattr__(self, key):
"""If no explicit attribute defined, retrieve value from entity.
@@ -1418,8 +1472,9 @@ class Expando(Model):
AttributeError when there is no attribute for key on object or
contained entity.
"""
- if self._dynamic_properties and key in self._dynamic_properties:
- return self._dynamic_properties[key]
+ _dynamic_properties = self._dynamic_properties
+ if _dynamic_properties is not None and key in _dynamic_properties:
+ return _dynamic_properties[key]
else:
return getattr(super(Expando, self), key)
@@ -1529,6 +1584,9 @@ class _BaseQuery(object):
If you know the number of results you need, consider fetch() instead,
or use a GQL query with a LIMIT clause. It's more efficient.
+ Args:
+ rpc: datastore.DatastoreRPC to use for this request.
+
Returns:
Iterator for this query.
"""
@@ -1597,6 +1655,7 @@ class _BaseQuery(object):
Args:
limit: Maximum number of results to return.
offset: Optional number of results to skip first; default zero.
+ rpc: datastore.DatastoreRPC to use for this request.
Returns:
A list of db.Model instances. There may be fewer than 'limit'
@@ -2081,16 +2140,35 @@ class GqlQuery(_BaseQuery):
for name, arg in kwds.iteritems():
self._kwds[name] = _normalize_query_parameter(arg)
- def run(self):
- """Override _BaseQuery.run() so the LIMIT clause is handled properly."""
- query_run = self._proto_query.Run(*self._args, **self._kwds)
- if self._keys_only:
- return query_run
+ def run(self, **kwargs):
+ """Iterator for this query that handles the LIMIT clause property.
+
+ If the GQL query string contains a LIMIT clause, this function fetches
+ all results before returning an iterator. Otherwise results are retrieved
+ in batches by the iterator.
+
+ Args:
+ rpc: datastore.DatastoreRPC to use for this request.
+
+ Returns:
+ Iterator for this query.
+ """
+ if self._proto_query.limit() >= 0:
+ return iter(self.fetch(limit=self._proto_query.limit(),
+ offset=self._proto_query.offset(),
+ **kwargs))
else:
- return _QueryIterator(self._model_class, iter(query_run))
+ results = _BaseQuery.run(self, **kwargs)
+ try:
+ for _ in xrange(self._proto_query.offset()):
+ results.next()
+ except StopIteration:
+ pass
+
+ return results
def _get_query(self):
- return self._proto_query.Bind(self._args, self._kwds)
+ return self._proto_query.Bind(self._args, self._kwds, self._cursor)
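A sketch of the resulting behavior; Greeting and handle are placeholders:

    q = GqlQuery('SELECT * FROM Greeting LIMIT 10 OFFSET 5')
    for greeting in q.run():  # with LIMIT: pre-fetched via fetch(10, offset=5)
      handle(greeting)
    # Without a LIMIT clause, run() streams results in batches and skips
    # the OFFSET rows client-side instead.
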
class UnindexedProperty(Property):
diff --git a/google-appengine/google/appengine/ext/db/stats.py b/google-appengine/google/appengine/ext/db/stats.py
index fdd95d3..b309d6a 100755
--- a/google-appengine/google/appengine/ext/db/stats.py
+++ b/google-appengine/google/appengine/ext/db/stats.py
@@ -24,10 +24,7 @@ application's datastore by offline processes run by the Google App Engine team.
-try:
- from google.appengine.ext import db
-except ImportError:
- from google.appengine.ext import db
+from google.appengine.ext import db
class BaseStatistic(db.Model):
diff --git a/google-appengine/google/appengine/ext/gql/__init__.py b/google-appengine/google/appengine/ext/gql/__init__.py
index 39c0e78..7e5f3d0 100755
--- a/google-appengine/google/appengine/ext/gql/__init__.py
+++ b/google-appengine/google/appengine/ext/gql/__init__.py
@@ -195,7 +195,7 @@ class GQL(object):
else:
pass
- def Bind(self, args, keyword_args):
+ def Bind(self, args, keyword_args, cursor=None):
"""Bind the existing query to the argument list.
Assumes that the input args are first positional, then a dictionary.
@@ -228,8 +228,10 @@ class GQL(object):
query_count = 1
for i in xrange(query_count):
- queries.append(datastore.Query(self._entity, _app=self.__app,
- keys_only=self._keys_only))
+ queries.append(datastore.Query(self._entity,
+ _app=self.__app,
+ keys_only=self._keys_only,
+ cursor=cursor))
logging.log(LOG_LEVEL,
'Binding with %i positional args %s and %i keywords %s'
@@ -646,9 +648,7 @@ class GQL(object):
"""
bind_results = self.Bind(args, keyword_args)
- offset = 0
- if self.__offset != -1:
- offset = self.__offset
+ offset = self.offset()
if self.__limit == -1:
it = bind_results.Run()
@@ -675,6 +675,13 @@ class GQL(object):
"""Return numerical result count limit."""
return self.__limit
+ def offset(self):
+ """Return numerical result offset."""
+ if self.__offset == -1:
+ return 0
+ else:
+ return self.__offset
+
def orderings(self):
"""Return the result ordering list."""
return self.__orderings
diff --git a/google-appengine/google/appengine/ext/remote_api/handler.py b/google-appengine/google/appengine/ext/remote_api/handler.py
index 26ffc10..2c7ab7e 100755
--- a/google-appengine/google/appengine/ext/remote_api/handler.py
+++ b/google-appengine/google/appengine/ext/remote_api/handler.py
@@ -229,6 +229,8 @@ SERVICE_PB_MAP = {
'taskqueue': {
'Add': (taskqueue_service_pb.TaskQueueAddRequest,
taskqueue_service_pb.TaskQueueAddResponse),
+ 'BulkAdd': (taskqueue_service_pb.TaskQueueBulkAddRequest,
+ taskqueue_service_pb.TaskQueueBulkAddResponse),
'UpdateQueue':(taskqueue_service_pb.TaskQueueUpdateQueueRequest,
taskqueue_service_pb.TaskQueueUpdateQueueResponse),
'FetchQueues':(taskqueue_service_pb.TaskQueueFetchQueuesRequest,
diff --git a/google-appengine/google/appengine/ext/remote_api/remote_api_pb.py b/google-appengine/google/appengine/ext/remote_api/remote_api_pb.py
index bd6a777..bd6a777 100644..100755
--- a/google-appengine/google/appengine/ext/remote_api/remote_api_pb.py
+++ b/google-appengine/google/appengine/ext/remote_api/remote_api_pb.py
diff --git a/google-appengine/google/appengine/ext/remote_api/remote_api_stub.py b/google-appengine/google/appengine/ext/remote_api/remote_api_stub.py
index 1b8da9c..1b992f2 100755
--- a/google-appengine/google/appengine/ext/remote_api/remote_api_stub.py
+++ b/google-appengine/google/appengine/ext/remote_api/remote_api_stub.py
@@ -450,7 +450,8 @@ def ConfigureRemoteApi(app_id,
rtok=None,
secure=False,
services=None,
- default_auth_domain=None):
+ default_auth_domain=None,
+ save_cookies=False):
"""Does necessary setup to allow easy remote access to App Engine APIs.
Either servername must be provided or app_id must not be None. If app_id
@@ -474,6 +475,7 @@ def ConfigureRemoteApi(app_id,
services: A list of services to set up stubs for. If specified, only those
services are configured; by default all supported services are configured.
default_auth_domain: The authentication domain to use by default.
+ save_cookies: Forwarded to rpc_server_factory function.
Raises:
urllib2.HTTPError: if app_id is not provided and there is an error while
@@ -485,7 +487,8 @@ def ConfigureRemoteApi(app_id,
if not servername:
servername = '%s.appspot.com' % (app_id,)
server = rpc_server_factory(servername, auth_func, GetUserAgent(),
- GetSourceName(), debug_data=False, secure=secure)
+ GetSourceName(), save_cookies=save_cookies,
+ debug_data=False, secure=secure)
if not app_id:
if not rtok:
random.seed()
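A usage sketch for the new save_cookies flag, matching the remote_api_shell.py call further below; the app id, path, and auth_func are placeholders:

    remote_api_stub.ConfigureRemoteApi('myapp', '/remote_api', auth_func,
                                       save_cookies=True)
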
diff --git a/google-appengine/google/appengine/ext/webapp/__init__.py b/google-appengine/google/appengine/ext/webapp/__init__.py
index 446475a..cc025e4 100755
--- a/google-appengine/google/appengine/ext/webapp/__init__.py
+++ b/google-appengine/google/appengine/ext/webapp/__init__.py
@@ -66,9 +66,12 @@ import sys
import traceback
import urlparse
import webob
+import wsgiref.handlers
import wsgiref.headers
import wsgiref.util
+wsgiref.handlers.BaseHandler.os_environ = {}
+
RE_FIND_GROUPS = re.compile('\(.*?\)')
_CHARSET_RE = re.compile(r';\s*charset=([^;\s]*)', re.I)
@@ -473,7 +476,8 @@ class WSGIApplication(object):
"""Initializes this application with the given URL mapping.
Args:
- url_mapping: list of (URI, RequestHandler) pairs (e.g., [('/', ReqHan)])
+ url_mapping: list of (URI regular expression, RequestHandler) pairs
+ (e.g., [('/', ReqHan)])
debug: if true, we send Python stack traces to the browser on errors
"""
self._init_url_mappings(url_mapping)
@@ -541,7 +545,12 @@ class WSGIApplication(object):
for regexp, handler in handler_tuples:
- handler_map[handler.__name__] = handler
+ try:
+ handler_name = handler.__name__
+ except AttributeError:
+ pass
+ else:
+ handler_map[handler_name] = handler
if not regexp.startswith('^'):
regexp = '^' + regexp
diff --git a/google-appengine/google/appengine/ext/webapp/blobstore_handlers.py b/google-appengine/google/appengine/ext/webapp/blobstore_handlers.py
index 2d6e73b..a8f1985 100755
--- a/google-appengine/google/appengine/ext/webapp/blobstore_handlers.py
+++ b/google-appengine/google/appengine/ext/webapp/blobstore_handlers.py
@@ -19,26 +19,119 @@
Contains handlers to help with uploading and downloading blobs.
+Public Classes:
BlobstoreDownloadHandler: Has helper method for easily sending blobs
to client.
BlobstoreUploadHandler: Handler for receiving upload notification requests.
+
+Public Exceptions (indentation indicates class hierarchy):
+ Error: Base class for service handler errors.
+ RangeFormatError: Raised when Range header has invalid format.
+ UnsupportedRangeFormatError: Raised when range header has valid format
+ but a particular feature or unit type is not supported.
"""
import cgi
+import cStringIO
+import re
+import sys
from google.appengine.ext import blobstore
from google.appengine.ext import webapp
+from webob import byterange
+
+
+__all__ = [
+ 'Error',
+ 'RangeFormatError',
+ 'UnsupportedRangeFormatError',
+
+ 'BlobstoreDownloadHandler',
+ 'BlobstoreUploadHandler',
+]
+
_CONTENT_DISPOSITION_FORMAT = 'attachment; filename="%s"'
+_SEND_BLOB_PARAMETERS = frozenset(['use_range'])
+
+_RANGE_NUMERIC_FORMAT = r'([0-9]*)-([0-9]*)'
+_RANGE_FORMAT = r'([a-zA-Z]+)=%s' % _RANGE_NUMERIC_FORMAT
+_RANGE_FORMAT_REGEX = re.compile('^%s$' % _RANGE_FORMAT)
+_UNSUPPORTED_RANGE_FORMAT_REGEX = re.compile(
+ '^%s(?:,%s)+$' % (_RANGE_FORMAT, _RANGE_NUMERIC_FORMAT))
+_BYTES_UNIT = 'bytes'
+
+
+class Error(Exception):
+ """Base class for all errors in blobstore handlers module."""
+
+
+class RangeFormatError(Error):
+  """Raised when the Range header is incorrectly formatted."""
+
+
+class UnsupportedRangeFormatError(RangeFormatError):
+ """Raised when Range format is correct, but not supported."""
+
+
+def _check_ranges(start, end, use_range, range_header):
+ """Set the range header.
+
+ Args:
+ start: As passed in from send_blob.
+ end: As passed in from send_blob.
+    use_range: As passed in from send_blob.
+ range_header: Range header as received in HTTP request.
+
+ Returns:
+ Range header appropriate for placing in blobstore.BLOB_RANGE_HEADER.
+
+ Raises:
+    ValueError if parameters are incorrect. This happens when:
+ - start > end.
+ - start < 0 and end is also provided.
+ - end < 0
+ - If index provided AND using the HTTP header, they don't match.
+ This is a safeguard.
+ """
+ if end is not None and start is None:
+ raise ValueError('May not specify end value without start.')
+
+ use_indexes = start is not None
+ if use_indexes:
+ if end is not None:
+ if start > end:
+        raise ValueError('start must be <= end.')
+
+ range_indexes = byterange.Range.serialize_bytes(_BYTES_UNIT, [(start, end)])
+
+ if use_range and use_indexes:
+ if range_header != range_indexes:
+ raise ValueError('May not provide non-equivalent range indexes and '
+ 'range headers: (header) %s != (indexes) %s'
+ % (range_header, range_indexes))
+
+ if use_range and range_header is not None:
+ return range_header
+ elif use_indexes:
+ return range_indexes
+ else:
+ return None
+
+
class BlobstoreDownloadHandler(webapp.RequestHandler):
"""Base class for creating handlers that may send blobs to users."""
- def send_blob(self, blob_key_or_info, content_type=None, save_as=None):
+ def send_blob(self,
+ blob_key_or_info,
+ content_type=None,
+ save_as=None,
+ **kwargs):
"""Send a blob-response based on a blob_key.
Sets the correct response header for serving a blob. If BlobInfo
@@ -52,9 +145,24 @@ class BlobstoreDownloadHandler(webapp.RequestHandler):
filename to save-as. If string is provided, use string as filename.
If None or False, do not send as attachment.
- Raises:
- ValueError on invalid save_as parameter.
+ Raises:
+ ValueError on invalid save_as parameter.
+ UnsupportedRangeFormatError: If the range format in the header is
+ valid, but not supported.
+ RangeFormatError: If the range format in the header is not valid.
"""
+ if set(kwargs) - _SEND_BLOB_PARAMETERS:
+ invalid_keywords = []
+ for keyword in kwargs:
+ if keyword not in _SEND_BLOB_PARAMETERS:
+ invalid_keywords.append(keyword)
+ if len(invalid_keywords) == 1:
+ raise TypeError('send_blob got unexpected keyword argument %s.'
+ % invalid_keywords[0])
+ else:
+ raise TypeError('send_blob got unexpected keyword arguments: %s'
+ % sorted(invalid_keywords))
+
if isinstance(blob_key_or_info, blobstore.BlobInfo):
blob_key = blob_key_or_info.key()
blob_info = blob_key_or_info
@@ -86,10 +194,51 @@ class BlobstoreDownloadHandler(webapp.RequestHandler):
if not blob_info:
raise ValueError('Expected BlobInfo value for blob_key_or_info.')
else:
- raise ValueError('Unexpected value for save_as')
+ raise ValueError('Unexpected value for save_as.')
self.response.clear()
+ def get_range(self):
+ """Get range from header if it exists.
+
+ Returns:
+ Tuple (start, end):
+      start: Start index, or None if not provided.
+      end: End index, or None if not provided.
+    None if the request has no Range header.
+
+ Raises:
+ UnsupportedRangeFormatError: If the range format in the header is
+ valid, but not supported.
+ RangeFormatError: If the range format in the header is not valid.
+ """
+ range_header = self.request.headers.get('range', None)
+ if range_header is None:
+ return None
+
+ try:
+ original_stdout = sys.stdout
+ sys.stdout = cStringIO.StringIO()
+ try:
+ parsed_range = byterange.Range.parse_bytes(range_header)
+ finally:
+ sys.stdout = original_stdout
+ except TypeError, err:
+ raise RangeFormatError('Invalid range header: %s' % err)
+ if parsed_range is None:
+ raise RangeFormatError('Invalid range header: %s' % range_header)
+
+ units, ranges = parsed_range
+ if len(ranges) != 1:
+ raise UnsupportedRangeFormatError(
+ 'Unable to support multiple range values in Range header.')
+
+ if units != _BYTES_UNIT:
+      raise UnsupportedRangeFormatError(
+          'Invalid unit in Range header: %s' % range_header)
+
+ return ranges[0]
+
class BlobstoreUploadHandler(webapp.RequestHandler):
"""Base class for creation blob upload handlers."""
diff --git a/google-appengine/google/appengine/tools/adaptive_thread_pool.py b/google-appengine/google/appengine/tools/adaptive_thread_pool.py
index 8458289..25bcdc2 100755
--- a/google-appengine/google/appengine/tools/adaptive_thread_pool.py
+++ b/google-appengine/google/appengine/tools/adaptive_thread_pool.py
@@ -385,7 +385,8 @@ class ThreadGate(object):
self.__thread_semaphore.acquire()
if self.__backoff_time > 0.0:
if not threading.currentThread().exit_flag:
- logger.info('Backing off due to errors: %.1f seconds',
+ logger.info('[%s] Backing off due to errors: %.1f seconds',
+ threading.currentThread().getName(),
self.__backoff_time)
self.__sleep(self.__backoff_time)
diff --git a/google-appengine/google/appengine/tools/appcfg.py b/google-appengine/google/appengine/tools/appcfg.py
index c6aacc2..4f828ab 100755
--- a/google-appengine/google/appengine/tools/appcfg.py
+++ b/google-appengine/google/appengine/tools/appcfg.py
@@ -204,12 +204,14 @@ def GetVersionObject(isfile=os.path.isfile, open_fn=open):
return version
-def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable_func):
+def RetryWithBackoff(initial_delay, backoff_factor, max_delay, max_tries,
+ callable_func):
"""Calls a function multiple times, backing off more and more each time.
Args:
initial_delay: Initial delay after first try, in seconds.
backoff_factor: Delay will be multiplied by this factor after each try.
+    max_delay: Maximum delay between tries, in seconds.
max_tries: Maximum number of tries.
callable_func: The method to call, will pass no arguments.
@@ -220,12 +222,18 @@ def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable_func):
Whatever the function raises--an exception will immediately stop retries.
"""
delay = initial_delay
- while not callable_func() and max_tries > 0:
+ if callable_func():
+ return True
+ while max_tries > 1:
StatusUpdate('Will check again in %s seconds.' % delay)
time.sleep(delay)
delay *= backoff_factor
+ if max_delay and delay > max_delay:
+ delay = max_delay
max_tries -= 1
- return max_tries > 0
+ if callable_func():
+ return True
+ return False
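A sketch of the revised schedule: delays of 1, 2, 4, ... seconds, capped at max_delay, for at most max_tries calls of callable_func, mirroring the Commit() change further below; check_ready is a placeholder predicate:

    if not RetryWithBackoff(1, 2, 60, 20, check_ready):
      raise Exception('Version not ready.')
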
def _VersionList(release):
@@ -603,6 +611,11 @@ class DosEntryUpload(object):
def DoUpload(self):
"""Uploads the dos entries."""
+ StatusUpdate('Uploading DOS entries.')
+ self.server.Send('/api/dos/update',
+ app_id=self.config.application,
+ version=self.config.version,
+ payload=self.dos.ToYAML())
class IndexOperation(object):
@@ -764,7 +777,8 @@ class LogsRequester(object):
"""Provide facilities to export request logs."""
def __init__(self, server, config, output_file,
- num_days, append, severity, now, vhost, include_vhost):
+ num_days, append, severity, end, vhost, include_vhost,
+ time_func=time.time):
"""Constructor.
Args:
@@ -775,9 +789,10 @@ class LogsRequester(object):
num_days: Number of days worth of logs to export; 0 for all available.
append: True if appending to an existing file.
severity: App log severity to request (0-4); None for no app logs.
- now: POSIX timestamp used for calculating valid dates for num_days.
+ end: date object representing last day of logs to return.
vhost: The virtual host of log messages to get. None for all hosts.
include_vhost: If true, the virtual host is included in log messages.
+    time_func: Method that returns a timestamp representing now (for testing).
"""
self.server = server
self.config = config
@@ -793,15 +808,18 @@ class LogsRequester(object):
if self.append:
self.sentinel = FindSentinel(self.output_file)
self.write_mode = 'a'
+
+ self.skip_until = False
+ now = PacificDate(time_func())
+ if end < now:
+ self.skip_until = end
+ else:
+ end = now
+
self.valid_dates = None
if self.num_days:
- patterns = []
- now = PacificTime(now)
- for i in xrange(self.num_days):
- then = time.gmtime(now - 24*3600 * i)
- patterns.append(re.escape(time.strftime('%d/%m/%Y', then)))
- patterns.append(re.escape(time.strftime('%d/%b/%Y', then)))
- self.valid_dates = re.compile(r'[^[]+\[(' + '|'.join(patterns) + r'):')
+ start = end - datetime.timedelta(self.num_days - 1)
+ self.valid_dates = (start, end)
def DownloadLogs(self):
"""Download the requested logs.
@@ -813,13 +831,14 @@ class LogsRequester(object):
StatusUpdate('Downloading request logs for %s %s.' %
(self.config.application, self.version_id))
tf = tempfile.TemporaryFile()
- offset = None
+ last_offset = None
try:
while True:
try:
- offset = self.RequestLogLines(tf, offset)
- if not offset:
+ new_offset = self.RequestLogLines(tf, last_offset)
+ if not new_offset or new_offset == last_offset:
break
+ last_offset = new_offset
except KeyboardInterrupt:
StatusUpdate('Keyboard interrupt; saving data downloaded so far.')
break
@@ -858,7 +877,7 @@ class LogsRequester(object):
logging.info('Request with offset %r.', offset)
kwds = {'app_id': self.config.application,
'version': self.version_id,
- 'limit': 100,
+ 'limit': 1000,
}
if offset:
kwds['offset'] = offset
@@ -882,14 +901,27 @@ class LogsRequester(object):
del lines[-1]
valid_dates = self.valid_dates
sentinel = self.sentinel
+ skip_until = self.skip_until
len_sentinel = None
if sentinel:
len_sentinel = len(sentinel)
for line in lines:
- if ((sentinel and
- line.startswith(sentinel) and
- line[len_sentinel : len_sentinel+1] in ('', '\0')) or
- (valid_dates and not valid_dates.match(line))):
+ if (sentinel and
+ line.startswith(sentinel) and
+ line[len_sentinel : len_sentinel+1] in ('', '\0')):
+ return None
+
+ linedate = DateOfLogLine(line)
+ if not linedate:
+ continue
+
+ if skip_until:
+ if linedate > skip_until:
+ continue
+ else:
+ self.skip_until = skip_until = False
+
+ if valid_dates and not valid_dates[0] <= linedate <= valid_dates[1]:
return None
tf.write(line + '\n')
if not lines:
@@ -897,6 +929,35 @@ class LogsRequester(object):
return offset
+def DateOfLogLine(line):
+ """Returns a date object representing the log line's timestamp.
+
+ Args:
+ line: a log line string.
+ Returns:
+ A date object representing the timestamp or None if parsing fails.
+ """
+ m = re.compile(r'[^[]+\[(\d+/[A-Za-z]+/\d+):[^\d]*').match(line)
+ if not m:
+ return None
+ try:
+ return datetime.date(*time.strptime(m.group(1), '%d/%b/%Y')[:3])
+ except ValueError:
+ return None
+
+
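A worked example against a hypothetical request-log line:

    line = '0.1.0.2 - - [28/Apr/2010:05:37:57 -0700] "GET / HTTP/1.1" 200 -'
    DateOfLogLine(line)    # -> datetime.date(2010, 4, 28)
    DateOfLogLine('junk')  # -> None; no bracketed timestamp to parse
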
+def PacificDate(now):
+ """For a UTC timestamp, return the date in the US/Pacific timezone.
+
+ Args:
+ now: A posix timestamp giving current UTC time.
+
+ Returns:
+ A date object representing what day it is in the US/Pacific timezone.
+ """
+ return datetime.date(*time.gmtime(PacificTime(now))[:3])
+
+
def PacificTime(now):
"""Helper to return the number of seconds between UTC and Pacific time.
@@ -1314,6 +1375,38 @@ class AppVersionUpload(object):
else:
self.blob_batcher.AddToBatch(path, payload, mime_type)
+ def Precompile(self):
+ """Handle bytecode precompilation."""
+ StatusUpdate('Precompilation starting.')
+ files = []
+ while True:
+ if files:
+ StatusUpdate('Precompilation: %d files left.' % len(files))
+ files = self.PrecompileBatch(files)
+ if not files:
+ break
+ StatusUpdate('Precompilation completed.')
+
+ def PrecompileBatch(self, files):
+ """Precompile a batch of files.
+
+ Args:
+ files: Either an empty list (for the initial request) or a list
+ of files to be precompiled.
+
+ Returns:
+ Either an empty list (if no more files need to be precompiled)
+ or a list of files to be precompiled subsequently.
+ """
+ payload = LIST_DELIMITER.join(files)
+ response = self.server.Send('/api/appversion/precompile',
+ app_id=self.app_id,
+ version=self.version,
+ payload=payload)
+ if not response:
+ return []
+ return response.split(LIST_DELIMITER)
+
def Commit(self):
"""Commits the transaction, making the new app version available.
@@ -1331,7 +1424,7 @@ class AppVersionUpload(object):
try:
self.Deploy()
- if not RetryWithBackoff(1, 2, 8, self.IsReady):
+ if not RetryWithBackoff(1, 2, 60, 20, self.IsReady):
logging.warning('Version still not ready to serve, aborting.')
raise Exception('Version not ready.')
self.StartServing()
@@ -1457,6 +1550,10 @@ class AppVersionUpload(object):
self.blob_batcher.Flush()
StatusUpdate('Uploaded %d files and blobs' % num_files)
+ if (self.config.derived_file_type and
+ appinfo.PYTHON_PRECOMPILED in self.config.derived_file_type):
+ self.Precompile()
+
self.Commit()
except KeyboardInterrupt:
@@ -2121,7 +2218,7 @@ class AppCfgApp(object):
try:
end_date = self._ParseEndDate(self.options.end_date)
- except ValueError:
+ except (TypeError, ValueError):
self.parser.error('End date must be in the format YYYY-MM-DD.')
basepath = self.args[0]
@@ -2137,20 +2234,19 @@ class AppCfgApp(object):
logs_requester.DownloadLogs()
def _ParseEndDate(self, date, time_func=time.time):
- """Translates a user-readable end date to a POSIX timestamp.
+ """Translates an ISO 8601 date to a date object.
Args:
- date: A utc date string as YYYY-MM-DD.
+ date: A date string as YYYY-MM-DD.
time_func: time.time() function for testing.
Returns:
- A POSIX timestamp representing the last moment of that day.
- If no date is given, returns a timestamp representing now.
+ A date object representing the last day of logs to get.
+ If no date is given, returns today in the US/Pacific timezone.
"""
if not date:
- return time_func()
- struct_time = time.strptime('%s' % date, '%Y-%m-%d')
- return calendar.timegm(struct_time) + 86400
+ return PacificDate(time_func())
+ return datetime.date(*[int(i) for i in date.split('-')])
def _RequestLogsOptions(self, parser):
"""Adds request_logs-specific options to 'parser'.
@@ -2161,7 +2257,7 @@ class AppCfgApp(object):
parser.add_option('-n', '--num_days', type='int', dest='num_days',
action='store', default=None,
help='Number of days worth of log data to get. '
- 'The cut-off point is midnight UTC. '
+ 'The cut-off point is midnight US/Pacific. '
'Use 0 to get all available logs. '
'Default is 1, unless --append is also given; '
'then the default is 0.')
@@ -2526,14 +2622,13 @@ in production as well as restart any indexes that were not completed."""),
The 'update_queue' command will update any new, removed or changed task queue
definitions from the optional queue.yaml file."""),
-
-
-
-
-
-
-
-
+ 'update_dos': Action(
+ function='UpdateDos',
+ usage='%prog [options] update_dos <directory>',
+ short_desc='Update application dos definitions.',
+ long_desc="""
+The 'update_dos' command will update any new, removed or changed dos
+definitions from the optional dos.yaml file."""),
'vacuum_indexes': Action(
function='VacuumIndexes',
diff --git a/google-appengine/google/appengine/tools/bulkloader.py b/google-appengine/google/appengine/tools/bulkloader.py
index 804e508..cce71ef 100755
--- a/google-appengine/google/appengine/tools/bulkloader.py
+++ b/google-appengine/google/appengine/tools/bulkloader.py
@@ -3822,6 +3822,7 @@ def _PerformBulkload(arg_dict,
logger.info('Bandwidth: %s bytes/second', bandwidth_limit)
logger.info('HTTP connections: %s/second', http_limit)
logger.info('Entities inserted/fetched/modified: %s/second', rps_limit)
+ logger.info('Batch Size: %s', batch_size)
throttle = remote_api_throttle.Throttle(layout=throttle_layout)
diff --git a/google-appengine/google/appengine/tools/dev_appserver.py b/google-appengine/google/appengine/tools/dev_appserver.py
index 11af764..542775d 100755
--- a/google-appengine/google/appengine/tools/dev_appserver.py
+++ b/google-appengine/google/appengine/tools/dev_appserver.py
@@ -99,6 +99,7 @@ from google.appengine.api.capabilities import capability_stub
from google.appengine.api.labs.taskqueue import taskqueue_stub
from google.appengine.api.memcache import memcache_stub
from google.appengine.api.xmpp import xmpp_service_stub
+from google.appengine.datastore import datastore_sqlite_stub
from google.appengine import dist
@@ -680,6 +681,12 @@ def SetupEnvironment(cgi_path,
env['USER_ID'] = user_id
if admin:
env['USER_IS_ADMIN'] = '1'
+ if env['AUTH_DOMAIN'] == '*':
+ auth_domain = 'gmail.com'
+ parts = email_addr.split('@')
+ if len(parts) == 2 and parts[1]:
+ auth_domain = parts[1]
+ env['AUTH_DOMAIN'] = auth_domain
for key in headers:
if key in _IGNORE_REQUEST_HEADERS:
@@ -939,6 +946,8 @@ class FakeFile(file):
ALLOWED_DIRS = set([
os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
+ os.path.normcase(os.path.dirname(os.path.realpath(os.__file__))),
+ os.path.normcase(os.path.dirname(os.path.abspath(os.__file__))),
])
NOT_ALLOWED_DIRS = set([
@@ -1144,11 +1153,14 @@ class FakeFile(file):
normcase=normcase):
relative_filename = logical_dirfakefile[len(FakeFile._root_path):]
- if (not FakeFile._allow_skipped_files and
- FakeFile._skip_files.match(relative_filename)):
- logging.warning('Blocking access to skipped file "%s"',
- logical_filename)
- return False
+ if not FakeFile._allow_skipped_files:
+ path = relative_filename
+ while path != os.path.dirname(path):
+ if FakeFile._skip_files.match(path):
+ logging.warning('Blocking access to skipped file "%s"',
+ logical_filename)
+ return False
+ path = os.path.dirname(path)
if FakeFile._static_file_config_matcher.IsStaticFile(relative_filename):
logging.warning('Blocking access to static file "%s"',
@@ -3212,6 +3224,9 @@ def CreateRequestHandler(root_path,
self.send_response(httplib.INTERNAL_SERVER_ERROR, title)
self.wfile.write('Content-Type: text/html\r\n\r\n')
self.wfile.write('<pre>%s</pre>' % cgi.escape(msg))
+ except KeyboardInterrupt, e:
+ logging.info('Server interrupted by user, terminating')
+ self.server.stop_serving_forever()
except:
msg = 'Exception encountered handling request'
logging.exception(msg)
@@ -3484,6 +3499,7 @@ def SetupStubs(app_id, **config):
login_url: Relative URL which should be used for handling user login/logout.
blobstore_path: Path to the directory to store Blobstore blobs in.
datastore_path: Path to the file to store Datastore file stub data in.
+ use_sqlite: Use the SQLite stub for the datastore.
history_path: DEPRECATED, No-op.
clear_datastore: If the datastore should be cleared on startup.
smtp_host: SMTP host used for sending test mail.
@@ -3502,6 +3518,7 @@ def SetupStubs(app_id, **config):
blobstore_path = config['blobstore_path']
datastore_path = config['datastore_path']
clear_datastore = config['clear_datastore']
+ use_sqlite = config.get('use_sqlite', False)
require_indexes = config.get('require_indexes', False)
smtp_host = config.get('smtp_host', None)
smtp_port = config.get('smtp_port', 25)
@@ -3525,9 +3542,14 @@ def SetupStubs(app_id, **config):
apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
- datastore = datastore_file_stub.DatastoreFileStub(
- app_id, datastore_path, require_indexes=require_indexes,
- trusted=trusted)
+ if use_sqlite:
+ datastore = datastore_sqlite_stub.DatastoreSqliteStub(
+ app_id, datastore_path, require_indexes=require_indexes,
+ trusted=trusted)
+ else:
+ datastore = datastore_file_stub.DatastoreFileStub(
+ app_id, datastore_path, require_indexes=require_indexes,
+ trusted=trusted)
apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore)
fixed_login_url = '%s?%s=%%s' % (login_url,
@@ -3740,6 +3762,7 @@ class HTTPServerWithScheduler(BaseHTTPServer.HTTPServer):
BaseHTTPServer.HTTPServer.__init__(self, server_address,
request_handler_class)
self._events = []
+ self._stopped = False
def get_request(self, time_func=time.time, select_func=select.select):
"""Overrides the base get_request call.
@@ -3766,6 +3789,20 @@ class HTTPServerWithScheduler(BaseHTTPServer.HTTPServer):
unused_eta, runnable = heapq.heappop(self._events)
runnable()
+ def serve_forever(self):
+ """Handle one request at a time until told to stop."""
+ while not self._stopped:
+ self.handle_request()
+
+ def stop_serving_forever(self):
+ """Stop the serve_forever() loop.
+
+ Stop happens on the next handle_request() loop; it will not stop
+ immediately. Since dev_appserver.py must run on py2.5 we can't
+ use newer features of SocketServer (e.g. shutdown(), added in py2.6).
+ """
+ self._stopped = True
+
def AddEvent(self, eta, runnable):
"""Add a runnable event to be run at the specified time.
diff --git a/google-appengine/google/appengine/tools/dev_appserver_main.py b/google-appengine/google/appengine/tools/dev_appserver_main.py
index 5762f54..644eb03 100755
--- a/google-appengine/google/appengine/tools/dev_appserver_main.py
+++ b/google-appengine/google/appengine/tools/dev_appserver_main.py
@@ -32,6 +32,8 @@ Options:
--blobstore_path=PATH Path to use for storing Blobstore file stub data.
--datastore_path=PATH Path to use for storing Datastore file stub data.
(Default %(datastore_path)s)
+ --use_sqlite Use the new, SQLite based datastore stub.
+ (Default false)
--history_path=PATH Path to use for storing Datastore history.
(Default %(history_path)s)
--require_indexes Disallows queries that require composite indexes
@@ -94,6 +96,7 @@ ARG_AUTH_DOMAIN = 'auth_domain'
ARG_CLEAR_DATASTORE = 'clear_datastore'
ARG_BLOBSTORE_PATH = 'blobstore_path'
ARG_DATASTORE_PATH = 'datastore_path'
+ARG_USE_SQLITE = 'use_sqlite'
ARG_DEBUG_IMPORTS = 'debug_imports'
ARG_ENABLE_SENDMAIL = 'enable_sendmail'
ARG_SHOW_MAIL_BODY = 'show_mail_body'
@@ -126,6 +129,7 @@ DEFAULT_ARGS = {
'dev_appserver.blobstore'),
ARG_DATASTORE_PATH: os.path.join(tempfile.gettempdir(),
'dev_appserver.datastore'),
+ ARG_USE_SQLITE: False,
ARG_HISTORY_PATH: os.path.join(tempfile.gettempdir(),
'dev_appserver.datastore.history'),
ARG_LOGIN_URL: '/_ah/login',
@@ -189,6 +193,7 @@ def ParseArguments(argv):
'clear_datastore',
'blobstore_path=',
'datastore_path=',
+ 'use_sqlite',
'debug',
'debug_imports',
'enable_sendmail',
@@ -234,6 +239,9 @@ def ParseArguments(argv):
if option == '--datastore_path':
option_dict[ARG_DATASTORE_PATH] = os.path.abspath(value)
+ if option == '--use_sqlite':
+ option_dict[ARG_USE_SQLITE] = True
+
if option == '--history_path':
option_dict[ARG_HISTORY_PATH] = os.path.abspath(value)
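A command-line sketch of the new flag; the datastore path and application directory are placeholders:

    dev_appserver.py --use_sqlite --datastore_path=/tmp/myapp.sqlite myapp/
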
diff --git a/google-appengine/google/appengine/tools/dev_appserver_upload.py b/google-appengine/google/appengine/tools/dev_appserver_upload.py
index 8d3932a..0cb4996 100755
--- a/google-appengine/google/appengine/tools/dev_appserver_upload.py
+++ b/google-appengine/google/appengine/tools/dev_appserver_upload.py
@@ -33,9 +33,9 @@ import md5
import random
import time
-from google.appengine.api import blobstore
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
+from google.appengine.api.blobstore import blobstore
try:
@@ -61,24 +61,6 @@ class InvalidMIMETypeFormatError(Error):
"""MIME type was formatted incorrectly."""
-def _FormatDateTime(stamp):
- """Format a timestamp with milliseconds.
-
- This method is necessary to format a timestamp with microseconds on Python
- versions before 2.6.
-
- DO NOT USE OUTSIDE THIS MODULE.
-
- Args:
- stamp: datetime.datetime object to format.
-
- Returns:
- Formatted datetime as Python 2.6 format '%Y-%m-%d %H:%M:%S.%f'.
- """
- return '%s.%06d' % (stamp.strftime(blobstore.BASE_CREATION_HEADER_FORMAT),
- stamp.microsecond)
-
-
def GenerateBlobKey(time_func=time.time, random_func=random.random):
"""Generate a unique BlobKey.
@@ -281,8 +263,8 @@ class UploadCGIHandler(object):
**form_item.type_options)
headers = dict(form_item.headers)
headers['Content-Length'] = str(content_length)
- headers[blobstore.UPLOAD_INFO_CREATION_HEADER] = _FormatDateTime(
- creation)
+ headers[blobstore.UPLOAD_INFO_CREATION_HEADER] = (
+ blobstore._format_creation(creation))
for key, value in headers.iteritems():
external.add_header(key, value)
diff --git a/google-appengine/google/appengine/tools/remote_api_shell.py b/google-appengine/google/appengine/tools/remote_api_shell.py
index 8705803..d08357d 100755
--- a/google-appengine/google/appengine/tools/remote_api_shell.py
+++ b/google-appengine/google/appengine/tools/remote_api_shell.py
@@ -75,7 +75,8 @@ def main(argv):
path = DEFAULT_PATH
remote_api_stub.ConfigureRemoteApi(appid, path, auth_func,
- servername=options.server)
+ servername=options.server,
+ save_cookies=True)
remote_api_stub.MaybeInvokeAuthentication()
os.environ['SERVER_SOFTWARE'] = 'Development (remote_api_shell)/1.0'
diff --git a/google-appengine/lib/ipaddr/ipaddr/__init__.py b/google-appengine/lib/ipaddr/ipaddr/__init__.py
index 329a370..c46edf6 100644
--- a/google-appengine/lib/ipaddr/ipaddr/__init__.py
+++ b/google-appengine/lib/ipaddr/ipaddr/__init__.py
@@ -1,4 +1,4 @@
-#!/usr/bin/python2.4
+#!/usr/bin/python
#
# Copyright 2007 Google Inc.
# Licensed to PSF under a Contributor Agreement.
@@ -15,143 +15,247 @@
# implied. See the License for the specific language governing
# permissions and limitations under the License.
-"""An IPv4/IPv6 manipulation library in Python.
+"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
This library is used to create/poke/manipulate IPv4 and IPv6 addresses
-and prefixes.
+and networks.
"""
-__version__ = '1.1.1'
+__version__ = '2.1.1'
import struct
-class Error(Exception):
- """Base class for exceptions."""
+class AddressValueError(ValueError):
+ """A Value Error related to the address."""
-class IPTypeError(Error):
+class NetmaskValueError(ValueError):
+ """A Value Error related to the netmask."""
- """Tried to perform a v4 action on v6 object or vice versa."""
+def IPAddress(address, version=None):
+ """Take an IP string/int and return an object of the correct type.
-class IPAddressExclusionError(Error):
-
- """An Error we should never see occurred in address exclusion."""
-
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+ version: An Integer, 4 or 6. If set, don't try to automatically
+      determine what the IP address type is. Important for things
+ like IPAddress(1), which could be IPv4, '0.0.0.0.1', or IPv6,
+ '::1'.
-class IPv4IpValidationError(Error):
+ Returns:
+ An IPv4Address or IPv6Address object.
- """Raised when an IPv4 address is invalid."""
+ Raises:
+ ValueError: if the string passed isn't either a v4 or a v6
+ address.
- def __init__(self, ip):
- Error.__init__(self)
- self.ip = ip
+ """
+ if version:
+ if version == 4:
+ return IPv4Address(address)
+ elif version == 6:
+ return IPv6Address(address)
- def __str__(self):
- return repr(self.ip) + ' is not a valid IPv4 address'
+ try:
+ return IPv4Address(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
+ try:
+ return IPv6Address(address)
+ except (AddressValueError, NetmaskValueError):
+ pass
-class IPv4NetmaskValidationError(Error):
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
+ address)
- """Raised when a netmask is invalid."""
- def __init__(self, netmask):
- Error.__init__(self)
- self.netmask = netmask
+def IPNetwork(address, version=None, strict=False):
+ """Take an IP string/int and return an object of the correct type.
- def __str__(self):
- return repr(self.netmask) + ' is not a valid IPv4 netmask'
+ Args:
+ address: A string or integer, the IP address. Either IPv4 or
+ IPv6 addresses may be supplied; integers less than 2**32 will
+ be considered to be IPv4 by default.
+ version: An Integer, if set, don't try to automatically
+      determine what the IP address type is. Important for things
+ like IPNetwork(1), which could be IPv4, '0.0.0.1/32', or IPv6,
+ '::1/128'.
+ Returns:
+ An IPv4Network or IPv6Network object.
-class IPv6IpValidationError(Error):
+ Raises:
+ ValueError: if the string passed isn't either a v4 or a v6
+ address. Or if a strict network was requested and a strict
+ network wasn't given.
- """Raised when an IPv6 address is invalid."""
+ """
+ if version:
+ if version == 4:
+ return IPv4Network(address, strict)
+ elif version == 6:
+ return IPv6Network(address, strict)
- def __init__(self, ip):
- Error.__init__(self)
- self.ip = ip
+ try:
+ return IPv4Network(address, strict)
+ except (AddressValueError, NetmaskValueError):
+ pass
- def __str__(self):
- return repr(self.ip) + ' is not a valid IPv6 address'
+ try:
+ return IPv6Network(address, strict)
+ except (AddressValueError, NetmaskValueError):
+ pass
+ raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
+ address)
-class IPv6NetmaskValidationError(Error):
- """Raised when an IPv6 netmask is invalid."""
+def _find_address_range(addresses):
+ """Find a sequence of addresses.
- def __init__(self, netmask):
- Error.__init__(self)
- self.netmask = netmask
+ Args:
+ addresses: a list of IPv4 or IPv6 addresses.
- def __str__(self):
- return repr(self.netmask) + ' is not a valid IPv6 netmask'
+ Returns:
+ A tuple containing the first and last IP addresses in the sequence.
+ """
+ first = last = addresses[0]
+ for ip in addresses[1:]:
+ if ip._ip == last._ip + 1:
+ last = ip
+ else:
+ break
+ return (first, last)
-class PrefixlenDiffInvalidError(Error):
+def _get_prefix_length(number1, number2, bits):
+ """Get the number of leading bits that are same for two numbers.
- """Raised when Sub/Supernets is called with a bad prefixlen_diff."""
+ Args:
+ number1: an integer.
+ number2: another integer.
+ bits: the maximum number of bits to compare.
- def __init__(self, error_str):
- Error.__init__(self)
- self.error_str = error_str
+ Returns:
+ The number of leading bits that are the same for two numbers.
+ """
+ for i in range(bits):
+ if number1 >> i == number2 >> i:
+ return bits - i
+ return 0
-def IP(ipaddr):
- """Take an IP string/int and return an object of the correct type.
+def _count_righthand_zero_bits(number, bits):
+ """Count the number of zero bits on the right hand side.
Args:
- ipaddr: A string or integer, the IP address. Either IPv4 or
- IPv6 addresses may be supplied; integers less than 2**32 will
- be considered to be IPv4.
+ number: an integer.
+ bits: maximum number of bits to count.
Returns:
- An IPv4 or IPv6 object.
-
- Raises:
- ValueError: if the string passed isn't either a v4 or a v6
- address.
+ The number of zero bits on the right hand side of the number.
"""
+ if number == 0:
+ return bits
+ for i in range(bits):
+ if (number >> i) % 2:
+ return i
- try:
- return IPv4(ipaddr)
- except (IPv4IpValidationError, IPv4NetmaskValidationError):
- pass
+def summarize_address_range(first, last):
+ """Summarize a network range given the first and last IP addresses.
- try:
- return IPv6(ipaddr)
- except (IPv6IpValidationError, IPv6NetmaskValidationError):
- pass
+ Example:
+ >>> summarize_address_range(IPv4Address('1.1.1.0'),
+ IPv4Address('1.1.1.130'))
+ [IPv4Network('1.1.1.0/25'), IPv4Network('1.1.1.128/31'),
+ IPv4Network('1.1.1.130/32')]
- raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
- ipaddr)
+ Args:
+ first: the first IPv4Address or IPv6Address in the range.
+ last: the last IPv4Address or IPv6Address in the range.
+
+ Returns:
+    The address range collapsed to a list of IPv4Network or
+    IPv6Network objects.
+  Raises:
+ TypeError:
+ If the first and last objects are not IP addresses.
+ If the first and last objects are not the same version.
+ ValueError:
+ If the last object is not greater than the first.
+ If the version is not 4 or 6.
+
+ """
+ if not (isinstance(first, _BaseIP) and isinstance(last, _BaseIP)):
+ raise TypeError('first and last must be IP addresses, not networks')
+ if first.version != last.version:
+ raise TypeError("%s and %s are not of the same version" % (
+ str(self), str(other)))
+ if first > last:
+ raise ValueError('last IP address must be greater than first')
+
+ networks = []
+
+ if first.version == 4:
+ ip = IPv4Network
+ elif first.version == 6:
+ ip = IPv6Network
+ else:
+ raise ValueError('unknown IP version')
+
+ ip_bits = first._max_prefixlen
+ first_int = first._ip
+ last_int = last._ip
+ while first_int <= last_int:
+ nbits = _count_righthand_zero_bits(first_int, ip_bits)
+ current = None
+ while nbits >= 0:
+ addend = 2**nbits - 1
+ current = first_int + addend
+ nbits -= 1
+ if current <= last_int:
+ break
+ prefix = _get_prefix_length(first_int, current, ip_bits)
+ net = ip('%s/%d' % (str(first), prefix))
+ networks.append(net)
+ if current == ip._ALL_ONES:
+ break
+ first_int = current + 1
+ first = IPAddress(first_int, version=first._version)
+ return networks
def _collapse_address_list_recursive(addresses):
"""Loops through the addresses, collapsing concurrent netblocks.
Example:
- ip1 = IPv4('1.1.0.0/24')
- ip2 = IPv4('1.1.1.0/24')
- ip3 = IPv4('1.1.2.0/24')
- ip4 = IPv4('1.1.3.0/24')
- ip5 = IPv4('1.1.4.0/24')
- ip6 = IPv4('1.1.0.1/22')
+      ip1 = IPv4Network('1.1.0.0/24')
+      ip2 = IPv4Network('1.1.1.0/24')
+      ip3 = IPv4Network('1.1.2.0/24')
+      ip4 = IPv4Network('1.1.3.0/24')
+      ip5 = IPv4Network('1.1.4.0/24')
+      ip6 = IPv4Network('1.1.0.1/22')
_collapse_address_list_recursive([ip1, ip2, ip3, ip4, ip5, ip6]) ->
- [IPv4('1.1.0.0/22'), IPv4('1.1.4.0/24')]
+ [IPv4Network('1.1.0.0/22'), IPv4Network('1.1.4.0/24')]
This shouldn't be called directly; it is called via
collapse_address_list([]).
Args:
- addresses: A list of IPv4 or IPv6 objects.
+    addresses: A list of IPv4Network or IPv6Network objects.
Returns:
- A list of IPv4 or IPv6 objects depending on what we were passed.
+    A list of IPv4Network or IPv6Network objects depending on what we
+    were passed.
"""
ret_array = []
@@ -183,14 +287,50 @@ def collapse_address_list(addresses):
[IPv4('1.1.0.0/23')]
Args:
- addresses: A list of IPv4 or IPv6 objects.
+ addresses: A list of IPv4Network or IPv6Network objects.
Returns:
- A list of IPv4 or IPv6 objects depending on what we were passed.
+ A list of IPv4Network or IPv6Network objects depending on what we
+ were passed.
+
+ Raises:
+ TypeError: If passed a list of mixed version objects.
"""
- return _collapse_address_list_recursive(
- sorted(addresses, key=BaseIP._get_networks_key))
+ i = 0
+ addrs = []
+ ips = []
+ nets = []
+
+ # split IP addresses and networks
+ for ip in addresses:
+ if isinstance(ip, _BaseIP):
+ if ips and ips[-1]._version != ip._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ str(ip), str(ips[-1])))
+ ips.append(ip)
+ elif ip._prefixlen == ip._max_prefixlen:
+ if ips and ips[-1]._version != ip._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ str(ip), str(ips[-1])))
+ ips.append(ip.ip)
+ else:
+ if nets and nets[-1]._version != ip._version:
+ raise TypeError("%s and %s are not of the same version" % (
+ str(ip), str(ips[-1])))
+ nets.append(ip)
+
+ # sort and dedup
+ ips = sorted(set(ips))
+ nets = sorted(set(nets))
+
+ while i < len(ips):
+ (first, last) = _find_address_range(ips[i:])
+ i = ips.index(last) + 1
+ addrs.extend(summarize_address_range(first, last))
+
+ return _collapse_address_list_recursive(sorted(
+ addrs + nets, key=_BaseNet._get_networks_key))
# backwards compatibility
CollapseAddrList = collapse_address_list
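A sketch matching the collapse example in the docstring above:

    nets = [IPv4Network('1.1.0.0/24'), IPv4Network('1.1.1.0/24')]
    collapse_address_list(nets)  # -> [IPv4Network('1.1.0.0/23')]
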
@@ -201,59 +341,75 @@ CollapseAddrList = collapse_address_list
# distinguish between packed representations and strings, for example
# b'12::' (the IPv4 address 49.50.58.58) and '12::' (an IPv6 address).
try:
- _compat_has_real_bytes = bytes != str
+ _compat_has_real_bytes = bytes is not str
except NameError: # <Python2.6
_compat_has_real_bytes = False
-class BaseIP(object):
+def get_mixed_type_key(obj):
+ """Return a key suitable for sorting between networks and addresses.
- """A generic IP object.
+ Address and Network objects are not sortable by default; they're
+ fundamentally different so the expression
+
+ IPv4Address('1.1.1.1') <= IPv4Network('1.1.1.1/24')
- This IP class contains most of the methods which are used by
- the IPv4 and IPv6 classes.
+ doesn't make any sense. There are some times however, where you may wish
+ to have ipaddr sort these for you anyway. If you need to do this, you
+ can use this function as the key= argument to sorted().
+
+ Args:
+ obj: either a Network or Address object.
+ Returns:
+ appropriate key.
"""
+ if isinstance(obj, _BaseNet):
+ return obj._get_networks_key()
+ elif isinstance(obj, _BaseIP):
+ return obj._get_address_key()
+ return NotImplemented
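A sketch of mixed-type sorting with the helper; without it, comparing an address to a network raises TypeError:

    mixed = [IPv4Network('1.1.1.0/24'), IPv4Address('1.1.1.1')]
    ordered = sorted(mixed, key=get_mixed_type_key)
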
- def __getitem__(self, n):
- if n >= 0:
- if self.network + n > self.broadcast:
- raise IndexError
- return self._string_from_ip_int(self.network + n)
- else:
- n += 1
- if self.broadcast + n < self.network:
- raise IndexError
- return self._string_from_ip_int(self.broadcast + n)
+class _IPAddrBase(object):
- def __lt__(self, other):
- try:
- if self.version != other.version:
- return self.version < other.version
- if self.ip != other.ip:
- return self.ip < other.ip
- if self.netmask != other.netmask:
- return self.netmask < other.netmask
- return False
- except AttributeError:
- return NotImplemented
+ """The mother class."""
- def __gt__(self, other):
- try:
- if self.version != other.version:
- return self.version > other.version
- if self.ip != other.ip:
- return self.ip > other.ip
- if self.netmask != other.netmask:
- return self.netmask > other.netmask
- return False
- except AttributeError:
- return NotImplemented
+ def __index__(self):
+ return self._ip
+
+ def __int__(self):
+ return self._ip
+
+ def __hex__(self):
+ return hex(self._ip)
+
+ @property
+ def exploded(self):
+ """Return the longhand version of the IP address as a string."""
+ return self._explode_shorthand_ip_string()
+
+ @property
+ def compressed(self):
+ """Return the shorthand version of the IP address as a string."""
+ return str(self)
+
+
+class _BaseIP(_IPAddrBase):
+
+ """A generic IP object.
+
+ This IP class contains the version independent methods which are
+ used by single IP addresses.
+
+ """
+
+ def __init__(self, address):
+ if '/' in str(address):
+ raise AddressValueError(address)
def __eq__(self, other):
try:
- return (self.version == other.version
- and self.ip == other.ip
- and self.netmask == other.netmask)
+ return (self._ip == other._ip
+ and self._version == other._version)
except AttributeError:
return NotImplemented
@@ -275,17 +431,216 @@ class BaseIP(object):
return NotImplemented
return not lt
+ def __lt__(self, other):
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same version' % (
+ str(self), str(other)))
+ if not isinstance(other, _BaseIP):
+ raise TypeError('%s and %s are not of the same type' % (
+ str(self), str(other)))
+ if self._ip != other._ip:
+ return self._ip < other._ip
+ return False
+
+ def __gt__(self, other):
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same version' % (
+ str(self), str(other)))
+ if not isinstance(other, _BaseIP):
+ raise TypeError('%s and %s are not of the same type' % (
+ str(self), str(other)))
+ if self._ip != other._ip:
+ return self._ip > other._ip
+ return False
+
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, str(self))
- def __index__(self):
- return self.ip
+ def __str__(self):
+ return '%s' % self._string_from_ip_int(self._ip)
- def __int__(self):
- return self.ip
+ def __hash__(self):
+ return hash(hex(self._ip))
- def __hex__(self):
- return hex(int(self))
+ def _get_address_key(self):
+ return (self._version, self)
+
+ @property
+ def version(self):
+ raise NotImplementedError('BaseIP has no version')
+
+
+class _BaseNet(_IPAddrBase):
+
+ """A generic IP object.
+
+ This IP class contains the version independent methods which are
+ used by networks.
+
+ """
+
+ def __init__(self, address):
+ self._cache = {}
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__class__.__name__, str(self))
+
+ def iterhosts(self):
+ """Generate Iterator over usable hosts in a network.
+
+ This is like __iter__ except it doesn't return the network
+ or broadcast addresses.
+
+ """
+ cur = int(self.network) + 1
+ bcast = int(self.broadcast) - 1
+ while cur <= bcast:
+ cur += 1
+ yield IPAddress(cur - 1, version=self._version)
+
+ def __iter__(self):
+ cur = int(self.network)
+ bcast = int(self.broadcast)
+ while cur <= bcast:
+ cur += 1
+ yield IPAddress(cur - 1, version=self._version)
+
+ def __getitem__(self, n):
+ network = int(self.network)
+ broadcast = int(self.broadcast)
+ if n >= 0:
+ if network + n > broadcast:
+ raise IndexError
+ return IPAddress(network + n, version=self._version)
+ else:
+ n += 1
+ if broadcast + n < network:
+ raise IndexError
+ return IPAddress(broadcast + n, version=self._version)
+
+ def __lt__(self, other):
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same version' % (
+ str(self), str(other)))
+ if not isinstance(other, _BaseNet):
+ raise TypeError('%s and %s are not of the same type' % (
+ str(self), str(other)))
+ if self.network != other.network:
+ return self.network < other.network
+ if self.netmask != other.netmask:
+ return self.netmask < other.netmask
+ return False
+
+ def __gt__(self, other):
+ if self._version != other._version:
+ raise TypeError('%s and %s are not of the same version' % (
+ str(self), str(other)))
+ if not isinstance(other, _BaseNet):
+ raise TypeError('%s and %s are not of the same type' % (
+ str(self), str(other)))
+ if self.network != other.network:
+ return self.network > other.network
+ if self.netmask != other.netmask:
+ return self.netmask > other.netmask
+ return False
+
+ def __le__(self, other):
+ gt = self.__gt__(other)
+ if gt is NotImplemented:
+ return NotImplemented
+ return not gt
+
+ def __ge__(self, other):
+ lt = self.__lt__(other)
+ if lt is NotImplemented:
+ return NotImplemented
+ return not lt
+
+ def __eq__(self, other):
+ try:
+ return (self._version == other._version
+ and self.network == other.network
+ and int(self.netmask) == int(other.netmask))
+ except AttributeError:
+ return NotImplemented
+
+ def __ne__(self, other):
+ eq = self.__eq__(other)
+ if eq is NotImplemented:
+ return NotImplemented
+ return not eq
+
+ def __str__(self):
+ return '%s/%s' % (str(self.ip),
+ str(self._prefixlen))
+
+ def __hash__(self):
+ return hash(int(self.network) ^ int(self.netmask))
+
+ def __contains__(self, other):
+ # dealing with another network.
+ if isinstance(other, _BaseNet):
+ return (int(self.network) <= int(other._ip) and
+ int(self.broadcast) >= int(other.broadcast))
+ # dealing with another address
+ else:
+ return (int(self.network) <= int(other._ip) <=
+ int(self.broadcast))
+
+ def overlaps(self, other):
+ """Tell if self is partly contained in other."""
+ return self.network in other or self.broadcast in other or (
+ other.network in self or other.broadcast in self)
+
+ @property
+ def network(self):
+ x = self._cache.get('network')
+ if x is None:
+ x = IPAddress(self._ip & int(self.netmask), version=self._version)
+ self._cache['network'] = x
+ return x
+
+ @property
+ def broadcast(self):
+ x = self._cache.get('broadcast')
+ if x is None:
+ x = IPAddress(self._ip | int(self.hostmask), version=self._version)
+ self._cache['broadcast'] = x
+ return x
+
+ @property
+ def hostmask(self):
+ x = self._cache.get('hostmask')
+ if x is None:
+ x = IPAddress(int(self.netmask) ^ self._ALL_ONES,
+ version=self._version)
+ self._cache['hostmask'] = x
+ return x
+
+ @property
+ def with_prefixlen(self):
+ return '%s/%d' % (str(self.ip), self._prefixlen)
+
+ @property
+ def with_netmask(self):
+ return '%s/%s' % (str(self.ip), str(self.netmask))
+
+ @property
+ def with_hostmask(self):
+ return '%s/%s' % (str(self.ip), str(self.hostmask))
+
+ @property
+ def numhosts(self):
+ """Number of hosts in the current subnet."""
+ return int(self.broadcast) - int(self.network) + 1
+
+ @property
+ def version(self):
+ raise NotImplementedError('BaseNet has no version')
+
+ @property
+ def prefixlen(self):
+ return self._prefixlen
def address_exclude(self, other):
"""Remove an address from a larger block.
@@ -316,26 +671,23 @@ class BaseIP(object):
other.
Raises:
- IPTypeError: If self and other are of difffering address
+      TypeError: If self and other are of differing address
versions.
- IPAddressExclusionError: There was some unknown error in the
- address exclusion process. This likely points to a bug
- elsewhere in this code.
ValueError: If other is not completely contained by self.
"""
- if not self.version == other.version:
- raise IPTypeError("%s and %s aren't of the same version" % (
+ if not self._version == other._version:
+ raise TypeError("%s and %s are not of the same version" % (
str(self), str(other)))
if other not in self:
raise ValueError('%s not contained in %s' % (str(other),
str(self)))
-
ret_addrs = []
# Make sure we're comparing the network of other.
- other = IP(other.network_ext + '/' + str(other.prefixlen))
+ other = IPNetwork('%s/%s' % (str(other.network), str(other.prefixlen)),
+ version=other._version)
s1, s2 = self.subnet()
while s1 != other and s2 != other:
@@ -347,20 +699,20 @@ class BaseIP(object):
s1, s2 = s2.subnet()
else:
# If we got here, there's a bug somewhere.
- raise IPAddressExclusionError('Error performing exclusion: '
- 's1: %s s2: %s other: %s' %
- (str(s1), str(s2), str(other)))
+ assert False, ('Error performing exclusion: '
+ 's1: %s s2: %s other: %s' %
+ (str(s1), str(s2), str(other)))
if s1 == other:
ret_addrs.append(s2)
elif s2 == other:
ret_addrs.append(s1)
else:
# If we got here, there's a bug somewhere.
- raise IPAddressExclusionError('Error performing exclusion: '
- 's1: %s s2: %s other: %s' %
- (str(s1), str(s2), str(other)))
+ assert False, ('Error performing exclusion: '
+ 's1: %s s2: %s other: %s' %
+ (str(s1), str(s2), str(other)))
- return sorted(ret_addrs, key=BaseIP._get_networks_key)
+ return sorted(ret_addrs, key=_BaseNet._get_networks_key)
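
A short sketch of address_exclude: carving a /26 out of its parent /24
leaves the sibling /26 plus the remaining /25, sorted by network address
(assuming the IPNetwork factory this patch introduces):

    >>> outer = IPNetwork('10.1.1.0/24')
    >>> inner = IPNetwork('10.1.1.0/26')
    >>> [str(n) for n in outer.address_exclude(inner)]
    ['10.1.1.64/26', '10.1.1.128/25']
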
def compare_networks(self, other):
"""Compare two IP objects.
@@ -369,7 +721,7 @@ class BaseIP(object):
representation of the network addresses. This means that the
host bits aren't considered at all in this method. If you want
to compare host bits, you can easily enough do a
- 'HostA.ip < HostB.ip'
+ 'HostA._ip < HostB._ip'
Args:
other: An IP object.
@@ -390,17 +742,17 @@ class BaseIP(object):
If the IP versions of self and other are different, returns:
- -1 if self.version < other.version
+ -1 if self._version < other._version
eg: IPv4('10.0.0.1/24') < IPv6('::1/128')
- 1 if self.version > other.version
+ 1 if self._version > other._version
eg: IPv6('::1/128') > IPv4('255.255.255.0/24')
"""
- if self.version < other.version:
+ if self._version < other._version:
return -1
- if self.version > other.version:
+ if self._version > other._version:
return 1
- # self.version == other.version below here:
+ # self._version == other._version below here:
if self.network < other.network:
return -1
if self.network > other.network:
@@ -421,75 +773,7 @@ class BaseIP(object):
and list.sort().
"""
- return (self.version, self.network, self.netmask)
-
- prefixlen = property(
- fget=lambda self: self._prefixlen,
- fset=lambda self, prefixlen: self._set_prefix(prefixlen))
-
- def __str__(self):
- return '%s/%s' % (self._string_from_ip_int(self.ip),
- str(self.prefixlen))
-
- def __hash__(self):
- return hash(self.ip ^ self.netmask)
-
- def __contains__(self, other):
- return self.network <= other.ip and self.broadcast >= other.broadcast
-
- @property
- def ip_ext(self):
- """Dotted decimal or colon string version of the IP address."""
- return self._string_from_ip_int(self.ip)
-
- @property
- def ip_ext_full(self):
- """Canonical string version of the IP address."""
- return self.ip_ext
-
- @property
- def broadcast(self):
- """Integer representation of the broadcast address."""
- return self.ip | self.hostmask
-
- @property
- def broadcast_ext(self):
- """Dotted decimal or colon string version of the broadcast."""
- return self._string_from_ip_int(self.broadcast)
-
- @property
- def hostmask(self):
- """Integer representation of the hostmask."""
- return self.netmask ^ self._ALL_ONES
-
- @property
- def hostmask_ext(self):
- """Dotted decimal or colon string version of the hostmask."""
- return self._string_from_ip_int(self.hostmask)
-
- @property
- def network(self):
- """Integer representation of the network."""
- return self.ip & self.netmask
-
- @property
- def network_ext(self):
- """Dotted decimal or colon string version of the network."""
- return self._string_from_ip_int(self.network)
-
- @property
- def netmask_ext(self):
- """Dotted decimal or colon string version of the netmask."""
- return self._string_from_ip_int(self.netmask)
-
- @property
- def numhosts(self):
- """Number of hosts in the current subnet."""
- return self.broadcast - self.network + 1
-
- @property
- def version(self):
- raise NotImplementedError('BaseIP has no version')
+ return (self._version, self.network, self.netmask)
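
A sketch of the ordering that compare_networks and _get_networks_key
define: version first, then network address, then netmask:

    >>> IPNetwork('1.1.1.0/24').compare_networks(IPNetwork('1.1.2.0/24'))
    -1
    >>> IPNetwork('1.1.1.0/24').compare_networks(IPNetwork('1.1.1.0/25'))
    -1
    >>> IPNetwork('1.1.1.0/24').compare_networks(IPNetwork('::1/128'))
    -1
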
def _ip_int_from_prefix(self, prefixlen=None):
"""Turn the prefix length netmask into a int for comparison.
@@ -502,7 +786,7 @@ class BaseIP(object):
"""
if not prefixlen and prefixlen != 0:
- prefixlen = self.prefixlen
+ prefixlen = self._prefixlen
return self._ALL_ONES ^ (self._ALL_ONES >> prefixlen)
def _prefix_from_ip_int(self, ip_int, mask=32):
@@ -535,186 +819,76 @@ class BaseIP(object):
"""
if not prefixlen:
- prefixlen = self.prefixlen
+ prefixlen = self._prefixlen
return self._string_from_ip_int(self._ip_int_from_prefix(prefixlen))
- # backwards compatibility
- AddressExclude = address_exclude
- CompareNetworks = compare_networks
- Contains = __contains__
- def set_prefix(self, prefixlen): self.prefixlen = prefixlen
- SetPrefix = set_prefix
- def get_prefix(self): return self.prefixlen
-
-
-class IPv4(BaseIP):
-
- """This class represents and manipulates 32-bit IPv4 addresses.
-
- Attributes: [examples for IPv4('1.2.3.4/27')]
- .ip: 16909060
- .ip_ext: '1.2.3.4'
- .ip_ext_full: '1.2.3.4'
- .network: 16909056L
- .network_ext: '1.2.3.0'
- .hostmask: 31L (0x1F)
- .hostmask_ext: '0.0.0.31'
- .broadcast: 16909087L (0x102031F)
- .broadcast_ext: '1.2.3.31'
- .netmask: 4294967040L (0xFFFFFFE0)
- .netmask_ext: '255.255.255.224'
- .prefixlen: 27
-
- """
-
- # Equivalent to 255.255.255.255 or 32 bits of 1's.
- _ALL_ONES = (2**32) - 1
-
- def __init__(self, ipaddr):
- """Instantiate a new IPv4 object.
-
- Args:
- ipaddr: A string or integer representing the IP [& network].
- '192.168.1.1/32'
- '192.168.1.1/255.255.255.255'
- '192.168.1.1/0.0.0.255'
- '192.168.1.1'
- are all functionally the same in IPv4. That is to say,
- failing to provide a subnetmask will create an object with
- a mask of /32. A netmask of '255.255.255.255' is assumed
- to be /32 and '0.0.0.0' is assumed to be /0, even though
- other netmasks can be expressed both as host- and
- net-masks. (255.0.0.0 == 0.255.255.255)
-
- Additionally, an integer can be passed, so
- IPv4('192.168.1.1') == IPv4(3232235777).
- or, more generally
- IPv4(IPv4('192.168.1.1').ip) == IPv4('192.168.1.1')
-
- Raises:
- IPv4IpValidationError: If ipaddr isn't a valid IPv4 address.
- IPv4NetmaskValidationError: If the netmask isn't valid for
- an IPv4 address.
-
- """
- BaseIP.__init__(self)
- self._version = 4
-
- # Efficient constructor from integer.
- if isinstance(ipaddr, int) or isinstance(ipaddr, long):
- self.ip = ipaddr
- self._prefixlen = 32
- self.netmask = self._ALL_ONES
- if ipaddr < 0 or ipaddr > self._ALL_ONES:
- raise IPv4IpValidationError(ipaddr)
- return
-
- # Constructing from a packed address
- if _compat_has_real_bytes:
- if isinstance(ipaddr, bytes) and len(ipaddr) == 4:
- self.ip = struct.unpack('!I', ipaddr)[0]
- self._prefixlen = 32
- self.netmask = self._ALL_ONES
- return
-
- # Assume input argument to be string or any object representation
- # which converts into a formatted IP prefix string.
- addr = str(ipaddr).split('/')
-
- if len(addr) > 2:
- raise IPv4IpValidationError(ipaddr)
-
- if not self._is_valid_ip(addr[0]):
- raise IPv4IpValidationError(addr[0])
-
- self.ip = self._ip_int_from_string(addr[0])
-
- if len(addr) == 2:
- mask = addr[1].split('.')
- if len(mask) == 4:
- # We have dotted decimal netmask.
- if not self._is_valid_netmask(addr[1]):
- raise IPv4NetmaskValidationError(addr[1])
- if self._is_hostmask(addr[1]):
- self.netmask = (
- self._ip_int_from_string(addr[1]) ^ self._ALL_ONES)
- else:
- self.netmask = self._ip_int_from_string(addr[1])
- self._prefixlen = self._prefix_from_ip_int(self.netmask)
- else:
- # We have a netmask in prefix length form.
- if not self._is_valid_netmask(addr[1]):
- raise IPv4NetmaskValidationError(addr[1])
- self._prefixlen = int(addr[1])
- self.netmask = self._ip_int_from_prefix(self._prefixlen)
- else:
- self._prefixlen = 32
- self.netmask = self._ip_int_from_prefix(self._prefixlen)
-
- def _set_prefix(self, prefixlen):
- """Change the prefix length.
-
- Args:
- prefixlen: An integer, the new prefix length.
-
- Raises:
- IPv4NetmaskValidationError: If prefixlen is out of bounds.
-
- """
- if not 0 <= prefixlen <= 32:
- raise IPv4NetmaskValidationError(prefixlen)
- self._prefixlen = prefixlen
- self.netmask = self._ip_int_from_prefix(self._prefixlen)
-
- def subnet(self, prefixlen_diff=1):
+ def iter_subnets(self, prefixlen_diff=1, new_prefix=None):
"""The subnets which join to make the current subnet.
In the case that self contains only one IP
- (self._prefixlen == 32), return a list with just ourself.
+ (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
+ for IPv6), return a list with just ourself.
Args:
prefixlen_diff: An integer, the amount the prefix length
- should be increased by. Given a /24 network and a
- prefixlen_diff of 3, for example, 8 subnets of size /27
- will be returned. The default value of 1 splits the
- current network into two halves.
+ should be increased by. This should not be set if
+ new_prefix is also set.
+ new_prefix: The desired new prefix length. This must be a
+ larger number (a longer prefix, hence smaller subnets) than
+ the existing prefix. This should not be set if prefixlen_diff
+ is also set.
Returns:
- A list of IPv4 objects.
+ An iterator of IPv(4|6) objects.
Raises:
- PrefixlenDiffInvalidError: The prefixlen_diff is too small
- or too large.
+ ValueError: The prefixlen_diff is too small or too large.
+ OR
+ prefixlen_diff and new_prefix are both set or new_prefix
+ is a smaller number than the current prefix (smaller
+ number means a larger network)
"""
- if self._prefixlen == 32:
- return [self]
+ if self._prefixlen == self._max_prefixlen:
+ yield self
+ return
+
+ if new_prefix is not None:
+ if new_prefix < self._prefixlen:
+ raise ValueError('new prefix must be longer')
+ if prefixlen_diff != 1:
+ raise ValueError('cannot set prefixlen_diff and new_prefix')
+ prefixlen_diff = new_prefix - self._prefixlen
if prefixlen_diff < 0:
- raise PrefixlenDiffInvalidError('prefix length diff must be > 0')
- new_prefixlen = self.prefixlen + prefixlen_diff
+ raise ValueError('prefix length diff must be > 0')
+ new_prefixlen = self._prefixlen + prefixlen_diff
if not self._is_valid_netmask(str(new_prefixlen)):
- raise PrefixlenDiffInvalidError(
+ raise ValueError(
'prefix length diff %d is invalid for netblock %s' % (
new_prefixlen, str(self)))
- first = IPv4(
- self._string_from_ip_int(self.network) + '/' +
- str(self._prefixlen + prefixlen_diff))
- subnets = [first]
+ first = IPNetwork('%s/%s' % (str(self.network),
+ str(self._prefixlen + prefixlen_diff)),
+ version=self._version)
+
+ yield first
current = first
while True:
broadcast = current.broadcast
if broadcast == self.broadcast:
- break
- current = IPv4(self._string_from_ip_int(broadcast + 1) + '/' +
- str(new_prefixlen))
- subnets.append(current)
+ return
+ new_addr = IPAddress(int(broadcast) + 1, version=self._version)
+ current = IPNetwork('%s/%s' % (str(new_addr), str(new_prefixlen)),
+ version=self._version)
- return subnets
+ yield current
- def supernet(self, prefixlen_diff=1):
+ def subnet(self, prefixlen_diff=1, new_prefix=None):
+ """Return a list of subnets, rather than an interator."""
+ return list(self.iter_subnets(prefixlen_diff, new_prefix))
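
A usage sketch of subnet()/iter_subnets(); prefixlen_diff and new_prefix
are two spellings of the same split:

    >>> [str(s) for s in IPNetwork('10.0.0.0/24').subnet()]
    ['10.0.0.0/25', '10.0.0.128/25']
    >>> [str(s) for s in IPNetwork('10.0.0.0/24').subnet(prefixlen_diff=2)]
    ['10.0.0.0/26', '10.0.0.64/26', '10.0.0.128/26', '10.0.0.192/26']
    >>> (IPNetwork('10.0.0.0/24').subnet(new_prefix=26) ==
    ...  IPNetwork('10.0.0.0/24').subnet(prefixlen_diff=2))
    True
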
+
+ def supernet(self, prefixlen_diff=1, new_prefix=None):
"""The supernet containing the current network.
Args:
@@ -724,102 +898,87 @@ class IPv4(BaseIP):
/21 netmask is returned.
Returns:
- An IPv4 object.
+ An IPv4 network object.
Raises:
- PrefixlenDiffInvalidError: If
- self.prefixlen - prefixlen_diff < 0. I.e., you have a
+ ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have a
negative prefix length.
+ OR
+ If prefixlen_diff and new_prefix are both set or new_prefix is a
+ larger number than the current prefix (larger number means a
+ smaller network)
"""
- if self.prefixlen == 0:
+ if self._prefixlen == 0:
return self
- if self.prefixlen - prefixlen_diff < 0:
- raise PrefixlenDiffInvalidError(
- 'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
- (self.prefixlen, prefixlen_diff))
- return IPv4(self.ip_ext + '/' + str(self.prefixlen - prefixlen_diff))
-
- @property
- def is_private(self):
- """Test if this address is allocated for private networks.
- Returns:
- A boolean, True if the address is reserved per RFC 1918.
+ if new_prefix is not None:
+ if new_prefix > self._prefixlen:
+ raise ValueError('new prefix must be shorter')
+ if prefixlen_diff != 1:
+ raise ValueError('cannot set prefixlen_diff and new_prefix')
+ prefixlen_diff = self._prefixlen - new_prefix
- """
- return (self in IPv4('10.0.0.0/8') or
- self in IPv4('172.16.0.0/12') or
- self in IPv4('192.168.0.0/16'))
- @property
- def is_multicast(self):
- """Test if the address is reserved for multicast use.
-
- Returns:
- A boolean, True if the address is multicast.
- See RFC 3171 for details.
-
- """
- return self in IPv4('224.0.0.0/4')
-
- @property
- def is_loopback(self):
- """Test if the address is a loopback adddress.
-
- Returns:
- A boolean, True if the address is a loopback per RFC 3330.
-
- """
- return self in IPv4('127.0.0.0/8')
+ if self.prefixlen - prefixlen_diff < 0:
+ raise ValueError(
+ 'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
+ (self.prefixlen, prefixlen_diff))
+ return IPNetwork('%s/%s' % (str(self.network),
+ str(self.prefixlen - prefixlen_diff)),
+ version=self._version)
- @property
- def is_link_local(self):
- """Test if the address is reserved for link-local.
+ # backwards compatibility
+ Subnet = subnet
+ Supernet = supernet
+ AddressExclude = address_exclude
+ CompareNetworks = compare_networks
+ Contains = __contains__
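
And the inverse direction, supernet(), which returns the containing
network (a sketch under the same assumptions as above):

    >>> str(IPNetwork('10.1.1.0/26').supernet())
    '10.1.1.0/25'
    >>> str(IPNetwork('10.1.1.0/26').supernet(prefixlen_diff=2))
    '10.1.1.0/24'
    >>> (IPNetwork('10.1.1.0/26').supernet(new_prefix=24) ==
    ...  IPNetwork('10.1.1.0/26').supernet(prefixlen_diff=2))
    True
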
- Returns:
- A boolean, True if the address is link-local per RFC 3927.
- """
- return self in IPv4('169.254.0.0/16')
+class _BaseV4(object):
- @property
- def version(self):
- return self._version
+ """Base IPv4 object.
- @property
- def packed(self):
- """The binary representation of this address."""
- return struct.pack('!I', self.ip)
+ The following methods are used by IPv4 objects in both single IP
+ addresses and networks.
- def _is_hostmask(self, ip_str):
- """Test if the IP string is a hostmask (rather than a netmask).
+ """
- Args:
- ip_str: A string, the potential hostmask.
+ # Equivalent to 255.255.255.255 or 32 bits of 1's.
+ _ALL_ONES = (2**32) - 1
- Returns:
- A boolean, True if the IP string is a hostmask.
+ def __init__(self, address):
+ self._version = 4
+ self._max_prefixlen = 32
- """
- parts = [int(x) for x in ip_str.split('.')]
- if parts[0] < parts[-1]:
- return True
- return False
+ def _explode_shorthand_ip_string(self, ip_str=None):
+ if not ip_str:
+ ip_str = str(self)
+ return ip_str
def _ip_int_from_string(self, ip_str):
"""Turn the given IP string into an integer for comparison.
Args:
- ip_str: A string, the IP address.
+ ip_str: A string, the IP address.
Returns:
- The IP address as an integer.
+ The IP address as an integer.
+
+ Raises:
+ AddressValueError: if the string isn't a valid IP string.
"""
packed_ip = 0
- for oc in ip_str.split('.'):
- packed_ip = (packed_ip << 8) | int(oc)
+ octets = ip_str.split('.')
+ if len(octets) != 4:
+ raise AddressValueError(ip_str)
+ for oc in octets:
+ try:
+ packed_ip = (packed_ip << 8) | int(oc)
+ except ValueError:
+ raise AddressValueError(ip_str)
return packed_ip
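
The packing above is plain base-256 arithmetic; a worked check (not part
of the patch) for '1.2.3.4':

    >>> (((1 << 8 | 2) << 8 | 3) << 8) | 4
    16909060
    >>> IPv4Network('1.2.3.4/27')._ip
    16909060
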
def _string_from_ip_int(self, ip_int):
@@ -838,22 +997,23 @@ class IPv4(BaseIP):
ip_int >>= 8
return '.'.join(octets)
- def _is_valid_ip(self, ip_str):
+ def _is_valid_ip(self, address):
"""Validate the dotted decimal notation IP/netmask string.
Args:
- ip_str: A string, the IP address.
+ address: A string, either representing a quad-dotted ip
+ or an integer which is a valid IPv4 IP address.
Returns:
A boolean, True if the string is a valid dotted decimal IP
string.
"""
- octets = ip_str.split('.')
+ octets = address.split('.')
if len(octets) == 1:
# We have an integer rather than a dotted decimal IP.
try:
- return int(ip_str) >= 0 and int(ip_str) <= self._ALL_ONES
+ return int(address) >= 0 and int(address) <= self._ALL_ONES
except ValueError:
return False
@@ -868,305 +1028,438 @@ class IPv4(BaseIP):
return False
return True
- def _is_valid_netmask(self, netmask):
- """Verify that the netmask is valid.
+ @property
+ def max_prefixlen(self):
+ return self._max_prefixlen
- Args:
- netmask: A string, either a prefix or dotted decimal
- netmask.
+ @property
+ def packed(self):
+ """The binary representation of this address."""
+ return struct.pack('!I', self._ip)
+
+ @property
+ def version(self):
+ return self._version
+
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
Returns:
- A boolean, True if the prefix represents a valid IPv4
- netmask.
+ A boolean, True if the address is within the
+ reserved IPv4 Network range.
+
+ """
+ return self in IPv4Network('240.0.0.0/4')
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
+
+ Returns:
+ A boolean, True if the address is reserved per RFC 1918.
"""
- if len(netmask.split('.')) == 4:
- return self._is_valid_ip(netmask)
- try:
- netmask = int(netmask)
- except ValueError:
- return False
- return 0 <= netmask <= 32
+ return (self in IPv4Network('10.0.0.0/8') or
+ self in IPv4Network('172.16.0.0/12') or
+ self in IPv4Network('192.168.0.0/16'))
- # backwards compatibility
- Subnet = subnet
- Supernet = supernet
- IsRFC1918 = lambda self: self.is_private
- IsMulticast = lambda self: self.is_multicast
- IsLoopback = lambda self: self.is_loopback
- IsLinkLocal = lambda self: self.is_link_local
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
-class IPv6(BaseIP):
+ Returns:
+ A boolean, True if the address is multicast.
+ See RFC 3171 for details.
- """This class respresents and manipulates 128-bit IPv6 addresses.
+ """
+ return self in IPv4Network('224.0.0.0/4')
- Attributes: [examples for IPv6('2001:658:22A:CAFE:200::1/64')]
- .ip: 42540616829182469433547762482097946625L
- .ip_ext: '2001:658:22a:cafe:200::1'
- .ip_ext_full: '2001:0658:022a:cafe:0200:0000:0000:0001'
- .network: 42540616829182469433403647294022090752L
- .network_ext: '2001:658:22a:cafe::'
- .hostmask: 18446744073709551615L
- .hostmask_ext: '::ffff:ffff:ffff:ffff'
- .broadcast: 42540616829182469451850391367731642367L
- .broadcast_ext: '2001:658:22a:cafe:ffff:ffff:ffff:ffff'
- .netmask: 340282366920938463444927863358058659840L
- .netmask_ext: 64
- .prefixlen: 64
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
- """
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 5735 3.
+
+ """
+ return self in IPv4Network('0.0.0.0')
+
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
+
+ Returns:
+ A boolean, True if the address is a loopback per RFC 3330.
+
+ """
+ return self in IPv4Network('127.0.0.0/8')
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
+
+ Returns:
+ A boolean, True if the address is link-local per RFC 3927.
+
+ """
+ return self in IPv4Network('169.254.0.0/16')
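
A quick sketch of the classification properties above; each is just a
membership test against the RFC-defined block:

    >>> IPv4Address('10.0.0.1').is_private
    True
    >>> IPv4Address('127.0.0.1').is_loopback
    True
    >>> IPv4Address('169.254.1.1').is_link_local
    True
    >>> IPv4Address('224.0.0.251').is_multicast
    True
    >>> IPv4Address('8.8.8.8').is_private
    False
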
- _ALL_ONES = (2**128) - 1
- def __init__(self, ipaddr):
- """Instantiate a new IPv6 object.
+class IPv4Address(_BaseV4, _BaseIP):
+ """Represent and manipulate single IPv4 Addresses."""
+
+ def __init__(self, address):
+
+ """
Args:
- ipaddr: A string or integer representing the IP or the IP
- and prefix/netmask.
- '2001:4860::/128'
- '2001:4860:0000:0000:0000:0000:0000:0000/128'
- '2001:4860::'
- are all functionally the same in IPv6. That is to say,
- failing to provide a subnetmask will create an object with
- a mask of /128.
+ address: A string or integer representing the IP
+ '192.168.1.1'
Additionally, an integer can be passed, so
- IPv6('2001:4860::') ==
- IPv6(42541956101370907050197289607612071936L).
+ IPv4Address('192.168.1.1') == IPv4Address(3232235777).
or, more generally
- IPv6(IPv6('2001:4860::').ip) == IPv6('2001:4860::')
+ IPv4Address(int(IPv4Address('192.168.1.1'))) ==
+ IPv4Address('192.168.1.1')
Raises:
- IPv6IpValidationError: If ipaddr isn't a valid IPv6 address.
- IPv6NetmaskValidationError: If the netmask isn't valid for
- an IPv6 address.
+ AddressValueError: If ipaddr isn't a valid IPv4 address.
"""
- BaseIP.__init__(self)
- self._version = 6
+ _BaseIP.__init__(self, address)
+ _BaseV4.__init__(self, address)
# Efficient constructor from integer.
- if isinstance(ipaddr, long) or isinstance(ipaddr, int):
- self.ip = ipaddr
- self._prefixlen = 128
- self.netmask = self._ALL_ONES
- if ipaddr < 0 or ipaddr > self._ALL_ONES:
- raise IPv6IpValidationError(ipaddr)
+ if isinstance(address, (int, long)):
+ self._ip = address
+ if address < 0 or address > self._ALL_ONES:
+ raise AddressValueError(address)
return
# Constructing from a packed address
if _compat_has_real_bytes:
- if isinstance(ipaddr, bytes) and len(ipaddr) == 16:
- tmp = struct.unpack('!QQ', ipaddr)
- self.ip = (tmp[0] << 64) | tmp[1]
- self._prefixlen = 128
- self.netmask = self._ALL_ONES
+ if isinstance(address, bytes) and len(address) == 4:
+ self._ip = struct.unpack('!I', address)[0]
return
# Assume input argument to be string or any object representation
- # which converts into a formatted IP prefix string.
- addr_str = str(ipaddr)
- if not addr_str:
- raise IPv6IpValidationError('')
- addr = addr_str.split('/')
- if len(addr) > 1:
- if self._is_valid_netmask(addr[1]):
- self._prefixlen = int(addr[1])
- else:
- raise IPv6NetmaskValidationError(addr[1])
- else:
- self._prefixlen = 128
+ # which converts into a formatted IP string.
+ addr_str = str(address)
+ if not self._is_valid_ip(addr_str):
+ raise AddressValueError(addr_str)
- self.netmask = self._ip_int_from_prefix(self._prefixlen)
+ self._ip = self._ip_int_from_string(addr_str)
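
A brief sketch of the constructor contract described in the docstring:

    >>> int(IPv4Address('192.168.1.1'))
    3232235777
    >>> IPv4Address(3232235777) == IPv4Address('192.168.1.1')
    True
    >>> # '192.168.1.256' would raise AddressValueError (octet > 255).
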
- if not self._is_valid_ip(addr[0]):
- raise IPv6IpValidationError(addr[0])
- self.ip = self._ip_int_from_string(addr[0])
+class IPv4Network(_BaseV4, _BaseNet):
- @property
- def ip_ext_full(self):
- """Returns the expanded version of the IPv6 string."""
- return self._explode_shorthand_ip_string(self.ip_ext)
+ """This class represents and manipulates 32-bit IPv4 networks.
+
+ Attributes: [examples for IPv4Network('1.2.3.4/27')]
+ ._ip: 16909060
+ .ip: IPv4Address('1.2.3.4')
+ .network: IPv4Address('1.2.3.0')
+ .hostmask: IPv4Address('0.0.0.31')
+ .broadcast: IPv4Address('1.2.3.31')
+ .netmask: IPv4Address('255.255.255.224')
+ .prefixlen: 27
+
+ """
+
+ # the valid octets for host and netmasks. only useful for IPv4.
+ _valid_mask_octets = set((255, 254, 252, 248, 240, 224, 192, 128, 0))
- def _set_prefix(self, prefixlen):
- """Change the prefix length.
+ def __init__(self, address, strict=False):
+ """Instantiate a new IPv4 network object.
Args:
- prefixlen: An integer, the new prefix length.
+ address: A string or integer representing the IP [& network].
+ '192.168.1.1/24'
+ '192.168.1.1/255.255.255.0'
+ '192.168.1.1/0.0.0.255'
+ are all functionally the same in IPv4. Similarly,
+ '192.168.1.1'
+ '192.168.1.1/255.255.255.255'
+ '192.168.1.1/32'
+ are also functionally equivalent. That is to say, failing to
+ provide a subnetmask will create an object with a mask of /32.
+
+ If the mask (portion after the / in the argument) is given in
+ dotted quad form, it is treated as a netmask if it starts with a
+ non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
+ starts with a zero field (e.g. 0.255.255.255 == /8), with the
+ single exception of an all-zero mask which is treated as a
+ netmask == /0. If no mask is given, a default of /32 is used.
+
+ Additionally, an integer can be passed, so
+ IPv4Network('192.168.1.1') == IPv4Network(3232235777).
+ or, more generally
+ IPv4Network(int(IPv4Network('192.168.1.1'))) ==
+ IPv4Network('192.168.1.1')
+
+ strict: A boolean. If true, ensure that we have been passed
+ a true network address, eg, 192.168.1.0/24, and not an
+ IP address on a network, eg, 192.168.1.1/24.
Raises:
- IPv6NetmaskValidationError: If prefixlen is out of bounds.
+ AddressValueError: If ipaddr isn't a valid IPv4 address.
+ NetmaskValueError: If the netmask isn't valid for
+ an IPv4 address.
+ ValueError: If strict was True and a network address was not
+ supplied.
"""
- if not 0 <= prefixlen <= 128:
- raise IPv6NetmaskValidationError(prefixlen)
- self._prefixlen = prefixlen
- self.netmask = self._ip_int_from_prefix(self.prefixlen)
+ _BaseNet.__init__(self, address)
+ _BaseV4.__init__(self, address)
- def subnet(self, prefixlen_diff=1):
- """The subnets which join to make the current subnet.
+ # Efficient constructor from integer.
+ if isinstance(address, (int, long)):
+ self._ip = address
+ self.ip = IPv4Address(self._ip)
+ self._prefixlen = 32
+ self.netmask = IPv4Address(self._ALL_ONES)
+ if address < 0 or address > self._ALL_ONES:
+ raise AddressValueError(address)
+ return
- In the case that self contains only one IP
- (self._prefixlen == 128), return a list with just ourself.
+ # Constructing from a packed address
+ if _compat_has_real_bytes:
+ if isinstance(address, bytes) and len(address) == 4:
+ self._ip = struct.unpack('!I', address)[0]
+ self.ip = IPv4Address(self._ip)
+ self._prefixlen = 32
+ self.netmask = IPv4Address(self._ALL_ONES)
+ return
- Args:
- prefixlen_diff: An integer, the amount the prefix length
- should be increased by.
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP prefix string.
+ addr = str(address).split('/')
- Returns:
- A list of IPv6 objects.
+ if len(addr) > 2:
+ raise AddressValueError(address)
- Raises:
- PrefixlenDiffInvalidError: The prefixlen_diff is too small
- or too large.
+ if not self._is_valid_ip(addr[0]):
+ raise AddressValueError(addr[0])
- """
- # Preserve original functionality (return [self] if
- # self.prefixlen == 128).
- if self.prefixlen == 128:
- return [self]
+ self._ip = self._ip_int_from_string(addr[0])
+ self.ip = IPv4Address(self._ip)
- if prefixlen_diff < 0:
- raise PrefixlenDiffInvalidError('Prefix length diff must be > 0')
- new_prefixlen = self.prefixlen + prefixlen_diff
- if not self._is_valid_netmask(str(new_prefixlen)):
- raise PrefixlenDiffInvalidError(
- 'Prefix length diff %d is invalid for netblock %s' % (
- new_prefixlen, str(self)))
- first = IPv6(
- self._string_from_ip_int(self.network) + '/' +
- str(self._prefixlen + prefixlen_diff))
- subnets = [first]
- current = first
- while True:
- broadcast = current.broadcast
- if current.broadcast == self.broadcast:
- break
- current = IPv6(self._string_from_ip_int(broadcast + 1) + '/' +
- str(new_prefixlen))
- subnets.append(current)
+ if len(addr) == 2:
+ mask = addr[1].split('.')
+ if len(mask) == 4:
+ # We have dotted decimal netmask.
+ if self._is_valid_netmask(addr[1]):
+ self.netmask = IPv4Address(self._ip_int_from_string(
+ addr[1]))
+ elif self._is_hostmask(addr[1]):
+ self.netmask = IPv4Address(
+ self._ip_int_from_string(addr[1]) ^ self._ALL_ONES)
+ else:
+ raise NetmaskValueError('%s is not a valid netmask'
+ % addr[1])
- return subnets
+ self._prefixlen = self._prefix_from_ip_int(int(self.netmask))
+ else:
+ # We have a netmask in prefix length form.
+ if not self._is_valid_netmask(addr[1]):
+ raise NetmaskValueError(addr[1])
+ self._prefixlen = int(addr[1])
+ self.netmask = IPv4Address(self._ip_int_from_prefix(
+ self._prefixlen))
+ else:
+ self._prefixlen = 32
+ self.netmask = IPv4Address(self._ip_int_from_prefix(
+ self._prefixlen))
+ if strict:
+ if self.ip != self.network:
+ raise ValueError('%s has host bits set' %
+ self.ip)
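
A sketch of the strict flag's behavior as documented above:

    >>> str(IPv4Network('192.168.1.1/24').network)  # host bits tolerated
    '192.168.1.0'
    >>> IPv4Network('192.168.1.0/24', strict=True)  # a true network address
    IPv4Network('192.168.1.0/24')
    >>> # IPv4Network('192.168.1.1/24', strict=True) raises
    >>> # ValueError: 192.168.1.1 has host bits set
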
- def supernet(self, prefixlen_diff=1):
- """The supernet containing the current network.
+ def _is_hostmask(self, ip_str):
+ """Test if the IP string is a hostmask (rather than a netmask).
Args:
- prefixlen_diff: An integer, the amount the prefix length of the
- network should be decreased by. For example, given a /96
- network and a prefixlen_diff of 3, a supernet with a /93
- netmask is returned.
+ ip_str: A string, the potential hostmask.
Returns:
- An IPv6 object.
-
- Raises:
- PrefixlenDiffInvalidError: If
- self._prefixlen - prefixlen_diff < 0. I.e., you have a
- negative prefix length.
+ A boolean, True if the IP string is a hostmask.
"""
- if self.prefixlen == 0:
- return self
- if self.prefixlen - prefixlen_diff < 0:
- raise PrefixlenDiffInvalidError(
- 'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
- (self.prefixlen, prefixlen_diff))
- return IPv6(self.ip_ext + '/' + str(self.prefixlen - prefixlen_diff))
+ bits = ip_str.split('.')
+ try:
+ parts = [int(x) for x in bits if int(x) in self._valid_mask_octets]
+ except ValueError:
+ return False
+ if len(parts) != len(bits):
+ return False
+ if parts[0] < parts[-1]:
+ return True
+ return False
+
+ def _is_valid_netmask(self, netmask):
+ """Verify that the netmask is valid.
- @property
- def is_multicast(self):
- """Test if the address is reserved for multicast use.
+ Args:
+ netmask: A string, either a prefix or dotted decimal
+ netmask.
Returns:
- A boolean, True if the address is a multicast address.
- See RFC 2373 2.7 for details.
+ A boolean, True if the prefix represents a valid IPv4
+ netmask.
"""
- return self in IPv6('ff00::/8')
+ mask = netmask.split('.')
+ if len(mask) == 4:
+ if [x for x in mask if int(x) not in self._valid_mask_octets]:
+ return False
+ if [y for idx, y in enumerate(mask) if idx > 0 and
+ int(y) > int(mask[idx - 1])]:
+ return False
+ return True
+ try:
+ netmask = int(netmask)
+ except ValueError:
+ return False
+ return 0 <= netmask <= 32
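
Putting _is_valid_netmask and _is_hostmask together, the three mask
spellings from the class docstring parse to the same prefix (a sketch):

    >>> IPv4Network('10.0.0.1/24').prefixlen
    24
    >>> IPv4Network('10.0.0.1/255.255.255.0').prefixlen  # netmask form
    24
    >>> IPv4Network('10.0.0.1/0.0.0.255').prefixlen      # hostmask form
    24
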
- @property
- def is_unspecified(self):
- """Test if the address is unspecified.
+ # backwards compatibility
+ IsRFC1918 = lambda self: self.is_private
+ IsMulticast = lambda self: self.is_multicast
+ IsLoopback = lambda self: self.is_loopback
+ IsLinkLocal = lambda self: self.is_link_local
- Returns:
- A boolean, True if this is the unspecified address as defined in
- RFC 2373 2.5.2.
- """
- return self == IPv6('::')
+class _BaseV6(object):
- @property
- def is_loopback(self):
- """Test if the address is a loopback adddress.
+ """Base IPv6 object.
- Returns:
- A boolean, True if the address is a loopback address as defined in
- RFC 2373 2.5.3.
+ The following methods are used by IPv6 objects in both single IP
+ addresses and networks.
- """
- return self == IPv6('::1')
+ """
- @property
- def is_link_local(self):
- """Test if the address is reserved for link-local.
+ _ALL_ONES = (2**128) - 1
+
+ def __init__(self, address):
+ self._version = 6
+ self._max_prefixlen = 128
+
+ def _ip_int_from_string(self, ip_str=None):
+ """Turn an IPv6 ip_str into an integer.
+
+ Args:
+ ip_str: A string, the IPv6 address.
Returns:
- A boolean, True if the address is reserved per RFC 4291.
+ A long, the IPv6 address as an integer.
+
+ Raises:
+ AddressValueError: if ip_str isn't a valid IP Address.
"""
- return self in IPv6('fe80::/10')
+ if not ip_str:
+ ip_str = str(self.ip)
- @property
- def is_site_local(self):
- """Test if the address is reserved for site-local.
+ ip_int = 0
- Note that the site-local address space has been deprecated by RFC 3879.
- Use is_private to test if this address is in the space of unique local
- addresses as defined by RFC 4193.
+ fields = self._explode_shorthand_ip_string(ip_str).split(':')
- Returns:
- A boolean, True if the address is reserved per RFC 3513 2.5.6.
+ # Do we have an IPv4 mapped (::ffff:a.b.c.d) or compact (::a.b.c.d)
+ # ip_str?
+ if fields[-1].count('.') == 3:
+ ipv4_string = fields.pop()
+ ipv4_int = IPv4Network(ipv4_string)._ip
+ octets = []
+ for _ in xrange(2):
+ octets.append(hex(ipv4_int & 0xFFFF).lstrip('0x').rstrip('L'))
+ ipv4_int >>= 16
+ fields.extend(reversed(octets))
- """
- return self in IPv6('fec0::/10')
+ for field in fields:
+ try:
+ ip_int = (ip_int << 16) + int(field or '0', 16)
+ except ValueError:
+ raise AddressValueError(ip_str)
- @property
- def is_private(self):
- """Test if this address is allocated for private networks.
+ return ip_int
+
+ def _compress_hextets(self, hextets):
+ """Compresses a list of hextets.
+
+ Compresses a list of strings, replacing the longest continuous
+ sequence of "0" in the list with "" and adding empty strings at
+ the beginning or at the end of the string such that subsequently
+ calling ":".join(hextets) will produce the compressed version of
+ the IPv6 address.
+
+ Args:
+ hextets: A list of strings, the hextets to compress.
Returns:
- A boolean, True if the address is reserved per RFC 4193.
+ A list of strings.
"""
- return self in IPv6('fc00::/7')
+ best_doublecolon_start = -1
+ best_doublecolon_len = 0
+ doublecolon_start = -1
+ doublecolon_len = 0
+ for index in range(len(hextets)):
+ if hextets[index] == '0':
+ doublecolon_len += 1
+ if doublecolon_start == -1:
+ # Start of a sequence of zeros.
+ doublecolon_start = index
+ if doublecolon_len > best_doublecolon_len:
+ # This is the longest sequence of zeros so far.
+ best_doublecolon_len = doublecolon_len
+ best_doublecolon_start = doublecolon_start
+ else:
+ doublecolon_len = 0
+ doublecolon_start = -1
- @property
- def version(self):
- return self._version
+ if best_doublecolon_len > 1:
+ best_doublecolon_end = (best_doublecolon_start +
+ best_doublecolon_len)
+ # For zeros at the end of the address.
+ if best_doublecolon_end == len(hextets):
+ hextets += ['']
+ hextets[best_doublecolon_start:best_doublecolon_end] = ['']
+ # For zeros at the beginning of the address.
+ if best_doublecolon_start == 0:
+ hextets = [''] + hextets
- @property
- def packed(self):
- """The binary representation of this address."""
- return struct.pack('!QQ', self.ip >> 64, self.ip & (2**64 - 1))
+ return hextets
- def _is_shorthand_ip(self, ip_str=None):
- """Determine if the address is shortened.
+ def _string_from_ip_int(self, ip_int=None):
+ """Turns a 128-bit integer into hexadecimal notation.
Args:
- ip_str: A string, the IPv6 address.
+ ip_int: An integer, the IP address.
Returns:
- A boolean, True if the address is shortened.
+ A string, the hexadecimal representation of the address.
+
+ Raises:
+ ValueError: The address is bigger than 128 bits of all ones.
"""
- if ip_str.count('::') == 1:
- return True
- return False
+ if not ip_int and ip_int != 0:
+ ip_int = int(self._ip)
+
+ if ip_int > self._ALL_ONES:
+ raise ValueError('IPv6 address is too large')
- def _explode_shorthand_ip_string(self, ip_str):
+ hex_str = '%032x' % ip_int
+ hextets = []
+ for x in range(0, 32, 4):
+ hextets.append('%x' % int(hex_str[x:x+4], 16))
+
+ hextets = self._compress_hextets(hextets)
+ return ':'.join(hextets)
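
A sketch of the effect: _compress_hextets collapses the longest zero run,
so the exploded and shorthand forms map to the same string:

    >>> str(IPv6Address('2001:0db8:0000:0000:0000:0000:0000:0001'))
    '2001:db8::1'
    >>> str(IPv6Address(1))
    '::1'
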
+
+ def _explode_shorthand_ip_string(self, ip_str=None):
"""Expand a shortened IPv6 address.
Args:
@@ -1176,6 +1469,11 @@ class IPv6(BaseIP):
A string, the expanded IPv6 address.
"""
+ if not ip_str:
+ ip_str = str(self)
+ if isinstance(self, _BaseNet):
+ ip_str = str(self.ip)
+
if self._is_shorthand_ip(ip_str):
new_ip = []
hextet = ip_str.split('::')
@@ -1195,7 +1493,7 @@ class IPv6(BaseIP):
# We've already got a longhand ip_str.
return ip_str
- def _is_valid_ip(self, ip_str=None):
+ def _is_valid_ip(self, ip_str):
"""Ensure we have a valid IPv6 address.
Probably not as exhaustive as it should be.
@@ -1207,9 +1505,6 @@ class IPv6(BaseIP):
A boolean, True if this is a valid IPv6 address.
"""
- if not ip_str:
- ip_str = self.ip_ext
-
# We need to have at least one ':'.
if ':' not in ip_str:
return False
@@ -1239,156 +1534,321 @@ class IPv6(BaseIP):
# hextets are between 0x0 and 0xFFFF.
for hextet in ip_str.split(':'):
if hextet.count('.') == 3:
- # If we have an IPv4 mapped address, the IPv4 portion has to be
- # at the end of the IPv6 portion.
+ # If we have an IPv4 mapped address, the IPv4 portion has to
+ # be at the end of the IPv6 portion.
if not ip_str.split(':')[-1] == hextet:
return False
try:
- IPv4(hextet)
- except IPv4IpValidationError:
+ IPv4Network(hextet)
+ except AddressValueError:
+ return False
+ else:
+ try:
+ # a value error here means that we got a bad hextet,
+ # something like 0xzzzz
+ if int(hextet, 16) < 0x0 or int(hextet, 16) > 0xFFFF:
+ return False
+ except ValueError:
return False
- elif int(hextet, 16) < 0x0 or int(hextet, 16) > 0xFFFF:
- return False
return True
- def _is_valid_netmask(self, prefixlen):
- """Verify that the netmask/prefixlen is valid.
+ def _is_shorthand_ip(self, ip_str=None):
+ """Determine if the address is shortened.
Args:
- prefixlen: A string, the netmask in prefix length format.
+ ip_str: A string, the IPv6 address.
Returns:
- A boolean, True if the prefix represents a valid IPv6
- netmask.
+ A boolean, True if the address is shortened.
"""
- try:
- prefixlen = int(prefixlen)
- except ValueError:
- return False
- return 0 <= prefixlen <= 128
+ if ip_str.count('::') == 1:
+ return True
+ return False
- def _ip_int_from_string(self, ip_str=None):
- """Turn an IPv6 address into an integer.
+ @property
+ def max_prefixlen(self):
+ return self._max_prefixlen
- Args:
- ip_str: A string, the IPv6 address.
+ @property
+ def packed(self):
+ """The binary representation of this address."""
+ return struct.pack('!QQ', self._ip >> 64, self._ip & (2**64 - 1))
+
+ @property
+ def version(self):
+ return self._version
+
+ @property
+ def is_multicast(self):
+ """Test if the address is reserved for multicast use.
Returns:
- A long, the IPv6 address.
+ A boolean, True if the address is a multicast address.
+ See RFC 2373 2.7 for details.
"""
- if not ip_str:
- ip_str = self.ip_ext
+ return self in IPv6Network('ff00::/8')
- ip_int = 0
+ @property
+ def is_reserved(self):
+ """Test if the address is otherwise IETF reserved.
- fields = self._explode_shorthand_ip_string(ip_str).split(':')
+ Returns:
+ A boolean, True if the address is within one of the
+ reserved IPv6 Network ranges.
- # Do we have an IPv4 mapped (::ffff:a.b.c.d) or compact (::a.b.c.d)
- # address?
- if fields[-1].count('.') == 3:
- ipv4_string = fields.pop()
- ipv4_int = IPv4(ipv4_string).ip
- octets = []
- for _ in xrange(2):
- octets.append(hex(ipv4_int & 0xFFFF).lstrip('0x').rstrip('L'))
- ipv4_int >>= 16
- fields.extend(reversed(octets))
+ """
+ return (self in IPv6Network('::/8') or
+ self in IPv6Network('100::/8') or
+ self in IPv6Network('200::/7') or
+ self in IPv6Network('400::/6') or
+ self in IPv6Network('800::/5') or
+ self in IPv6Network('1000::/4') or
+ self in IPv6Network('4000::/3') or
+ self in IPv6Network('6000::/3') or
+ self in IPv6Network('8000::/3') or
+ self in IPv6Network('A000::/3') or
+ self in IPv6Network('C000::/3') or
+ self in IPv6Network('E000::/4') or
+ self in IPv6Network('F000::/5') or
+ self in IPv6Network('F800::/6') or
+ self in IPv6Network('FE00::/9'))
- for field in fields:
- ip_int = (ip_int << 16) + int(field, 16)
+ @property
+ def is_unspecified(self):
+ """Test if the address is unspecified.
- return ip_int
+ Returns:
+ A boolean, True if this is the unspecified address as defined in
+ RFC 2373 2.5.2.
- def _compress_hextets(self, hextets):
- """Compresses a list of hextets.
+ """
+ return self == IPv6Network('::')
- Compresses a list of strings, replacing the longest continuous
- sequence of "0" in the list with "" and adding empty strings at
- the beginning or at the end of the string such that subsequently
- calling ":".join(hextets) will produce the compressed version of
- the IPv6 address.
+ @property
+ def is_loopback(self):
+ """Test if the address is a loopback address.
- Args:
- hextets: A list of strings, the hextets to compress.
+ Returns:
+ A boolean, True if the address is a loopback address as defined in
+ RFC 2373 2.5.3.
+
+ """
+ return self == IPv6Network('::1')
+
+ @property
+ def is_link_local(self):
+ """Test if the address is reserved for link-local.
Returns:
- A list of strings.
+ A boolean, True if the address is reserved per RFC 4291.
"""
- best_doublecolon_start = -1
- best_doublecolon_len = 0
- doublecolon_start = -1
- doublecolon_len = 0
- for index in range(len(hextets)):
- if hextets[index] == '0':
- doublecolon_len += 1
- if doublecolon_start == -1:
- # Start of a sequence of zeros.
- doublecolon_start = index
- if doublecolon_len > best_doublecolon_len:
- # This is the longest sequence of zeros so far.
- best_doublecolon_len = doublecolon_len
- best_doublecolon_start = doublecolon_start
- else:
- doublecolon_len = 0
- doublecolon_start = -1
+ return self in IPv6Network('fe80::/10')
- if best_doublecolon_len > 1:
- best_doublecolon_end = (best_doublecolon_start +
- best_doublecolon_len)
- # For zeros at the end of the address.
- if best_doublecolon_end == len(hextets):
- hextets += ['']
- hextets[best_doublecolon_start:best_doublecolon_end] = ['']
- # For zeros at the beginning of the address.
- if best_doublecolon_start == 0:
- hextets = [''] + hextets
+ @property
+ def is_site_local(self):
+ """Test if the address is reserved for site-local.
- return hextets
+ Note that the site-local address space has been deprecated by RFC 3879.
+ Use is_private to test if this address is in the space of unique local
+ addresses as defined by RFC 4193.
- def _string_from_ip_int(self, ip_int=None):
- """Turns a 128-bit integer into hexadecimal notation.
+ Returns:
+ A boolean, True if the address is reserved per RFC 3513 2.5.6.
- Args:
- ip_int: An integer, the IP address.
+ """
+ return self in IPv6Network('fec0::/10')
+
+ @property
+ def is_private(self):
+ """Test if this address is allocated for private networks.
Returns:
- A string, the hexadecimal representation of the address.
+ A boolean, True if the address is reserved per RFC 4193.
+
+ """
+ return self in IPv6Network('fc00::/7')
+
+ @property
+ def ipv4_mapped(self):
+ """Return the IPv4 mapped address.
+
+ Returns:
+ If the IPv6 address is a v4 mapped address, return the
+ IPv4 mapped address. Return None otherwise.
+
+ """
+ hextets = self._explode_shorthand_ip_string().split(':')
+ if hextets[-3] != 'ffff':
+ return None
+ try:
+ return IPv4Address(int('%s%s' % (hextets[-2], hextets[-1]), 16))
+ except AddressValueError:
+ return None
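
A sketch of ipv4_mapped for the mapped and non-mapped cases:

    >>> str(IPv6Address('::ffff:192.168.1.1').ipv4_mapped)
    '192.168.1.1'
    >>> IPv6Address('2001:db8::1').ipv4_mapped is None
    True
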
+
+
+class IPv6Address(_BaseV6, _BaseIP):
+
+ """Represent and manipulate single IPv6 Addresses.
+ """
+
+ def __init__(self, address):
+ """Instantiate a new IPv6 address object.
+
+ Args:
+ address: A string or integer representing the IP
+
+ Additionally, an integer can be passed, so
+ IPv6Address('2001:4860::') ==
+ IPv6Address(42541956101370907050197289607612071936L).
+ or, more generally
+ IPv6Address(IPv6Address('2001:4860::')._ip) ==
+ IPv6Address('2001:4860::')
Raises:
- ValueError: The address is bigger than 128 bits of all ones.
+ AddressValueError: If address isn't a valid IPv6 address.
"""
- if not ip_int and ip_int != 0:
- ip_int = self.ip
+ _BaseIP.__init__(self, address)
+ _BaseV6.__init__(self, address)
- if ip_int > self._ALL_ONES:
- raise ValueError('IPv6 address is too large')
+ # Efficient constructor from integer.
+ if isinstance(address, (int, long)):
+ self._ip = address
+ if address < 0 or address > self._ALL_ONES:
+ raise AddressValueError(address)
+ return
- hex_str = '%032x' % ip_int
- hextets = []
- for x in range(0, 32, 4):
- hextets.append('%x' % int(hex_str[x:x+4], 16))
+ # Constructing from a packed address
+ if _compat_has_real_bytes:
+ if isinstance(address, bytes) and len(address) == 16:
+ tmp = struct.unpack('!QQ', address)
+ self._ip = (tmp[0] << 64) | tmp[1]
+ return
- hextets = self._compress_hextets(hextets)
- return ':'.join(hextets)
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP string.
+ addr_str = str(address)
+ if not addr_str:
+ raise AddressValueError('')
- @property
- def netmask_ext(self):
- """IPv6 extended netmask.
+ self._ip = self._ip_int_from_string(addr_str)
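
A brief sketch of the constructor contract, mirroring the docstring:

    >>> IPv6Address(IPv6Address('2001:4860::')._ip) == IPv6Address('2001:4860::')
    True
    >>> # '1234:axy::b' would raise AddressValueError ('axy' is not hex).
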
- We don't deal with netmasks in IPv6 like we do in IPv4. This is
- here strictly for IPv4 compatibility. We simply return the
- prefix length.
- Returns:
- An integer.
+class IPv6Network(_BaseV6, _BaseNet):
+
+ """This class represents and manipulates 128-bit IPv6 networks.
+
+ Attributes: [examples for IPv6('2001:658:22A:CAFE:200::1/64')]
+ .ip: IPv6Address('2001:658:22a:cafe:200::1')
+ .network: IPv6Address('2001:658:22a:cafe::')
+ .hostmask: IPv6Address('::ffff:ffff:ffff:ffff')
+ .broadcast: IPv6Address('2001:658:22a:cafe:ffff:ffff:ffff:ffff')
+ .netmask: IPv6Address('ffff:ffff:ffff:ffff::')
+ .prefixlen: 64
+
+ """
+
+
+ def __init__(self, address, strict=False):
+ """Instantiate a new IPv6 Network object.
+
+ Args:
+ address: A string or integer representing the IPv6 network or the IP
+ and prefix/netmask.
+ '2001:4860::/128'
+ '2001:4860:0000:0000:0000:0000:0000:0000/128'
+ '2001:4860::'
+ are all functionally the same in IPv6. That is to say,
+ failing to provide a subnetmask will create an object with
+ a mask of /128.
+
+ Additionally, an integer can be passed, so
+ IPv6Network('2001:4860::') ==
+ IPv6Network(42541956101370907050197289607612071936L).
+ or, more generally
+ IPv6Network(IPv6Network('2001:4860::')._ip) ==
+ IPv6Network('2001:4860::')
+
+ strict: A boolean. If true, ensure that we have been passed
+ a true network address, eg, 2001:4860::/64, and not an
+ IP address on a network, eg, 2001:4860::1/64.
+
+ Raises:
+ AddressValueError: If address isn't a valid IPv6 address.
+ NetmaskValueError: If the netmask isn't valid for
+ an IPv6 address.
+ ValueError: If strict was True and a network address was not
+ supplied.
"""
- return self.prefixlen
+ _BaseNet.__init__(self, address)
+ _BaseV6.__init__(self, address)
- # backwards compatibility
- Subnet = subnet
- Supernet = supernet
+ # Efficient constructor from integer.
+ if isinstance(address, (int, long)):
+ self._ip = address
+ self.ip = IPv6Address(self._ip)
+ self._prefixlen = 128
+ self.netmask = IPv6Address(self._ALL_ONES)
+ if address < 0 or address > self._ALL_ONES:
+ raise AddressValueError(address)
+ return
+
+ # Constructing from a packed address
+ if _compat_has_real_bytes:
+ if isinstance(address, bytes) and len(address) == 16:
+ tmp = struct.unpack('!QQ', address)
+ self._ip = (tmp[0] << 64) | tmp[1]
+ self.ip = IPv6Address(self._ip)
+ self._prefixlen = 128
+ self.netmask = IPv6Address(self._ALL_ONES)
+ return
+
+ # Assume input argument to be string or any object representation
+ # which converts into a formatted IP prefix string.
+ addr = str(address).split('/')
+
+ if len(addr) > 2:
+ raise AddressValueError(address)
+
+ if not self._is_valid_ip(addr[0]):
+ raise AddressValueError(addr[0])
+
+ if len(addr) == 2:
+ if self._is_valid_netmask(addr[1]):
+ self._prefixlen = int(addr[1])
+ else:
+ raise NetmaskValueError(addr[1])
+ else:
+ self._prefixlen = 128
+
+ self.netmask = IPv6Address(self._ip_int_from_prefix(self._prefixlen))
+
+ self._ip = self._ip_int_from_string(addr[0])
+ self.ip = IPv6Address(self._ip)
+
+ if strict:
+ if self.ip != self.network:
+ raise ValueError('%s has host bits set' %
+ self.ip)
+
+
+ def _is_valid_netmask(self, prefixlen):
+ """Verify that the netmask/prefixlen is valid.
+
+ Args:
+ prefixlen: A string, the netmask in prefix length format.
+
+ Returns:
+ A boolean, True if the prefix represents a valid IPv6
+ netmask.
+
+ """
+ try:
+ prefixlen = int(prefixlen)
+ except ValueError:
+ return False
+ return 0 <= prefixlen <= 128
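
Finally, a sketch of IPv6Network with and without strict, parallel to the
IPv4 case above:

    >>> str(IPv6Network('2001:db8::1/64').network)
    '2001:db8::'
    >>> # IPv6Network('2001:db8::1/64', strict=True) raises ValueError,
    >>> # while IPv6Network('2001:db8::/64', strict=True) is accepted.
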
diff --git a/google-appengine/lib/ipaddr/ipaddr/ipaddr_test.py b/google-appengine/lib/ipaddr/ipaddr/ipaddr_test.py
index 3a85705..f2d98f1 100755
--- a/google-appengine/lib/ipaddr/ipaddr/ipaddr_test.py
+++ b/google-appengine/lib/ipaddr/ipaddr/ipaddr_test.py
@@ -19,6 +19,7 @@
import unittest
+import time
import google3
import ipaddr
@@ -32,73 +33,135 @@ else:
class IpaddrUnitTest(unittest.TestCase):
def setUp(self):
- self.ipv4 = ipaddr.IPv4('1.2.3.4/24')
- self.ipv4_hostmask = ipaddr.IPv4('10.0.0.1/0.255.255.255')
- self.ipv6 = ipaddr.IPv6('2001:658:22a:cafe:200:0:0:1/64')
+ self.ipv4 = ipaddr.IPv4Network('1.2.3.4/24')
+ self.ipv4_hostmask = ipaddr.IPv4Network('10.0.0.1/0.255.255.255')
+ self.ipv6 = ipaddr.IPv6Network('2001:658:22a:cafe:200:0:0:1/64')
+
+ def tearDown(self):
+ del(self.ipv4)
+ del(self.ipv4_hostmask)
+ del(self.ipv6)
def testRepr(self):
- self.assertEqual("IPv4('1.2.3.4/32')", repr(ipaddr.IPv4('1.2.3.4')))
- self.assertEqual("IPv6('::1/128')", repr(ipaddr.IPv6('::1')))
+ self.assertEqual("IPv4Network('1.2.3.4/32')",
+ repr(ipaddr.IPv4Network('1.2.3.4')))
+ self.assertEqual("IPv6Network('::1/128')",
+ repr(ipaddr.IPv6Network('::1')))
def testInvalidStrings(self):
- self.assertRaises(ValueError, ipaddr.IP, '')
- self.assertRaises(ValueError, ipaddr.IP, 'www.google.com')
- self.assertRaises(ValueError, ipaddr.IP, '1.2.3')
- self.assertRaises(ValueError, ipaddr.IP, '1.2.3.4.5')
- self.assertRaises(ValueError, ipaddr.IP, '301.2.2.2')
- self.assertRaises(ValueError, ipaddr.IP, '1:2:3:4:5:6:7')
- self.assertRaises(ValueError, ipaddr.IP, '1:2:3:4:5:6:7:')
- self.assertRaises(ValueError, ipaddr.IP, ':2:3:4:5:6:7:8')
- self.assertRaises(ValueError, ipaddr.IP, '1:2:3:4:5:6:7:8:9')
- self.assertRaises(ValueError, ipaddr.IP, '1:2:3:4:5:6:7:8:')
- self.assertRaises(ValueError, ipaddr.IP, '1::3:4:5:6::8')
- self.assertRaises(ValueError, ipaddr.IP, 'a:')
- self.assertRaises(ValueError, ipaddr.IP, ':')
- self.assertRaises(ValueError, ipaddr.IP, ':::')
- self.assertRaises(ValueError, ipaddr.IP, '::a:')
- self.assertRaises(ValueError, ipaddr.IP, '1ffff::')
- self.assertRaises(ValueError, ipaddr.IP, '0xa::')
- self.assertRaises(ValueError, ipaddr.IP, '1:2:3:4:5:6:1a.2.3.4')
- self.assertRaises(ValueError, ipaddr.IP, '1:2:3:4:5:1.2.3.4:8')
- self.assertRaises(ipaddr.IPv4IpValidationError, ipaddr.IPv4, '')
- self.assertRaises(ipaddr.IPv4IpValidationError, ipaddr.IPv4,
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, 'www.google.com')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1.2.3')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1.2.3.4.5')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '301.2.2.2')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1:2:3:4:5:6:7')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1:2:3:4:5:6:7:')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, ':2:3:4:5:6:7:8')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1:2:3:4:5:6:7:8:9')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1:2:3:4:5:6:7:8:')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1::3:4:5:6::8')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, 'a:')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, ':')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, ':::')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '::a:')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1ffff::')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '0xa::')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1:2:3:4:5:6:1a.2.3.4')
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '1:2:3:4:5:1.2.3.4:8')
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv4Network, '')
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv4Network,
'google.com')
- self.assertRaises(ipaddr.IPv4IpValidationError, ipaddr.IPv4,
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv4Network,
'::1.2.3.4')
- self.assertRaises(ipaddr.IPv6IpValidationError, ipaddr.IPv6, '')
- self.assertRaises(ipaddr.IPv6IpValidationError, ipaddr.IPv6,
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv6Network, '')
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv6Network,
'google.com')
- self.assertRaises(ipaddr.IPv6IpValidationError, ipaddr.IPv6,
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv6Network,
'1.2.3.4')
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv6Network,
+ 'cafe:cafe::/128/190')
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv6Network,
+ '1234:axy::b')
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv6Address,
+ '1234:axy::b')
+ self.assertRaises(ipaddr.AddressValueError,
+ ipaddr.IPv4Address(1)._ip_int_from_string,
+ '1.a.2.3')
+ self.assertEqual(False, ipaddr.IPv4Network(1)._is_hostmask('1.a.2.3'))
def testGetNetwork(self):
- self.assertEqual(self.ipv4.network, 16909056)
- self.assertEqual(self.ipv4.network_ext, '1.2.3.0')
- self.assertEqual(self.ipv4_hostmask.network_ext, '10.0.0.0')
+ self.assertEqual(int(self.ipv4.network), 16909056)
+ self.assertEqual(str(self.ipv4.network), '1.2.3.0')
+ self.assertEqual(str(self.ipv4_hostmask.network), '10.0.0.0')
- self.assertEqual(self.ipv6.network,
+ self.assertEqual(int(self.ipv6.network),
42540616829182469433403647294022090752)
- self.assertEqual(self.ipv6.network_ext,
+ self.assertEqual(str(self.ipv6.network),
'2001:658:22a:cafe::')
- self.assertEqual(self.ipv6.hostmask_ext,
+ self.assertEqual(str(self.ipv6.hostmask),
'::ffff:ffff:ffff:ffff')
+ def testBadVersionComparison(self):
+ # These should always raise TypeError
+ v4addr = ipaddr.IPAddress('1.1.1.1')
+ v4net = ipaddr.IPNetwork('1.1.1.1')
+ v6addr = ipaddr.IPAddress('::1')
+ v6net = ipaddr.IPNetwork('::1')
+
+ self.assertRaises(TypeError, v4addr.__lt__, v6addr)
+ self.assertRaises(TypeError, v4addr.__gt__, v6addr)
+ self.assertRaises(TypeError, v4net.__lt__, v6net)
+ self.assertRaises(TypeError, v4net.__gt__, v6net)
+
+ self.assertRaises(TypeError, v6addr.__lt__, v4addr)
+ self.assertRaises(TypeError, v6addr.__gt__, v4addr)
+ self.assertRaises(TypeError, v6net.__lt__, v4net)
+ self.assertRaises(TypeError, v6net.__gt__, v4net)
+
+ def testMixedTypeComparison(self):
+ v4addr = ipaddr.IPAddress('1.1.1.1')
+ v4net = ipaddr.IPNetwork('1.1.1.1/32')
+ v6addr = ipaddr.IPAddress('::1')
+ v6net = ipaddr.IPNetwork('::1/128')
+
+ self.assertRaises(TypeError, lambda: v4addr < v4net)
+ self.assertRaises(TypeError, lambda: v4addr > v4net)
+ self.assertRaises(TypeError, lambda: v4net < v4addr)
+ self.assertRaises(TypeError, lambda: v4net > v4addr)
+
+ self.assertRaises(TypeError, lambda: v6addr < v6net)
+ self.assertRaises(TypeError, lambda: v6addr > v6net)
+ self.assertRaises(TypeError, lambda: v6net < v6addr)
+ self.assertRaises(TypeError, lambda: v6net > v6addr)
+
+ # With get_mixed_type_key, you can sort addresses and networks.
+ self.assertEqual([v4addr, v4net], sorted([v4net, v4addr],
+ key=ipaddr.get_mixed_type_key))
+ self.assertEqual([v6addr, v6net], sorted([v6net, v6addr],
+ key=ipaddr.get_mixed_type_key))
+
def testIpFromInt(self):
- self.assertEqual(self.ipv4.ip, ipaddr.IPv4(16909060).ip)
- self.assertRaises(ipaddr.IPv4IpValidationError,
- ipaddr.IPv4, 2**32)
- self.assertRaises(ipaddr.IPv4IpValidationError,
- ipaddr.IPv4, -1)
-
- self.assertEqual(self.ipv6.ip,
- ipaddr.IPv6(42540616829182469433547762482097946625).ip)
- self.assertRaises(ipaddr.IPv6IpValidationError,
- ipaddr.IPv6, 2**128)
- self.assertRaises(ipaddr.IPv6IpValidationError,
- ipaddr.IPv6, -1)
-
- self.assertEqual(ipaddr.IP(self.ipv4.ip).version, 4)
- self.assertEqual(ipaddr.IP(self.ipv6.ip).version, 6)
+ self.assertEqual(self.ipv4.ip, ipaddr.IPv4Network(16909060).ip)
+ self.assertRaises(ipaddr.AddressValueError,
+ ipaddr.IPv4Network, 2**32)
+ self.assertRaises(ipaddr.AddressValueError,
+ ipaddr.IPv4Network, -1)
+
+ ipv4 = ipaddr.IPNetwork('1.2.3.4')
+ ipv6 = ipaddr.IPNetwork('2001:658:22a:cafe:200:0:0:1')
+ self.assertEqual(ipv4, ipaddr.IPNetwork(int(ipv4)))
+ self.assertEqual(ipv6, ipaddr.IPNetwork(int(ipv6)))
+
+ v6_int = 42540616829182469433547762482097946625
+ self.assertEqual(self.ipv6.ip, ipaddr.IPv6Network(v6_int).ip)
+ self.assertRaises(ipaddr.AddressValueError,
+ ipaddr.IPv6Network, 2**128)
+ self.assertRaises(ipaddr.AddressValueError,
+ ipaddr.IPv6Network, -1)
+
+ self.assertEqual(ipaddr.IPNetwork(self.ipv4.ip).version, 4)
+ self.assertEqual(ipaddr.IPNetwork(self.ipv6.ip).version, 6)
if ipaddr._compat_has_real_bytes: # on python3+
def testIpFromPacked(self):
@@ -122,41 +185,39 @@ class IpaddrUnitTest(unittest.TestCase):
self.assertRaises(ValueError, ip, _cb('\x00' * 17))
def testGetIp(self):
- self.assertEqual(self.ipv4.ip, 16909060)
- self.assertEqual(self.ipv4.ip_ext, '1.2.3.4')
- self.assertEqual(self.ipv4.ip_ext_full, '1.2.3.4')
- self.assertEqual(self.ipv4_hostmask.ip_ext, '10.0.0.1')
+ self.assertEqual(int(self.ipv4.ip), 16909060)
+ self.assertEqual(str(self.ipv4.ip), '1.2.3.4')
+ self.assertEqual(str(self.ipv4_hostmask.ip), '10.0.0.1')
- self.assertEqual(self.ipv6.ip, 42540616829182469433547762482097946625)
- self.assertEqual(self.ipv6.ip_ext,
+ self.assertEqual(int(self.ipv6.ip),
+ 42540616829182469433547762482097946625)
+ self.assertEqual(str(self.ipv6.ip),
'2001:658:22a:cafe:200::1')
- self.assertEqual(self.ipv6.ip_ext_full,
- '2001:0658:022a:cafe:0200:0000:0000:0001')
def testGetNetmask(self):
- self.assertEqual(self.ipv4.netmask, 4294967040L)
- self.assertEqual(self.ipv4.netmask_ext, '255.255.255.0')
- self.assertEqual(self.ipv4_hostmask.netmask_ext, '255.0.0.0')
- self.assertEqual(self.ipv6.netmask,
+ self.assertEqual(int(self.ipv4.netmask), 4294967040L)
+ self.assertEqual(str(self.ipv4.netmask), '255.255.255.0')
+ self.assertEqual(str(self.ipv4_hostmask.netmask), '255.0.0.0')
+ self.assertEqual(int(self.ipv6.netmask),
340282366920938463444927863358058659840)
- self.assertEqual(self.ipv6.netmask_ext, 64)
+ self.assertEqual(self.ipv6.prefixlen, 64)
def testZeroNetmask(self):
- ipv4_zero_netmask = ipaddr.IPv4('1.2.3.4/0')
- self.assertEqual(ipv4_zero_netmask.netmask, 0)
+ ipv4_zero_netmask = ipaddr.IPv4Network('1.2.3.4/0')
+ self.assertEqual(int(ipv4_zero_netmask.netmask), 0)
self.assert_(ipv4_zero_netmask._is_valid_netmask(str(0)))
- ipv6_zero_netmask = ipaddr.IPv6('::1/0')
- self.assertEqual(ipv6_zero_netmask.netmask, 0)
+ ipv6_zero_netmask = ipaddr.IPv6Network('::1/0')
+ self.assertEqual(int(ipv6_zero_netmask.netmask), 0)
self.assert_(ipv6_zero_netmask._is_valid_netmask(str(0)))
def testGetBroadcast(self):
- self.assertEqual(self.ipv4.broadcast, 16909311L)
- self.assertEqual(self.ipv4.broadcast_ext, '1.2.3.255')
+ self.assertEqual(int(self.ipv4.broadcast), 16909311L)
+ self.assertEqual(str(self.ipv4.broadcast), '1.2.3.255')
- self.assertEqual(self.ipv6.broadcast,
+ self.assertEqual(int(self.ipv6.broadcast),
42540616829182469451850391367731642367)
- self.assertEqual(self.ipv6.broadcast_ext,
+ self.assertEqual(str(self.ipv6.broadcast),
'2001:658:22a:cafe:ffff:ffff:ffff:ffff')
def testGetPrefixlen(self):
@@ -166,39 +227,69 @@ class IpaddrUnitTest(unittest.TestCase):
def testGetSupernet(self):
self.assertEqual(self.ipv4.supernet().prefixlen, 23)
- self.assertEqual(self.ipv4.supernet().network_ext, '1.2.2.0')
- self.assertEqual(ipaddr.IPv4('0.0.0.0/0').supernet(),
- ipaddr.IPv4('0.0.0.0/0'))
+ self.assertEqual(str(self.ipv4.supernet().network), '1.2.2.0')
+ self.assertEqual(ipaddr.IPv4Network('0.0.0.0/0').supernet(),
+ ipaddr.IPv4Network('0.0.0.0/0'))
self.assertEqual(self.ipv6.supernet().prefixlen, 63)
- self.assertEqual(self.ipv6.supernet().network_ext,
+ self.assertEqual(str(self.ipv6.supernet().network),
'2001:658:22a:cafe::')
- self.assertEqual(ipaddr.IPv6('::0/0').supernet(), ipaddr.IPv6('::0/0'))
+ self.assertEqual(ipaddr.IPv6Network('::0/0').supernet(),
+ ipaddr.IPv6Network('::0/0'))
def testGetSupernet3(self):
self.assertEqual(self.ipv4.supernet(3).prefixlen, 21)
- self.assertEqual(self.ipv4.supernet(3).network_ext, '1.2.0.0')
+ self.assertEqual(str(self.ipv4.supernet(3).network), '1.2.0.0')
self.assertEqual(self.ipv6.supernet(3).prefixlen, 61)
- self.assertEqual(self.ipv6.supernet(3).network_ext,
+ self.assertEqual(str(self.ipv6.supernet(3).network),
'2001:658:22a:caf8::')
+ def testGetSupernet4(self):
+ self.assertRaises(ValueError, self.ipv4.supernet, prefixlen_diff=2,
+ new_prefix=1)
+ self.assertRaises(ValueError, self.ipv4.supernet, new_prefix=25)
+ self.assertEqual(self.ipv4.supernet(prefixlen_diff=2),
+ self.ipv4.supernet(new_prefix=22))
+
+ self.assertRaises(ValueError, self.ipv6.supernet, prefixlen_diff=2,
+ new_prefix=1)
+ self.assertRaises(ValueError, self.ipv6.supernet, new_prefix=65)
+ self.assertEqual(self.ipv6.supernet(prefixlen_diff=2),
+ self.ipv6.supernet(new_prefix=62))
+
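As the new testGetSupernet4 shows, supernet() accepts either prefixlen_diff or new_prefix, but not both. A sketch under the same assumption:

    import ipaddr

    net = ipaddr.IPv4Network('1.2.3.0/24')
    # widening by two bits of prefix, expressed both ways
    assert net.supernet(prefixlen_diff=2) == net.supernet(new_prefix=22)
    # combining the two keywords, or asking for a longer prefix, is a ValueError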
+ def testIterSubnets(self):
+ self.assertEqual(self.ipv4.subnet(), list(self.ipv4.iter_subnets()))
+ self.assertEqual(self.ipv6.subnet(), list(self.ipv6.iter_subnets()))
+
+ def testFancySubnetting(self):
+ self.assertEqual(sorted(self.ipv4.subnet(prefixlen_diff=3)),
+ sorted(self.ipv4.subnet(new_prefix=27)))
+ self.assertRaises(ValueError, self.ipv4.subnet, new_prefix=23)
+ self.assertRaises(ValueError, self.ipv4.subnet,
+ prefixlen_diff=3, new_prefix=27)
+ self.assertEqual(sorted(self.ipv6.subnet(prefixlen_diff=4)),
+ sorted(self.ipv6.subnet(new_prefix=68)))
+ self.assertRaises(ValueError, self.ipv6.subnet, new_prefix=63)
+ self.assertRaises(ValueError, self.ipv6.subnet,
+ prefixlen_diff=4, new_prefix=68)
+
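subnet() gets the same dual interface, and iter_subnets() is its lazy counterpart. A sketch:

    import ipaddr

    net = ipaddr.IPv4Network('1.2.3.0/24')
    # a /24 split into eight /27s, expressed both ways
    assert sorted(net.subnet(prefixlen_diff=3)) == sorted(net.subnet(new_prefix=27))
    # iter_subnets() yields the same networks one at a time
    assert list(net.iter_subnets()) == net.subnet()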
def testGetSubnet(self):
self.assertEqual(self.ipv4.subnet()[0].prefixlen, 25)
- self.assertEqual(self.ipv4.subnet()[0].network_ext, '1.2.3.0')
- self.assertEqual(self.ipv4.subnet()[1].network_ext, '1.2.3.128')
+ self.assertEqual(str(self.ipv4.subnet()[0].network), '1.2.3.0')
+ self.assertEqual(str(self.ipv4.subnet()[1].network), '1.2.3.128')
self.assertEqual(self.ipv6.subnet()[0].prefixlen, 65)
def testGetSubnetForSingle32(self):
- ip = ipaddr.IPv4('1.2.3.4/32')
+ ip = ipaddr.IPv4Network('1.2.3.4/32')
subnets1 = [str(x) for x in ip.subnet()]
subnets2 = [str(x) for x in ip.subnet(2)]
self.assertEqual(subnets1, ['1.2.3.4/32'])
self.assertEqual(subnets1, subnets2)
def testGetSubnetForSingle128(self):
- ip = ipaddr.IPv6('::1/128')
+ ip = ipaddr.IPv6Network('::1/128')
subnets1 = [str(x) for x in ip.subnet()]
subnets2 = [str(x) for x in ip.subnet(2)]
self.assertEqual(subnets1, ['::1/128'])
@@ -219,21 +310,16 @@ class IpaddrUnitTest(unittest.TestCase):
'2001:658:22a:cafe:c000::/66'])
def testSubnetFailsForLargeCidrDiff(self):
- self.assertRaises(ipaddr.PrefixlenDiffInvalidError, self.ipv4.subnet, 9)
- self.assertRaises(ipaddr.PrefixlenDiffInvalidError, self.ipv6.subnet,
- 65)
+ self.assertRaises(ValueError, self.ipv4.subnet, 9)
+ self.assertRaises(ValueError, self.ipv6.subnet, 65)
def testSupernetFailsForLargeCidrDiff(self):
- self.assertRaises(ipaddr.PrefixlenDiffInvalidError, self.ipv4.supernet,
- 25)
- self.assertRaises(ipaddr.PrefixlenDiffInvalidError, self.ipv6.supernet,
- 65)
+ self.assertRaises(ValueError, self.ipv4.supernet, 25)
+ self.assertRaises(ValueError, self.ipv6.supernet, 65)
def testSubnetFailsForNegativeCidrDiff(self):
- self.assertRaises(ipaddr.PrefixlenDiffInvalidError, self.ipv4.subnet,
- -1)
- self.assertRaises(ipaddr.PrefixlenDiffInvalidError, self.ipv6.subnet,
- -1)
+ self.assertRaises(ValueError, self.ipv4.subnet, -1)
+ self.assertRaises(ValueError, self.ipv6.subnet, -1)
def testGetNumHosts(self):
self.assertEqual(self.ipv4.numhosts, 256)
@@ -245,135 +331,221 @@ class IpaddrUnitTest(unittest.TestCase):
self.assertEqual(self.ipv6.supernet().numhosts, 36893488147419103232)
def testContains(self):
- self.assertTrue(ipaddr.IPv4('1.2.3.128/25') in self.ipv4)
- self.assertFalse(ipaddr.IPv4('1.2.4.1/24') in self.ipv4)
+ self.assertTrue(ipaddr.IPv4Network('1.2.3.128/25') in self.ipv4)
+ self.assertFalse(ipaddr.IPv4Network('1.2.4.1/24') in self.ipv4)
self.assertFalse(self.ipv4 in self.ipv6)
self.assertFalse(self.ipv6 in self.ipv4)
self.assertTrue(self.ipv4 in self.ipv4)
self.assertTrue(self.ipv6 in self.ipv6)
+ # We can test addresses and strings as well.
+ addr1 = ipaddr.IPv4Address('1.2.3.37')
+ self.assertTrue(addr1 in self.ipv4)
def testBadAddress(self):
- self.assertRaises(ipaddr.IPv4IpValidationError, ipaddr.IPv4, 'poop')
- self.assertRaises(ipaddr.IPv4IpValidationError,
- ipaddr.IPv4, '1.2.3.256')
-
- self.assertRaises(ipaddr.IPv6IpValidationError, ipaddr.IPv6, 'poopv6')
- self.assertRaises(ipaddr.IPv4IpValidationError,
- ipaddr.IPv4, '1.2.3.4/32/24')
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv4Network,
+ 'poop')
+ self.assertRaises(ipaddr.AddressValueError,
+ ipaddr.IPv4Network, '1.2.3.256')
+
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv6Network,
+ 'poopv6')
+ self.assertRaises(ipaddr.AddressValueError,
+ ipaddr.IPv4Network, '1.2.3.4/32/24')
+ self.assertRaises(ipaddr.AddressValueError,
+ ipaddr.IPv4Network, '10/8')
+ self.assertRaises(ipaddr.AddressValueError,
+ ipaddr.IPv6Network, '10/8')
+
def testBadNetMask(self):
- self.assertRaises(ipaddr.IPv4NetmaskValidationError,
- ipaddr.IPv4, '1.2.3.4/')
- self.assertRaises(ipaddr.IPv4NetmaskValidationError,
- ipaddr.IPv4, '1.2.3.4/33')
- self.assertRaises(ipaddr.IPv4NetmaskValidationError,
- ipaddr.IPv4, '1.2.3.4/254.254.255.256')
-
- self.assertRaises(ipaddr.IPv6NetmaskValidationError,
- ipaddr.IPv6, '::1/')
- self.assertRaises(ipaddr.IPv6NetmaskValidationError,
- ipaddr.IPv6, '::1/129')
+ self.assertRaises(ipaddr.NetmaskValueError,
+ ipaddr.IPv4Network, '1.2.3.4/')
+ self.assertRaises(ipaddr.NetmaskValueError,
+ ipaddr.IPv4Network, '1.2.3.4/33')
+ self.assertRaises(ipaddr.NetmaskValueError,
+ ipaddr.IPv4Network, '1.2.3.4/254.254.255.256')
+ self.assertRaises(ipaddr.NetmaskValueError,
+ ipaddr.IPv4Network, '1.1.1.1/240.255.0.0')
+ self.assertRaises(ipaddr.NetmaskValueError,
+ ipaddr.IPv6Network, '::1/')
+ self.assertRaises(ipaddr.NetmaskValueError,
+ ipaddr.IPv6Network, '::1/129')
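The old per-version validation errors are folded into two shared classes: AddressValueError for a bad address part and NetmaskValueError for a bad mask part. A sketch of caller-side handling (parse_network is a hypothetical helper, not part of the SDK):

    import ipaddr

    def parse_network(s):
        try:
            return ipaddr.IPNetwork(s)
        except ipaddr.AddressValueError:
            return None  # malformed address, e.g. '1.2.3.256'
        except ipaddr.NetmaskValueError:
            return None  # malformed mask, e.g. '1.2.3.4/33'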
def testNth(self):
- self.assertEqual(self.ipv4[5], '1.2.3.5')
+ self.assertEqual(str(self.ipv4[5]), '1.2.3.5')
self.assertRaises(IndexError, self.ipv4.__getitem__, 256)
- self.assertEqual(self.ipv6[5],
+ self.assertEqual(str(self.ipv6[5]),
'2001:658:22a:cafe::5')
def testGetitem(self):
# http://code.google.com/p/ipaddr-py/issues/detail?id=15
- addr = ipaddr.IPv4('172.31.255.128/255.255.255.240')
+ addr = ipaddr.IPv4Network('172.31.255.128/255.255.255.240')
self.assertEqual(28, addr.prefixlen)
addr_list = list(addr)
- self.assertEqual('172.31.255.128', addr_list[0])
- self.assertEqual('172.31.255.128', addr[0])
- self.assertEqual('172.31.255.143', addr_list[-1])
- self.assertEqual('172.31.255.143', addr[-1])
+ self.assertEqual('172.31.255.128', str(addr_list[0]))
+ self.assertEqual('172.31.255.128', str(addr[0]))
+ self.assertEqual('172.31.255.143', str(addr_list[-1]))
+ self.assertEqual('172.31.255.143', str(addr[-1]))
self.assertEqual(addr_list[-1], addr[-1])
def testEquals(self):
- self.assertTrue(self.ipv4 == ipaddr.IPv4('1.2.3.4/24'))
- self.assertFalse(self.ipv4 == ipaddr.IPv4('1.2.3.4/23'))
- self.assertFalse(self.ipv4 == ipaddr.IPv4('1.2.3.5/24'))
- self.assertFalse(self.ipv4 == ipaddr.IPv6('::1.2.3.4/24'))
+ self.assertTrue(self.ipv4 == ipaddr.IPv4Network('1.2.3.4/24'))
+ self.assertFalse(self.ipv4 == ipaddr.IPv4Network('1.2.3.4/23'))
+ self.assertFalse(self.ipv4 == ipaddr.IPv6Network('::1.2.3.4/24'))
self.assertFalse(self.ipv4 == '')
self.assertFalse(self.ipv4 == [])
self.assertFalse(self.ipv4 == 2)
self.assertTrue(self.ipv6 ==
- ipaddr.IPv6('2001:658:22a:cafe:200::1/64'))
+ ipaddr.IPv6Network('2001:658:22a:cafe:200::1/64'))
self.assertFalse(self.ipv6 ==
- ipaddr.IPv6('2001:658:22a:cafe:200::1/63'))
- self.assertFalse(self.ipv6 ==
- ipaddr.IPv6('2001:658:22a:cafe:200::2/64'))
- self.assertFalse(self.ipv6 == ipaddr.IPv4('1.2.3.4/23'))
+ ipaddr.IPv6Network('2001:658:22a:cafe:200::1/63'))
+ self.assertFalse(self.ipv6 == ipaddr.IPv4Network('1.2.3.4/23'))
self.assertFalse(self.ipv6 == '')
self.assertFalse(self.ipv6 == [])
self.assertFalse(self.ipv6 == 2)
def testNotEquals(self):
- self.assertFalse(self.ipv4 != ipaddr.IPv4('1.2.3.4/24'))
- self.assertTrue(self.ipv4 != ipaddr.IPv4('1.2.3.4/23'))
- self.assertTrue(self.ipv4 != ipaddr.IPv4('1.2.3.5/24'))
- self.assertTrue(self.ipv4 != ipaddr.IPv6('::1.2.3.4/24'))
+ self.assertFalse(self.ipv4 != ipaddr.IPv4Network('1.2.3.4/24'))
+ self.assertTrue(self.ipv4 != ipaddr.IPv4Network('1.2.3.4/23'))
+ self.assertTrue(self.ipv4 != ipaddr.IPv6Network('::1.2.3.4/24'))
self.assertTrue(self.ipv4 != '')
self.assertTrue(self.ipv4 != [])
self.assertTrue(self.ipv4 != 2)
self.assertFalse(self.ipv6 !=
- ipaddr.IPv6('2001:658:22a:cafe:200::1/64'))
- self.assertTrue(self.ipv6 !=
- ipaddr.IPv6('2001:658:22a:cafe:200::1/63'))
+ ipaddr.IPv6Network('2001:658:22a:cafe:200::1/64'))
self.assertTrue(self.ipv6 !=
- ipaddr.IPv6('2001:658:22a:cafe:200::2/64'))
- self.assertTrue(self.ipv6 != ipaddr.IPv4('1.2.3.4/23'))
+ ipaddr.IPv6Network('2001:658:22a:cafe:200::1/63'))
+ self.assertTrue(self.ipv6 != ipaddr.IPv4Network('1.2.3.4/23'))
self.assertTrue(self.ipv6 != '')
self.assertTrue(self.ipv6 != [])
self.assertTrue(self.ipv6 != 2)
def testSlash32Constructor(self):
- self.assertEquals(str(ipaddr.IPv4('1.2.3.4/255.255.255.255')),
+ self.assertEquals(str(ipaddr.IPv4Network('1.2.3.4/255.255.255.255')),
'1.2.3.4/32')
def testSlash128Constructor(self):
- self.assertEquals(str(ipaddr.IPv6('::1/128')),
+ self.assertEquals(str(ipaddr.IPv6Network('::1/128')),
'::1/128')
def testSlash0Constructor(self):
- self.assertEquals(str(ipaddr.IPv4('1.2.3.4/0.0.0.0')), '1.2.3.4/0')
+ self.assertEquals(str(ipaddr.IPv4Network('1.2.3.4/0.0.0.0')),
+ '1.2.3.4/0')
def testCollapsing(self):
- ip1 = ipaddr.IPv4('1.1.0.0/24')
- ip2 = ipaddr.IPv4('1.1.1.0/24')
- ip3 = ipaddr.IPv4('1.1.2.0/24')
- ip4 = ipaddr.IPv4('1.1.3.0/24')
- ip5 = ipaddr.IPv4('1.1.4.0/24')
+ # test only IP addresses including some duplicates
+ ip1 = ipaddr.IPv4Address('1.1.1.0')
+ ip2 = ipaddr.IPv4Address('1.1.1.1')
+ ip3 = ipaddr.IPv4Address('1.1.1.2')
+ ip4 = ipaddr.IPv4Address('1.1.1.3')
+ ip5 = ipaddr.IPv4Address('1.1.1.4')
+ ip6 = ipaddr.IPv4Address('1.1.1.0')
+ # check that addresses are subsumed properly.
+ collapsed = ipaddr.collapse_address_list([ip1, ip2, ip3, ip4, ip5, ip6])
+ self.assertEqual(collapsed, [ipaddr.IPv4Network('1.1.1.0/30'),
+ ipaddr.IPv4Network('1.1.1.4/32')])
+
+ # test a mix of IP addresses and networks including some duplicates
+ ip1 = ipaddr.IPv4Address('1.1.1.0')
+ ip2 = ipaddr.IPv4Address('1.1.1.1')
+ ip3 = ipaddr.IPv4Address('1.1.1.2')
+ ip4 = ipaddr.IPv4Address('1.1.1.3')
+ ip5 = ipaddr.IPv4Network('1.1.1.4/30')
+ ip6 = ipaddr.IPv4Network('1.1.1.4/30')
+ # check that addresses are subsumed properly.
+ collapsed = ipaddr.collapse_address_list([ip5, ip1, ip2, ip3, ip4, ip6])
+ self.assertEqual(collapsed, [ipaddr.IPv4Network('1.1.1.0/29')])
+
+ # test only IP networks
+ ip1 = ipaddr.IPv4Network('1.1.0.0/24')
+ ip2 = ipaddr.IPv4Network('1.1.1.0/24')
+ ip3 = ipaddr.IPv4Network('1.1.2.0/24')
+ ip4 = ipaddr.IPv4Network('1.1.3.0/24')
+ ip5 = ipaddr.IPv4Network('1.1.4.0/24')
# stored in no particular order b/c we want CollapseAddr to call [].sort
- ip6 = ipaddr.IPv4('1.1.0.0/22')
- # check that addreses are subsumed properlly.
+ ip6 = ipaddr.IPv4Network('1.1.0.0/22')
+ # check that addresses are subsumed properly.
collapsed = ipaddr.collapse_address_list([ip1, ip2, ip3, ip4, ip5, ip6])
- self.assertEqual(collapsed, [ipaddr.IPv4('1.1.0.0/22'),
- ipaddr.IPv4('1.1.4.0/24')])
- # test that two addresses are supernet'ed properlly
+ self.assertEqual(collapsed, [ipaddr.IPv4Network('1.1.0.0/22'),
+ ipaddr.IPv4Network('1.1.4.0/24')])
+
+ # test that two addresses are supernet'ed properly
collapsed = ipaddr.collapse_address_list([ip1, ip2])
- self.assertEqual(collapsed, [ipaddr.IPv4('1.1.0.0/23')])
+ self.assertEqual(collapsed, [ipaddr.IPv4Network('1.1.0.0/23')])
- ip_same1 = ip_same2 = ipaddr.IPv4('1.1.1.1/32')
+ # test same IP networks
+ ip_same1 = ip_same2 = ipaddr.IPv4Network('1.1.1.1/32')
self.assertEqual(ipaddr.collapse_address_list([ip_same1, ip_same2]),
[ip_same1])
- ip1 = ipaddr.IPv6('::2001:1/100')
- ip2 = ipaddr.IPv6('::2002:1/120')
- ip3 = ipaddr.IPv6('::2001:1/96')
+
+ # test same IP addresses
+ ip_same1 = ip_same2 = ipaddr.IPv4Address('1.1.1.1')
+ self.assertEqual(ipaddr.collapse_address_list([ip_same1, ip_same2]),
+ [ip_same1])
+ ip1 = ipaddr.IPv6Network('::2001:1/100')
+ ip2 = ipaddr.IPv6Network('::2002:1/120')
+ ip3 = ipaddr.IPv6Network('::2001:1/96')
# test that ipv6 addresses are subsumed properly.
collapsed = ipaddr.collapse_address_list([ip1, ip2, ip3])
self.assertEqual(collapsed, [ip3])
+ # the toejam test
+ ip1 = ipaddr.IPAddress('1.1.1.1')
+ ip2 = ipaddr.IPAddress('::1')
+ self.assertRaises(TypeError, ipaddr.collapse_address_list,
+ [ip1, ip2])
+
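collapse_address_list() takes addresses and networks of a single IP version and returns the minimal covering set, raising TypeError on mixed versions. A sketch mirroring the assertions above:

    import ipaddr

    nets = [ipaddr.IPv4Network('1.1.0.0/24'), ipaddr.IPv4Network('1.1.1.0/24')]
    # two adjacent /24s merge into one /23
    assert ipaddr.collapse_address_list(nets) == [ipaddr.IPv4Network('1.1.0.0/23')]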
+ def testSummarizing(self):
+ summarize = ipaddr.summarize_address_range
+ ip1 = ipaddr.IPAddress('1.1.1.0')
+ ip2 = ipaddr.IPAddress('1.1.1.255')
+ # test a /24 is summarized properly
+ self.assertEqual(summarize(ip1, ip2)[0], ipaddr.IPNetwork('1.1.1.0/24'))
+ # test an IPv4 range that isn't on a network byte boundary
+ ip2 = ipaddr.IPAddress('1.1.1.8')
+ self.assertEqual(summarize(ip1, ip2), [ipaddr.IPNetwork('1.1.1.0/29'),
+ ipaddr.IPNetwork('1.1.1.8')])
+
+ ip1 = ipaddr.IPAddress('1::')
+ ip2 = ipaddr.IPAddress('1:ffff:ffff:ffff:ffff:ffff:ffff:ffff')
+ # test an IPv6 range is summarized properly
+ self.assertEqual(summarize(ip1, ip2)[0], ipaddr.IPNetwork('1::/16'))
+ # test an IPv6 range that isn't on a network byte boundary
+ ip2 = ipaddr.IPAddress('2::')
+ self.assertEqual(summarize(ip1, ip2), [ipaddr.IPNetwork('1::/16'),
+ ipaddr.IPNetwork('2::/128')])
+
+ # test exception raised when first is greater than last
+ self.assertRaises(ValueError, summarize, ipaddr.IPAddress('1.1.1.0'),
+ ipaddr.IPAddress('1.1.0.0'))
+ # test exception raised when first and last aren't IP addresses
+ self.assertRaises(TypeError, summarize,
+ ipaddr.IPNetwork('1.1.1.0'),
+ ipaddr.IPNetwork('1.1.0.0'))
+ # test exception raised when first and last are not same version
+ self.assertRaises(TypeError, summarize, ipaddr.IPAddress('::'),
+ ipaddr.IPNetwork('1.1.0.0'))
+
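summarize_address_range() is new in this API: given a first and last IPAddress of the same version, it returns the networks covering exactly that range. A sketch:

    import ipaddr

    first = ipaddr.IPAddress('1.1.1.0')
    last = ipaddr.IPAddress('1.1.1.255')
    # an exact /24 worth of addresses summarizes to a single network
    nets = ipaddr.summarize_address_range(first, last)
    assert nets[0] == ipaddr.IPNetwork('1.1.1.0/24')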
+ def testAddressComparison(self):
+ self.assertTrue(ipaddr.IPAddress('1.1.1.1') <=
+ ipaddr.IPAddress('1.1.1.1'))
+ self.assertTrue(ipaddr.IPAddress('1.1.1.1') <=
+ ipaddr.IPAddress('1.1.1.2'))
+ self.assertTrue(ipaddr.IPAddress('::1') <= ipaddr.IPAddress('::1'))
+ self.assertTrue(ipaddr.IPAddress('::1') <= ipaddr.IPAddress('::2'))
+
def testNetworkComparison(self):
# ip1 and ip2 have the same network address
- ip1 = ipaddr.IPv4('1.1.1.0/24')
- ip2 = ipaddr.IPv4('1.1.1.1/24')
- ip3 = ipaddr.IPv4('1.1.2.0/24')
+ ip1 = ipaddr.IPv4Network('1.1.1.0/24')
+ ip2 = ipaddr.IPv4Network('1.1.1.1/24')
+ ip3 = ipaddr.IPv4Network('1.1.2.0/24')
self.assertTrue(ip1 < ip3)
self.assertTrue(ip3 > ip2)
@@ -383,9 +555,9 @@ class IpaddrUnitTest(unittest.TestCase):
self.assertEquals(ip1.compare_networks(ip3), -1)
self.assertTrue(ip1._get_networks_key() < ip3._get_networks_key())
- ip1 = ipaddr.IPv6('2001::2000/96')
- ip2 = ipaddr.IPv6('2001::2001/96')
- ip3 = ipaddr.IPv6('2001:ffff::2000/96')
+ ip1 = ipaddr.IPv6Network('2001::2000/96')
+ ip2 = ipaddr.IPv6Network('2001::2001/96')
+ ip3 = ipaddr.IPv6Network('2001:ffff::2000/96')
self.assertTrue(ip1 < ip3)
self.assertTrue(ip3 > ip2)
@@ -394,35 +566,62 @@ class IpaddrUnitTest(unittest.TestCase):
self.assertEquals(ip1.compare_networks(ip3), -1)
self.assertTrue(ip1._get_networks_key() < ip3._get_networks_key())
- # Test comparing different protocols
- ipv6 = ipaddr.IPv6('::/0')
- ipv4 = ipaddr.IPv4('0.0.0.0/0')
- self.assertTrue(ipv6 > ipv4)
- self.assertTrue(ipv4 < ipv6)
+ # Test comparing different protocols.
+ # Should always raise a TypeError.
+ ipv6 = ipaddr.IPv6Network('::/0')
+ ipv4 = ipaddr.IPv4Network('0.0.0.0/0')
+ self.assertRaises(TypeError, ipv4.__lt__, ipv6)
+ self.assertRaises(TypeError, ipv4.__gt__, ipv6)
+ self.assertRaises(TypeError, ipv6.__lt__, ipv4)
+ self.assertRaises(TypeError, ipv6.__gt__, ipv4)
# Regression test for issue 19.
- ip1 = ipaddr.IP('10.1.2.128/25')
+ ip1 = ipaddr.IPNetwork('10.1.2.128/25')
self.assertFalse(ip1 < ip1)
self.assertFalse(ip1 > ip1)
- ip2 = ipaddr.IP('10.1.3.0/24')
+ ip2 = ipaddr.IPNetwork('10.1.3.0/24')
self.assertTrue(ip1 < ip2)
self.assertFalse(ip2 < ip1)
self.assertFalse(ip1 > ip2)
self.assertTrue(ip2 > ip1)
- ip3 = ipaddr.IP('10.1.3.0/25')
+ ip3 = ipaddr.IPNetwork('10.1.3.0/25')
self.assertTrue(ip2 < ip3)
self.assertFalse(ip3 < ip2)
self.assertFalse(ip2 > ip3)
self.assertTrue(ip3 > ip2)
+ # <=, >=
+ self.assertTrue(ipaddr.IPNetwork('1.1.1.1') <=
+ ipaddr.IPNetwork('1.1.1.1'))
+ self.assertTrue(ipaddr.IPNetwork('1.1.1.1') <=
+ ipaddr.IPNetwork('1.1.1.2'))
+ self.assertFalse(ipaddr.IPNetwork('1.1.1.2') <=
+ ipaddr.IPNetwork('1.1.1.1'))
+ self.assertTrue(ipaddr.IPNetwork('::1') <= ipaddr.IPNetwork('::1'))
+ self.assertTrue(ipaddr.IPNetwork('::1') <= ipaddr.IPNetwork('::2'))
+ self.assertFalse(ipaddr.IPNetwork('::2') <= ipaddr.IPNetwork('::1'))
+
+ def testStrictNetworks(self):
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '192.168.1.1/24',
+ strict=True)
+ self.assertRaises(ValueError, ipaddr.IPNetwork, '::1/120', strict=True)
+
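strict=True makes the constructor reject a network whose host bits are set; the default remains permissive. A sketch:

    import ipaddr

    try:
        ipaddr.IPNetwork('192.168.1.1/24', strict=True)
    except ValueError:
        pass  # 192.168.1.1 is not the base address of the /24
    net = ipaddr.IPNetwork('192.168.1.1/24')  # non-strict default accepts it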
+ def testOverlaps(self):
+ other = ipaddr.IPv4Network('1.2.3.0/30')
+ other2 = ipaddr.IPv4Network('1.2.2.0/24')
+ other3 = ipaddr.IPv4Network('1.2.2.64/26')
+ self.assertTrue(self.ipv4.overlaps(other))
+ self.assertFalse(self.ipv4.overlaps(other2))
+ self.assertTrue(other2.overlaps(other3))
+
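overlaps() reports whether two networks share any addresses, regardless of which contains which. A sketch:

    import ipaddr

    net = ipaddr.IPv4Network('1.2.3.0/24')
    assert net.overlaps(ipaddr.IPv4Network('1.2.3.0/30'))      # subnet of net
    assert not net.overlaps(ipaddr.IPv4Network('1.2.4.0/24'))  # disjoint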
def testEmbeddedIpv4(self):
ipv4_string = '192.168.0.1'
- ipv4 = ipaddr.IPv4(ipv4_string)
- v4compat_ipv6 = ipaddr.IPv6('::%s' % ipv4_string)
- self.assertEquals(v4compat_ipv6.ip, ipv4.ip)
- v4mapped_ipv6 = ipaddr.IPv6('::ffff:%s' % ipv4_string)
+ ipv4 = ipaddr.IPv4Network(ipv4_string)
+ v4compat_ipv6 = ipaddr.IPv6Network('::%s' % ipv4_string)
+ self.assertEquals(int(v4compat_ipv6.ip), int(ipv4.ip))
+ v4mapped_ipv6 = ipaddr.IPv6Network('::ffff:%s' % ipv4_string)
self.assertNotEquals(v4mapped_ipv6.ip, ipv4.ip)
- self.assertRaises(ipaddr.IPv6IpValidationError, ipaddr.IPv6,
+ self.assertRaises(ipaddr.AddressValueError, ipaddr.IPv6Network,
'2001:1.1.1.1:1.1.1.1')
def testIPVersion(self):
@@ -432,127 +631,186 @@ class IpaddrUnitTest(unittest.TestCase):
def testPacked(self):
self.assertEqual(self.ipv4.packed,
_cb('\x01\x02\x03\x04'))
- self.assertEqual(ipaddr.IPv4('255.254.253.252').packed,
+ self.assertEqual(ipaddr.IPv4Network('255.254.253.252').packed,
_cb('\xff\xfe\xfd\xfc'))
self.assertEqual(self.ipv6.packed,
_cb('\x20\x01\x06\x58\x02\x2a\xca\xfe'
'\x02\x00\x00\x00\x00\x00\x00\x01'))
- self.assertEqual(ipaddr.IPv6('ffff:2:3:4:ffff::').packed,
+ self.assertEqual(ipaddr.IPv6Network('ffff:2:3:4:ffff::').packed,
_cb('\xff\xff\x00\x02\x00\x03\x00\x04\xff\xff'
+ '\x00' * 6))
- self.assertEqual(ipaddr.IPv6('::1:0:0:0:0').packed,
+ self.assertEqual(ipaddr.IPv6Network('::1:0:0:0:0').packed,
_cb('\x00' * 6 + '\x00\x01' + '\x00' * 8))
def testIpStrFromPrefixlen(self):
- ipv4 = ipaddr.IPv4('1.2.3.4/24')
+ ipv4 = ipaddr.IPv4Network('1.2.3.4/24')
self.assertEquals(ipv4._ip_string_from_prefix(), '255.255.255.0')
self.assertEquals(ipv4._ip_string_from_prefix(28), '255.255.255.240')
def testIpType(self):
- ipv4 = ipaddr.IP('1.2.3.4')
- ipv6 = ipaddr.IP('::1.2.3.4')
- self.assertEquals(ipaddr.IPv4, type(ipv4))
- self.assertEquals(ipaddr.IPv6, type(ipv6))
+ ipv4net = ipaddr.IPNetwork('1.2.3.4')
+ ipv4addr = ipaddr.IPAddress('1.2.3.4')
+ ipv6net = ipaddr.IPNetwork('::1.2.3.4')
+ ipv6addr = ipaddr.IPAddress('::1.2.3.4')
+ self.assertEquals(ipaddr.IPv4Network, type(ipv4net))
+ self.assertEquals(ipaddr.IPv4Address, type(ipv4addr))
+ self.assertEquals(ipaddr.IPv6Network, type(ipv6net))
+ self.assertEquals(ipaddr.IPv6Address, type(ipv6addr))
def testReservedIpv4(self):
- self.assertEquals(True, ipaddr.IP('224.1.1.1/31').is_multicast)
- self.assertEquals(False, ipaddr.IP('240.0.0.0').is_multicast)
-
- self.assertEquals(True, ipaddr.IP('192.168.1.1/17').is_private)
- self.assertEquals(False, ipaddr.IP('192.169.0.0').is_private)
- self.assertEquals(True, ipaddr.IP('10.255.255.255').is_private)
- self.assertEquals(False, ipaddr.IP('11.0.0.0').is_private)
- self.assertEquals(True, ipaddr.IP('172.31.255.255').is_private)
- self.assertEquals(False, ipaddr.IP('172.32.0.0').is_private)
-
- self.assertEquals(True, ipaddr.IP('169.254.100.200/24').is_link_local)
- self.assertEquals(False, ipaddr.IP('169.255.100.200/24').is_link_local)
-
- self.assertEquals(True, ipaddr.IP('127.100.200.254/32').is_loopback)
- self.assertEquals(True, ipaddr.IP('127.42.0.0/16').is_loopback)
- self.assertEquals(False, ipaddr.IP('128.0.0.0').is_loopback)
+ # test networks
+ self.assertEquals(True, ipaddr.IPNetwork('224.1.1.1/31').is_multicast)
+ self.assertEquals(False, ipaddr.IPNetwork('240.0.0.0').is_multicast)
+
+ self.assertEquals(True, ipaddr.IPNetwork('192.168.1.1/17').is_private)
+ self.assertEquals(False, ipaddr.IPNetwork('192.169.0.0').is_private)
+ self.assertEquals(True, ipaddr.IPNetwork('10.255.255.255').is_private)
+ self.assertEquals(False, ipaddr.IPNetwork('11.0.0.0').is_private)
+ self.assertEquals(True, ipaddr.IPNetwork('172.31.255.255').is_private)
+ self.assertEquals(False, ipaddr.IPNetwork('172.32.0.0').is_private)
+
+ self.assertEquals(True,
+ ipaddr.IPNetwork('169.254.100.200/24').is_link_local)
+ self.assertEquals(False,
+ ipaddr.IPNetwork('169.255.100.200/24').is_link_local)
+
+ self.assertEquals(True,
+ ipaddr.IPNetwork('127.100.200.254/32').is_loopback)
+ self.assertEquals(True, ipaddr.IPNetwork('127.42.0.0/16').is_loopback)
+ self.assertEquals(False, ipaddr.IPNetwork('128.0.0.0').is_loopback)
+
+ # test addresses
+ self.assertEquals(True, ipaddr.IPAddress('224.1.1.1').is_multicast)
+ self.assertEquals(False, ipaddr.IPAddress('240.0.0.0').is_multicast)
+
+ self.assertEquals(True, ipaddr.IPAddress('192.168.1.1').is_private)
+ self.assertEquals(False, ipaddr.IPAddress('192.169.0.0').is_private)
+ self.assertEquals(True, ipaddr.IPAddress('10.255.255.255').is_private)
+ self.assertEquals(False, ipaddr.IPAddress('11.0.0.0').is_private)
+ self.assertEquals(True, ipaddr.IPAddress('172.31.255.255').is_private)
+ self.assertEquals(False, ipaddr.IPAddress('172.32.0.0').is_private)
+
+ self.assertEquals(True,
+ ipaddr.IPAddress('169.254.100.200').is_link_local)
+ self.assertEquals(False,
+ ipaddr.IPAddress('169.255.100.200').is_link_local)
+
+ self.assertEquals(True,
+ ipaddr.IPAddress('127.100.200.254').is_loopback)
+ self.assertEquals(True, ipaddr.IPAddress('127.42.0.0').is_loopback)
+ self.assertEquals(False, ipaddr.IPAddress('128.0.0.0').is_loopback)
+ self.assertEquals(True, ipaddr.IPNetwork('0.0.0.0').is_unspecified)
def testReservedIpv6(self):
- ip = ipaddr.IP
-
- self.assertEquals(True, ip('ffff::').is_multicast)
- self.assertEquals(True, ip(2**128-1).is_multicast)
- self.assertEquals(True, ip('ff00::').is_multicast)
- self.assertEquals(False, ip('fdff::').is_multicast)
-
- self.assertEquals(True, ip('fecf::').is_site_local)
- self.assertEquals(True, ip('feff:ffff:ffff:ffff::').is_site_local)
- self.assertEquals(False, ip('fbf:ffff::').is_site_local)
- self.assertEquals(False, ip('ff00::').is_site_local)
-
- self.assertEquals(True, ip('fc00::').is_private)
- self.assertEquals(True, ip('fc00:ffff:ffff:ffff::').is_private)
- self.assertEquals(False, ip('fbff:ffff::').is_private)
- self.assertEquals(False, ip('fe00::').is_private)
- self.assertEquals(True, ip('fea0::').is_link_local)
- self.assertEquals(True, ip('febf:ffff::').is_link_local)
- self.assertEquals(False, ip('fe7f:ffff::').is_link_local)
- self.assertEquals(False, ip('fec0::').is_link_local)
-
- self.assertEquals(True, ip('0:0::0:01').is_loopback)
- self.assertEquals(False, ip('::1/127').is_loopback)
- self.assertEquals(False, ip('::').is_loopback)
- self.assertEquals(False, ip('::2').is_loopback)
-
- self.assertEquals(True, ip('0::0').is_unspecified)
- self.assertEquals(False, ip('::1').is_unspecified)
- self.assertEquals(False, ip('::/127').is_unspecified)
+ self.assertEquals(True, ipaddr.IPNetwork('ffff::').is_multicast)
+ self.assertEquals(True, ipaddr.IPNetwork(2**128-1).is_multicast)
+ self.assertEquals(True, ipaddr.IPNetwork('ff00::').is_multicast)
+ self.assertEquals(False, ipaddr.IPNetwork('fdff::').is_multicast)
+
+ self.assertEquals(True, ipaddr.IPNetwork('fecf::').is_site_local)
+ self.assertEquals(True, ipaddr.IPNetwork(
+ 'feff:ffff:ffff:ffff::').is_site_local)
+ self.assertEquals(False, ipaddr.IPNetwork('fbf:ffff::').is_site_local)
+ self.assertEquals(False, ipaddr.IPNetwork('ff00::').is_site_local)
+
+ self.assertEquals(True, ipaddr.IPNetwork('fc00::').is_private)
+ self.assertEquals(True, ipaddr.IPNetwork(
+ 'fc00:ffff:ffff:ffff::').is_private)
+ self.assertEquals(False, ipaddr.IPNetwork('fbff:ffff::').is_private)
+ self.assertEquals(False, ipaddr.IPNetwork('fe00::').is_private)
+
+ self.assertEquals(True, ipaddr.IPNetwork('fea0::').is_link_local)
+ self.assertEquals(True, ipaddr.IPNetwork('febf:ffff::').is_link_local)
+ self.assertEquals(False, ipaddr.IPNetwork('fe7f:ffff::').is_link_local)
+ self.assertEquals(False, ipaddr.IPNetwork('fec0::').is_link_local)
+
+ self.assertEquals(True, ipaddr.IPNetwork('0:0::0:01').is_loopback)
+ self.assertEquals(False, ipaddr.IPNetwork('::1/127').is_loopback)
+ self.assertEquals(False, ipaddr.IPNetwork('::').is_loopback)
+ self.assertEquals(False, ipaddr.IPNetwork('::2').is_loopback)
+
+ self.assertEquals(True, ipaddr.IPNetwork('0::0').is_unspecified)
+ self.assertEquals(False, ipaddr.IPNetwork('::1').is_unspecified)
+ self.assertEquals(False, ipaddr.IPNetwork('::/127').is_unspecified)
+
+ # test addresses
+ self.assertEquals(True, ipaddr.IPAddress('ffff::').is_multicast)
+ self.assertEquals(True, ipaddr.IPAddress(2**128-1).is_multicast)
+ self.assertEquals(True, ipaddr.IPAddress('ff00::').is_multicast)
+ self.assertEquals(False, ipaddr.IPAddress('fdff::').is_multicast)
+
+ self.assertEquals(True, ipaddr.IPAddress('fecf::').is_site_local)
+ self.assertEquals(True, ipaddr.IPAddress(
+ 'feff:ffff:ffff:ffff::').is_site_local)
+ self.assertEquals(False, ipaddr.IPAddress('fbf:ffff::').is_site_local)
+ self.assertEquals(False, ipaddr.IPAddress('ff00::').is_site_local)
+
+ self.assertEquals(True, ipaddr.IPAddress('fc00::').is_private)
+ self.assertEquals(True, ipaddr.IPAddress(
+ 'fc00:ffff:ffff:ffff::').is_private)
+ self.assertEquals(False, ipaddr.IPAddress('fbff:ffff::').is_private)
+ self.assertEquals(False, ipaddr.IPAddress('fe00::').is_private)
+
+ self.assertEquals(True, ipaddr.IPAddress('fea0::').is_link_local)
+ self.assertEquals(True, ipaddr.IPAddress('febf:ffff::').is_link_local)
+ self.assertEquals(False, ipaddr.IPAddress('fe7f:ffff::').is_link_local)
+ self.assertEquals(False, ipaddr.IPAddress('fec0::').is_link_local)
+
+ self.assertEquals(True, ipaddr.IPAddress('0:0::0:01').is_loopback)
+ self.assertEquals(True, ipaddr.IPAddress('::1').is_loopback)
+ self.assertEquals(False, ipaddr.IPAddress('::2').is_loopback)
+
+ self.assertEquals(True, ipaddr.IPAddress('0::0').is_unspecified)
+ self.assertEquals(False, ipaddr.IPAddress('::1').is_unspecified)
+
+ # some generic IETF reserved addresses
+ self.assertEquals(True, ipaddr.IPAddress('100::').is_reserved)
+ self.assertEquals(True, ipaddr.IPNetwork('4000::1/128').is_reserved)
+
+ def testIpv4Mapped(self):
+ self.assertEqual(ipaddr.IPAddress('::ffff:192.168.1.1').ipv4_mapped,
+ ipaddr.IPAddress('192.168.1.1'))
+ self.assertEqual(ipaddr.IPAddress('::c0a8:101').ipv4_mapped, None)
+ self.assertEqual(ipaddr.IPAddress('::ffff:c0a8:101').ipv4_mapped,
+ ipaddr.IPAddress('192.168.1.1'))
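ipv4_mapped returns the embedded IPv4 address only for addresses in the ::ffff:0:0/96 mapped range, and None otherwise, as the test above pins down. A sketch:

    import ipaddr

    assert (ipaddr.IPAddress('::ffff:192.168.1.1').ipv4_mapped ==
            ipaddr.IPAddress('192.168.1.1'))
    assert ipaddr.IPAddress('::c0a8:101').ipv4_mapped is None  # not ::ffff:0:0/96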
def testAddrExclude(self):
- addr1 = ipaddr.IP('10.1.1.0/24')
- addr2 = ipaddr.IP('10.1.1.0/26')
- addr3 = ipaddr.IP('10.2.1.0/24')
+ addr1 = ipaddr.IPNetwork('10.1.1.0/24')
+ addr2 = ipaddr.IPNetwork('10.1.1.0/26')
+ addr3 = ipaddr.IPNetwork('10.2.1.0/24')
self.assertEqual(addr1.address_exclude(addr2),
- [ipaddr.IP('10.1.1.64/26'),
- ipaddr.IP('10.1.1.128/25')])
+ [ipaddr.IPNetwork('10.1.1.64/26'),
+ ipaddr.IPNetwork('10.1.1.128/25')])
self.assertRaises(ValueError, addr1.address_exclude, addr3)
def testHash(self):
- self.assertEquals(hash(ipaddr.IP('10.1.1.0/24')),
- hash(ipaddr.IP('10.1.1.0/24')))
+ self.assertEquals(hash(ipaddr.IPNetwork('10.1.1.0/24')),
+ hash(ipaddr.IPNetwork('10.1.1.0/24')))
+ self.assertEquals(hash(ipaddr.IPAddress('10.1.1.0')),
+ hash(ipaddr.IPAddress('10.1.1.0')))
+ ip1 = ipaddr.IPAddress('10.1.1.0')
+ ip2 = ipaddr.IPAddress('1::')
dummy = {}
dummy[self.ipv4] = None
dummy[self.ipv6] = None
+ dummy[ip1] = None
+ dummy[ip2] = None
self.assertTrue(self.ipv4 in dummy)
-
- def testIPv4PrefixFromInt(self):
- addr1 = ipaddr.IP('10.1.1.0/24')
- addr2 = ipaddr.IPv4(addr1.ip) # clone prefix
- addr2.set_prefix(addr1.prefixlen)
- addr3 = ipaddr.IP(123456)
-
- self.assertEqual(123456, addr3.ip)
- self.assertRaises(ipaddr.IPv4NetmaskValidationError,
- addr2.set_prefix, -1L)
- self.assertEqual(addr1, addr2)
- self.assertEqual(str(addr1), str(addr2))
-
- def testIPv6PrefixFromInt(self):
- addr1 = ipaddr.IP('2001:0658:022a:cafe:0200::1/64')
- addr2 = ipaddr.IPv6(addr1.ip) # clone prefix
- addr2.set_prefix(addr1.prefixlen)
- addr3 = ipaddr.IP(123456)
-
- self.assertEqual(123456, addr3.ip)
- self.assertRaises(ipaddr.IPv6NetmaskValidationError,
- addr2.set_prefix, -1L)
- self.assertEqual(addr1, addr2)
- self.assertEqual(str(addr1), str(addr2))
+ self.assertTrue(ip2 in dummy)
def testCopyConstructor(self):
- addr1 = ipaddr.IP('10.1.1.0/24')
- addr2 = ipaddr.IP(addr1)
- addr3 = ipaddr.IP('2001:658:22a:cafe:200::1/64')
- addr4 = ipaddr.IP(addr3)
+ addr1 = ipaddr.IPNetwork('10.1.1.0/24')
+ addr2 = ipaddr.IPNetwork(addr1)
+ addr3 = ipaddr.IPNetwork('2001:658:22a:cafe:200::1/64')
+ addr4 = ipaddr.IPNetwork(addr3)
+ addr5 = ipaddr.IPv4Address('1.1.1.1')
+ addr6 = ipaddr.IPv6Address('2001:658:22a:cafe:200::1')
self.assertEqual(addr1, addr2)
self.assertEqual(addr3, addr4)
+ self.assertEqual(addr5, ipaddr.IPv4Address(addr5))
+ self.assertEqual(addr6, ipaddr.IPv6Address(addr6))
def testCompressIPv6Address(self):
test_addresses = {
@@ -572,57 +830,124 @@ class IpaddrUnitTest(unittest.TestCase):
'2001:658:22a:cafe::/66',
}
for uncompressed, compressed in test_addresses.items():
- self.assertEquals(compressed, str(ipaddr.IPv6(uncompressed)))
+ self.assertEquals(compressed, str(ipaddr.IPv6Network(uncompressed)))
def testExplodeShortHandIpStr(self):
- addr1 = ipaddr.IPv6('2001::1')
+ addr1 = ipaddr.IPv6Network('2001::1')
self.assertEqual('2001:0000:0000:0000:0000:0000:0000:0001',
- addr1._explode_shorthand_ip_string(addr1.ip_ext))
+ addr1._explode_shorthand_ip_string(str(addr1.ip)))
+ self.assertEqual('0000:0000:0000:0000:0000:0000:0000:0001',
+ ipaddr.IPv6Network('::1/128').exploded)
def testIntRepresentation(self):
self.assertEqual(16909060, int(self.ipv4))
self.assertEqual(42540616829182469433547762482097946625, int(self.ipv6))
def testHexRepresentation(self):
- self.assertEqual(hex(0x1020304), hex(self.ipv4))
+ self.assertEqual(hex(0x1020304),
+ hex(self.ipv4))
self.assertEqual(hex(0x20010658022ACAFE0200000000000001),
hex(self.ipv6))
# backwards compatibility
def testBackwardsCompability(self):
- ip = ipaddr.IP
-
self.assertEqual(ipaddr.CollapseAddrList(
- [ip('1.1.0.0/24'), ip('1.1.1.0/24')]),
- [ip('1.1.0.0/23')])
-
- self.assertEqual(ip('::42:0/112').AddressExclude(ip('::42:8000/113')),
- [ip('::42:0/113')])
-
- self.assertTrue(ip('1::/8').CompareNetworks(ip('2::/9')) < 0)
-
- self.assertEqual(ip('1::/16').Contains(ip('2::/16')), False)
-
- i4 = ip('1.2.3.1/12')
- i4.set_prefix(0)
- self.assertEqual(i4.get_prefix(), 0)
-
- i6 = ip('::1/2')
- i6.set_prefix(0)
- self.assertEqual(i6.get_prefix(), 0)
-
- self.assertEqual(ip('0.0.0.0/0').Subnet(),
- [ip('0.0.0.0/1'), ip('128.0.0.0/1')])
- self.assertEqual(ip('::/127').Subnet(), [ip('::/128'), ip('::1/128')])
-
- self.assertEqual(ip('1.0.0.0/32').Supernet(), ip('1.0.0.0/31'))
- self.assertEqual(ip('::/121').Supernet(), ip('::/120'))
-
- self.assertEqual(ip('10.0.0.02').IsRFC1918(), True)
- self.assertEqual(ip('10.0.0.0').IsMulticast(), False)
- self.assertEqual(ip('127.255.255.255').IsLoopback(), True)
- self.assertEqual(ip('169.255.255.255').IsLinkLocal(), False)
+ [ipaddr.IPNetwork('1.1.0.0/24'), ipaddr.IPNetwork('1.1.1.0/24')]),
+ [ipaddr.IPNetwork('1.1.0.0/23')])
+
+ self.assertEqual(ipaddr.IPNetwork('::42:0/112').AddressExclude(
+ ipaddr.IPNetwork('::42:8000/113')),
+ [ipaddr.IPNetwork('::42:0/113')])
+
+ self.assertTrue(ipaddr.IPNetwork('1::/8').CompareNetworks(
+ ipaddr.IPNetwork('2::/9')) < 0)
+
+ self.assertEqual(ipaddr.IPNetwork('1::/16').Contains(
+ ipaddr.IPNetwork('2::/16')), False)
+
+ self.assertEqual(ipaddr.IPNetwork('0.0.0.0/0').Subnet(),
+ [ipaddr.IPNetwork('0.0.0.0/1'),
+ ipaddr.IPNetwork('128.0.0.0/1')])
+ self.assertEqual(ipaddr.IPNetwork('::/127').Subnet(),
+ [ipaddr.IPNetwork('::/128'),
+ ipaddr.IPNetwork('::1/128')])
+
+ self.assertEqual(ipaddr.IPNetwork('1.0.0.0/32').Supernet(),
+ ipaddr.IPNetwork('1.0.0.0/31'))
+ self.assertEqual(ipaddr.IPNetwork('::/121').Supernet(),
+ ipaddr.IPNetwork('::/120'))
+
+ self.assertEqual(ipaddr.IPNetwork('10.0.0.02').IsRFC1918(), True)
+ self.assertEqual(ipaddr.IPNetwork('10.0.0.0').IsMulticast(), False)
+ self.assertEqual(ipaddr.IPNetwork('127.255.255.255').IsLoopback(), True)
+ self.assertEqual(ipaddr.IPNetwork('169.255.255.255').IsLinkLocal(),
+ False)
+
+ def testForceVersion(self):
+ self.assertEqual(ipaddr.IPNetwork(1).version, 4)
+ self.assertEqual(ipaddr.IPNetwork(1, version=6).version, 6)
+
+ def testWithStar(self):
+ self.assertEqual(str(self.ipv4.with_prefixlen), "1.2.3.4/24")
+ self.assertEqual(str(self.ipv4.with_netmask), "1.2.3.4/255.255.255.0")
+ self.assertEqual(str(self.ipv4.with_hostmask), "1.2.3.4/0.0.0.255")
+
+ self.assertEqual(str(self.ipv6.with_prefixlen),
+ '2001:658:22a:cafe:200::1/64')
+ # these two probably don't make much sense, but they're included for
+ # compatibility with ipv4
+ self.assertEqual(str(self.ipv6.with_netmask),
+ '2001:658:22a:cafe:200::1/ffff:ffff:ffff:ffff::')
+ self.assertEqual(str(self.ipv6.with_hostmask),
+ '2001:658:22a:cafe:200::1/::ffff:ffff:ffff:ffff')
+
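The with_prefixlen, with_netmask and with_hostmask properties render the same network three ways. A sketch:

    import ipaddr

    net = ipaddr.IPNetwork('1.2.3.4/24')
    assert str(net.with_prefixlen) == '1.2.3.4/24'
    assert str(net.with_netmask) == '1.2.3.4/255.255.255.0'
    assert str(net.with_hostmask) == '1.2.3.4/0.0.0.255'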
+ def testNetworkElementCaching(self):
+ # V4 - make sure we're empty
+ self.assertFalse(self.ipv4._cache.has_key('network'))
+ self.assertFalse(self.ipv4._cache.has_key('broadcast'))
+ self.assertFalse(self.ipv4._cache.has_key('hostmask'))
+
+ # V4 - populate and test
+ self.assertEqual(self.ipv4.network, ipaddr.IPv4Address('1.2.3.0'))
+ self.assertEqual(self.ipv4.broadcast, ipaddr.IPv4Address('1.2.3.255'))
+ self.assertEqual(self.ipv4.hostmask, ipaddr.IPv4Address('0.0.0.255'))
+
+ # V4 - check we're cached
+ self.assertTrue(self.ipv4._cache.has_key('network'))
+ self.assertTrue(self.ipv4._cache.has_key('broadcast'))
+ self.assertTrue(self.ipv4._cache.has_key('hostmask'))
+
+ # V6 - make sure we're empty
+ self.assertFalse(self.ipv6._cache.has_key('network'))
+ self.assertFalse(self.ipv6._cache.has_key('broadcast'))
+ self.assertFalse(self.ipv6._cache.has_key('hostmask'))
+
+ # V6 - populate and test
+ self.assertEqual(self.ipv6.network,
+ ipaddr.IPv6Address('2001:658:22a:cafe::'))
+ self.assertEqual(self.ipv6.broadcast, ipaddr.IPv6Address(
+ '2001:658:22a:cafe:ffff:ffff:ffff:ffff'))
+ self.assertEqual(self.ipv6.hostmask,
+ ipaddr.IPv6Address('::ffff:ffff:ffff:ffff'))
+
+ # V6 - check we're cached
+ self.assertTrue(self.ipv6._cache.has_key('network'))
+ self.assertTrue(self.ipv6._cache.has_key('broadcast'))
+ self.assertTrue(self.ipv6._cache.has_key('hostmask'))
+
+ def testIsValidIp(self):
+ ip = ipaddr.IPv6Address('::')
+ self.assertTrue(ip._is_valid_ip('2001:658:22a:cafe:200::1'))
+ self.assertTrue(ip._is_valid_ip('::ffff:10.10.0.0'))
+ self.assertTrue(ip._is_valid_ip('::ffff:192.168.0.0'))
+ self.assertFalse(ip._is_valid_ip('2001:658:22a::::1'))
+ self.assertFalse(ip._is_valid_ip(':658:22a:cafe:200::1'))
+ self.assertFalse(ip._is_valid_ip('2001:658:22a:cafe:200:'))
+ self.assertFalse(ip._is_valid_ip('2001:658:22a:cafe:200:127.0.0.1::1'))
+ self.assertFalse(ip._is_valid_ip('2001:658:22a:cafe:200::127.0.1'))
+ self.assertFalse(ip._is_valid_ip('2001:658:22a:zzzz:200::1'))
+ self.assertFalse(ip._is_valid_ip('2001:658:22a:cafe1:200::1'))
if __name__ == '__main__':
unittest.main()
diff --git a/google-appengine/lib/ipaddr/ipaddr/setup.py b/google-appengine/lib/ipaddr/ipaddr/setup.py
index 6088ced..3356432 100755
--- a/google-appengine/lib/ipaddr/ipaddr/setup.py
+++ b/google-appengine/lib/ipaddr/ipaddr/setup.py
@@ -24,6 +24,7 @@ setup(name='ipaddr',
maintainer_email='ipaddr-py-dev@googlegroups.com',
version=ipaddr.__version__,
url='http://code.google.com/p/ipaddr-py/',
+ license='Apache License, Version 2.0',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
diff --git a/google-appengine/new_project_template/main.py b/google-appengine/new_project_template/main.py
index e293aac..0e8f5ba 100755
--- a/google-appengine/new_project_template/main.py
+++ b/google-appengine/new_project_template/main.py
@@ -14,23 +14,20 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
-
-
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
class MainHandler(webapp.RequestHandler):
-
- def get(self):
- self.response.out.write('Hello world!')
+ def get(self):
+ self.response.out.write('Hello world!')
def main():
- application = webapp.WSGIApplication([('/', MainHandler)],
- debug=True)
- util.run_wsgi_app(application)
+ application = webapp.WSGIApplication([('/', MainHandler)],
+ debug=True)
+ util.run_wsgi_app(application)
if __name__ == '__main__':
- main()
+ main()