From 58718aaeb42b7d7fb5330217da552e97a4a9e829 Mon Sep 17 00:00:00 2001 From: "Jason A. Donenfeld" Date: Wed, 28 Apr 2010 05:37:57 -0400 Subject: Updated google appengine sdk. --- google-appengine/RELEASE_NOTES | 36 + google-appengine/VERSION | 4 +- .../google/appengine/api/api_base_pb.py | 0 google-appengine/google/appengine/api/appinfo.py | 21 +- .../google/appengine/api/blobstore/blobstore.py | 181 +- .../api/blobstore/blobstore_service_pb.py | 242 ++- .../appengine/api/blobstore/blobstore_stub.py | 50 +- .../api/capabilities/capability_service_pb.py | 0 google-appengine/google/appengine/api/croninfo.py | 8 +- google-appengine/google/appengine/api/datastore.py | 137 +- .../google/appengine/api/datastore_admin.py | 7 +- .../google/appengine/api/datastore_errors.py | 14 +- .../google/appengine/api/datastore_file_stub.py | 44 +- .../google/appengine/api/datastore_types.py | 43 +- google-appengine/google/appengine/api/dosinfo.py | 5 +- .../appengine/api/images/images_service_pb.py | 170 +- .../google/appengine/api/images/images_stub.py | 22 + .../appengine/api/labs/taskqueue/taskqueue.py | 254 ++- .../api/labs/taskqueue/taskqueue_service_pb.py | 424 ++++- .../appengine/api/labs/taskqueue/taskqueue_stub.py | 213 ++- google-appengine/google/appengine/api/mail.py | 12 +- .../google/appengine/api/mail_service_pb.py | 0 .../google/appengine/api/memcache/__init__.py | 33 +- .../appengine/api/memcache/memcache_service_pb.py | 0 .../appengine/api/namespace_manager/__init__.py | 70 +- .../api/namespace_manager/namespace_manager.py | 97 + google-appengine/google/appengine/api/queueinfo.py | 51 +- .../google/appengine/api/urlfetch_errors.py | 3 - .../google/appengine/api/urlfetch_service_pb.py | 94 +- .../google/appengine/api/urlfetch_stub.py | 21 +- .../google/appengine/api/user_service_pb.py | 104 +- .../google/appengine/api/validation.py | 81 +- .../google/appengine/api/xmpp/xmpp_service_pb.py | 0 .../google/appengine/base/capabilities_pb.py | 0 .../google/appengine/cron/groctimespecification.py | 17 +- .../google/appengine/datastore/datastore_pb.py | 196 +- .../appengine/datastore/datastore_sqlite_stub.py | 1501 +++++++++++++++ .../google/appengine/datastore/entity_pb.py | 0 .../appengine/datastore/sortable_pb_encoder.py | 282 +++ google-appengine/google/appengine/dist/py_imp.py | 2 +- .../google/appengine/ext/admin/__init__.py | 52 +- .../appengine/ext/admin/templates/datastore.html | 8 +- .../ext/admin/templates/datastore_edit.html | 11 + .../google/appengine/ext/appstats/datamodel_pb.py | 0 .../ext/appstats/sample_appengine_config.py | 8 +- .../appengine/ext/appstats/static/appstats_js.js | 147 +- .../google/appengine/ext/appstats/static/gantt.js | 405 ++-- .../appengine/ext/appstats/templates/details.html | 19 +- .../google/appengine/ext/blobstore/blobstore.py | 58 +- .../google/appengine/ext/bulkload/__init__.py | 422 +---- .../appengine/ext/bulkload/bulkload_deprecated.py | 359 ++++ .../google/appengine/ext/db/__init__.py | 160 +- google-appengine/google/appengine/ext/db/stats.py | 5 +- .../google/appengine/ext/gql/__init__.py | 19 +- .../google/appengine/ext/remote_api/handler.py | 2 + .../appengine/ext/remote_api/remote_api_pb.py | 0 .../appengine/ext/remote_api/remote_api_stub.py | 7 +- .../google/appengine/ext/webapp/__init__.py | 13 +- .../appengine/ext/webapp/blobstore_handlers.py | 157 +- .../google/appengine/tools/adaptive_thread_pool.py | 3 +- google-appengine/google/appengine/tools/appcfg.py | 171 +- .../google/appengine/tools/bulkloader.py | 1 + 
.../google/appengine/tools/dev_appserver.py | 53 +-
 .../google/appengine/tools/dev_appserver_main.py | 8 +
 .../google/appengine/tools/dev_appserver_upload.py | 24 +-
 .../google/appengine/tools/remote_api_shell.py | 3 +-
 google-appengine/lib/ipaddr/ipaddr/__init__.py | 1978 ++++++++++++--------
 google-appengine/lib/ipaddr/ipaddr/ipaddr_test.py | 923 ++++++---
 google-appengine/lib/ipaddr/ipaddr/setup.py | 1 +
 google-appengine/new_project_template/main.py | 15 +-
 70 files changed, 7014 insertions(+), 2457 deletions(-)
 mode change 100644 => 100755 google-appengine/google/appengine/api/api_base_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/api/capabilities/capability_service_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/api/images/images_service_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/api/mail_service_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/api/memcache/memcache_service_pb.py
 create mode 100755 google-appengine/google/appengine/api/namespace_manager/namespace_manager.py
 mode change 100644 => 100755 google-appengine/google/appengine/api/urlfetch_service_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/api/user_service_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/api/xmpp/xmpp_service_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/base/capabilities_pb.py
 mode change 100644 => 100755 google-appengine/google/appengine/datastore/datastore_pb.py
 create mode 100644 google-appengine/google/appengine/datastore/datastore_sqlite_stub.py
 mode change 100644 => 100755 google-appengine/google/appengine/datastore/entity_pb.py
 create mode 100644 google-appengine/google/appengine/datastore/sortable_pb_encoder.py
 mode change 100644 => 100755 google-appengine/google/appengine/ext/appstats/datamodel_pb.py
 create mode 100755 google-appengine/google/appengine/ext/bulkload/bulkload_deprecated.py
 mode change 100644 => 100755 google-appengine/google/appengine/ext/remote_api/remote_api_pb.py

diff --git a/google-appengine/RELEASE_NOTES b/google-appengine/RELEASE_NOTES
index 185fe23..a10ffc7 100644
--- a/google-appengine/RELEASE_NOTES
+++ b/google-appengine/RELEASE_NOTES
@@ -3,6 +3,42 @@ All rights reserved.

 App Engine Python SDK - Release Notes

+Version 1.3.3
+=================================
+- A new experimental feature allows you to set the dev_appserver datastore
+  file stub to use SQLite. To enable it, set the flag --use_sqlite=true.
+- It is now possible to implement properties on db.Expando.
+- Fixed a datastore issue where setting a query offset greater than the
+  number of results threw an error.
+  http://code.google.com/p/googleappengine/issues/detail?id=2875
+- Fixed an issue that prevented the ByteString type from being viewed in
+  the Development Console datastore viewer.
+  http://code.google.com/p/googleappengine/issues/detail?id=1176
+
+Version 1.3.2
+=================================
+- New API to read the contents of uploaded Blobs (fetch_data)
+  http://code.google.com/p/googleappengine/issues/detail?id=2536
+- URLFetch now supports accessing ports 80-90, 440-450, and 1024-65535
+- Mail API now allows common document formats as attachments
+  http://code.google.com/p/googleappengine/issues/detail?id=494
+- The Task Queue API now supports adding multiple tasks in a single call to
+  Queue.add()
+- Fixed charset handling for inbound emails
+  http://code.google.com/p/googleappengine/issues/detail?id=2326
+- Fixed an issue with compositing background colors in dev_appserver
+- New feature in the datastore to specify whether to use strong or eventually
+  consistent reads (the default is strong)
+- New datastore feature allows setting deadlines for operations
+- Increased the maximum Task Queue refill rate from 20/s to 50/s
+- Support for IP blacklisting to prevent denial of service (DoS) attacks
+- Fixed an issue with the Mac Launcher in Mac OS X 10.5.5
+  http://code.google.com/p/googleappengine/issues/detail?id=778
+- Fixed an issue with slow updates when there are many skipped files
+  http://code.google.com/p/googleappengine/issues/detail?id=2492
+- Fixed an issue where the cursor did not update when using a GqlQuery
+  http://code.google.com/p/googleappengine/issues/detail?id=2757
+
 Version 1.3.1
 ================================
 - Datastore Query Cursors

diff --git a/google-appengine/VERSION b/google-appengine/VERSION
index 4df3b78..626918d 100644
--- a/google-appengine/VERSION
+++ b/google-appengine/VERSION
@@ -1,3 +1,3 @@
-release: "1.3.1"
-timestamp: 1263355585
+release: "1.3.3"
+timestamp: 1270494723
 api_versions: ['1']

diff --git a/google-appengine/google/appengine/api/api_base_pb.py b/google-appengine/google/appengine/api/api_base_pb.py
old mode 100644
new mode 100755
diff --git a/google-appengine/google/appengine/api/appinfo.py b/google-appengine/google/appengine/api/appinfo.py
index 6ad6fbf..c5ce64c 100755
--- a/google-appengine/google/appengine/api/appinfo.py
+++ b/google-appengine/google/appengine/api/appinfo.py
@@ -37,10 +37,10 @@ from google.appengine.api import yaml_object

 _URL_REGEX = r'(?!\^)/|\.|(\(.).*(?!\$).'
 _FILES_REGEX = r'(?!\^).*(?!\$).'
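
A note on the relaxed _DELTA_REGEX in the hunk below: the old pattern rejected zero-valued and zero-padded durations in default_expiration strings. A minimal standalone sketch of the effect (patterns copied from appinfo.py; the assert is illustrative only):

    import re

    _DELTA_REGEX = r'([0-9]+)([DdHhMm]|[sS]?)'
    _EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)

    # '4d 5h' already matched; '0s' only matches with the relaxed pattern.
    for expiration in ('4d 5h', '0s'):
        assert re.match(_EXPIRATION_REGEX, expiration)
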
-_DELTA_REGEX = r'([1-9][0-9]*)([DdHhMm]|[sS]?)'
+_DELTA_REGEX = r'([0-9]+)([DdHhMm]|[sS]?)'
 _EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)

-_SERVICE_RE_STRING = r'(mail|xmpp_message)'
+_SERVICE_RE_STRING = r'(mail|xmpp_message|rest)'

 _PAGE_NAME_REGEX = r'^.+$'

@@ -56,7 +56,19 @@ APP_ID_MAX_LEN = 100
 MAJOR_VERSION_ID_MAX_LEN = 100
 MAX_URL_MAPS = 100

-APPLICATION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % APP_ID_MAX_LEN
+PARTITION_SEPARATOR = '~'
+
+DOMAIN_SEPARATOR = ':'
+
+PARTITION_RE_STRING = (r'[a-z\d\-]{1,%d}\%s' %
+                       (APP_ID_MAX_LEN, PARTITION_SEPARATOR))
+DOMAIN_RE_STRING = (r'(?!\-)[a-z\d\-\.]{1,%d}%s' %
+                    (APP_ID_MAX_LEN, DOMAIN_SEPARATOR))
+DISPLAY_APP_ID_RE_STRING = (r'(?!-)[a-z\d\-]{1,%d}' % (APP_ID_MAX_LEN))
+APPLICATION_RE_STRING = (r'(?:%s)?(?:%s)?%s' %
+                         (PARTITION_RE_STRING,
+                          DOMAIN_RE_STRING,
+                          DISPLAY_APP_ID_RE_STRING))

 VERSION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % MAJOR_VERSION_ID_MAX_LEN
 RUNTIME_RE_STRING = r'[a-z]{1,30}'
@@ -114,6 +126,7 @@ SKIP_FILES = 'skip_files'
 SERVICES = 'inbound_services'
 DERIVED_FILE_TYPE = 'derived_file_type'
 JAVA_PRECOMPILED = 'java_precompiled'
+PYTHON_PRECOMPILED = 'python_precompiled'

 ADMIN_CONSOLE = 'admin_console'
 PAGES = 'pages'
@@ -361,7 +374,7 @@ class AppInfoExternal(validation.Validated):
       DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
       SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES),
       DERIVED_FILE_TYPE: validation.Optional(validation.Repeated(
-          validation.Options(JAVA_PRECOMPILED))),
+          validation.Options(JAVA_PRECOMPILED, PYTHON_PRECOMPILED))),
       ADMIN_CONSOLE: validation.Optional(AdminConsole),
   }

diff --git a/google-appengine/google/appengine/api/blobstore/blobstore.py b/google-appengine/google/appengine/api/blobstore/blobstore.py
index f22bb78..61d5a4e 100755
--- a/google-appengine/google/appengine/api/blobstore/blobstore.py
+++ b/google-appengine/google/appengine/api/blobstore/blobstore.py
@@ -36,27 +36,36 @@ from google.appengine.api.blobstore import blobstore_service_pb
 from google.appengine.runtime import apiproxy_errors

-__all__ = ['BASE_CREATION_HEADER_FORMAT',
-           'BLOB_INFO_KIND',
+__all__ = ['BLOB_INFO_KIND',
            'BLOB_KEY_HEADER',
-           'BlobKey',
-           'CreationFormatError',
+           'BLOB_RANGE_HEADER',
+           'MAX_BLOB_FETCH_SIZE',
            'UPLOAD_INFO_CREATION_HEADER',
+           'BlobFetchSizeTooLargeError',
+           'BlobKey',
+           'BlobNotFoundError',
+           'DataIndexOutOfRangeError',
            'Error',
            'InternalError',
            'create_upload_url',
            'delete',
-           'parse_creation',
+           'fetch_data',
           ]

+BlobKey = datastore_types.BlobKey
+
+
 BLOB_INFO_KIND = '__BlobInfo__'

 BLOB_KEY_HEADER = 'X-AppEngine-BlobKey'

-UPLOAD_INFO_CREATION_HEADER = 'X-AppEngine-Upload-Creation'
+BLOB_RANGE_HEADER = 'X-AppEngine-BlobRange'
+
+MAX_BLOB_FETCH_SIZE = (1 << 20) - (1 << 15)

-BASE_CREATION_HEADER_FORMAT = '%Y-%m-%d %H:%M:%S'
+UPLOAD_INFO_CREATION_HEADER = 'X-AppEngine-Upload-Creation'

+_BASE_CREATION_HEADER_FORMAT = '%Y-%m-%d %H:%M:%S'

 class Error(Exception):
   """Base blobstore error type."""
@@ -66,7 +75,19 @@ class InternalError(Error):
   """Raised when an internal error occurs within API."""

-class CreationFormatError(Error):
+class BlobNotFoundError(Error):
+  """Raised when attempting to access blob data for a non-existent blob."""
+
+
+class DataIndexOutOfRangeError(Error):
+  """Raised when attempting to access blob data indexes that are out of
+  range or in the wrong order."""
+
+
+class BlobFetchSizeTooLargeError(Error):
+  """Raised when attempting to fetch too large a block from a blob."""
+
+
+class _CreationFormatError(Error):
+  """Raised when attempting to parse a bad creation date format."""

@@ -79,6 +100,12 @@ def _ToBlobstoreError(error):
   error_map = {
       blobstore_service_pb.BlobstoreServiceError.INTERNAL_ERROR:
       InternalError,
+      blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND:
+      BlobNotFoundError,
+      blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE:
+      DataIndexOutOfRangeError,
+      blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE:
+      BlobFetchSizeTooLargeError,
   }

   if error.application_error in error_map:
@@ -87,6 +114,68 @@
   return error


+def _format_creation(stamp):
+  """Formats an upload creation timestamp with microseconds.
+
+  This method is necessary to format a timestamp with microseconds on Python
+  versions before 2.6.
+
+  Cannot simply convert datetime objects to str because the microseconds are
+  stripped from the format when set to 0. The upload creation date format
+  will always have microseconds padded out to 6 places.
+
+  Args:
+    stamp: datetime.datetime object to format.
+
+  Returns:
+    Formatted datetime as Python 2.6 format '%Y-%m-%d %H:%M:%S.%f'.
+  """
+  return '%s.%06d' % (stamp.strftime(_BASE_CREATION_HEADER_FORMAT),
+                      stamp.microsecond)
+
+
+def _parse_creation(creation_string, field_name):
+  """Parses an upload creation string from its header format.
+
+  Parse creation date of the format:
+
+    YYYY-mm-dd HH:MM:SS.ffffff
+
+    Y: Year
+    m: Month (01-12)
+    d: Day (01-31)
+    H: Hour (00-23)
+    M: Minute (00-59)
+    S: Second (00-59)
+    f: Microsecond
+
+  Args:
+    creation_string: String creation date format.
+
+  Returns:
+    datetime object parsed from creation_string.
+
+  Raises:
+    _CreationFormatError when the creation string is formatted incorrectly.
+  """
+  split_creation_string = creation_string.split('.', 1)
+  if len(split_creation_string) != 2:
+    raise _CreationFormatError(
+        'Could not parse creation %s in field %s.' % (creation_string,
+                                                      field_name))
+  timestamp_string, microsecond = split_creation_string
+
+  try:
+    timestamp = time.strptime(timestamp_string,
+                              _BASE_CREATION_HEADER_FORMAT)
+    microsecond = int(microsecond)
+  except ValueError:
+    raise _CreationFormatError('Could not parse creation %s in field %s.'
+                               % (creation_string, field_name))
+
+  return datetime.datetime(*timestamp[:6] + tuple([microsecond]))
+
+
 def create_upload_url(success_path,
                       _make_sync_call=apiproxy_stub_map.MakeSyncCall):
   """Create upload URL for POST form.

@@ -127,47 +216,63 @@
   raise _ToBlobstoreError(e)


-def parse_creation(creation_string):
-  """Parses creation string from header format.
-
-  Parse creation date of the format:
+def fetch_data(blob_key, start_index, end_index,
+               _make_sync_call=apiproxy_stub_map.MakeSyncCall):
+  """Fetch data for blob.

-    YYYY-mm-dd HH:MM:SS.ffffff
-
-    Y: Year
-    m: Month (01-12)
-    d: Day (01-31)
-    H: Hour (00-24)
-    M: Minute (00-59)
-    S: Second (00-59)
-    f: Microsecond
+  See docstring for ext.blobstore.fetch_data for more details.

   Args:
-    creation_string: String creation date format.
+    blob_key: BlobKey, str or unicode representation of BlobKey of
+      blob to fetch data from.
+    start_index: Start index of blob data to fetch. May not be negative.
+    end_index: End index (inclusive) of blob data to fetch. Must be
+      >= start_index.

   Returns:
-    datetime object parsed from creation_string.
+    str containing partial data of blob. See docstring for
+    ext.blobstore.fetch_data for more details.

   Raises:
-    CreationFormatError when the creation string is formatted incorrectly.
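
A quick illustrative round trip through the two private creation-timestamp helpers added above (standalone code using the same format string as _BASE_CREATION_HEADER_FORMAT; the helpers themselves are module-private, so this only mirrors their logic):

    import datetime
    import time

    stamp = datetime.datetime(2010, 4, 28, 5, 37, 57, 42)
    header = '%s.%06d' % (stamp.strftime('%Y-%m-%d %H:%M:%S'),
                          stamp.microsecond)   # what _format_creation emits
    assert header == '2010-04-28 05:37:57.000042'

    # What _parse_creation reconstructs from the header value.
    parsed = time.strptime(header.split('.')[0], '%Y-%m-%d %H:%M:%S')
    rebuilt = datetime.datetime(*parsed[:6] + (int(header.split('.')[1]),))
    assert rebuilt == stamp
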
+ See docstring for ext.blobstore.fetch_data for more details. """ + if not isinstance(start_index, (int, long)): + raise TypeError('start_index must be integer.') - def split(string, by, count): - result = string.split(by, count) - if len(result) != count + 1: - raise CreationFormatError( - 'Could not parse creation %s.' % creation_string) - return result + if not isinstance(end_index, (int, long)): + raise TypeError('end_index must be integer.') - timestamp_string, microsecond = split(creation_string, '.', 1) + if isinstance(blob_key, BlobKey): + blob_key = str(blob_key).decode('utf-8') + elif isinstance(blob_key, str): + blob_key = blob_key.decode('utf-8') + elif not isinstance(blob_key, unicode): + raise TypeError('Blob-key must be str, unicode or BlobKey: %s' % blob_key) - try: - timestamp = time.strptime(timestamp_string, BASE_CREATION_HEADER_FORMAT) - microsecond = int(microsecond) - except ValueError: - raise CreationFormatError('Could not parse creation %s.' % creation_string) + if start_index < 0: + raise DataIndexOutOfRangeError( + 'May not fetch blob at negative index.') - return datetime.datetime(*timestamp[:6] + tuple([microsecond])) + if end_index < start_index: + raise DataIndexOutOfRangeError( + 'Start index %d > end index %d' % (start_index, end_index)) + fetch_size = end_index - start_index + 1 -BlobKey = datastore_types.BlobKey + if fetch_size > MAX_BLOB_FETCH_SIZE: + raise BlobFetchSizeTooLargeError( + 'Blob fetch size is too large: %d' % fetch_size) + + request = blobstore_service_pb.FetchDataRequest() + response = blobstore_service_pb.FetchDataResponse() + + request.set_blob_key(blob_key) + request.set_start_index(start_index) + request.set_end_index(end_index) + + try: + _make_sync_call('blobstore', 'FetchData', request, response) + except apiproxy_errors.ApplicationError, e: + raise _ToBlobstoreError(e) + + return response.data() diff --git a/google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py b/google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py old mode 100644 new mode 100755 index 6efecbb..a0f8bc4 --- a/google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py +++ b/google-appengine/google/appengine/api/blobstore/blobstore_service_pb.py @@ -29,12 +29,18 @@ class BlobstoreServiceError(ProtocolBuffer.ProtocolMessage): INTERNAL_ERROR = 1 URL_TOO_LONG = 2 PERMISSION_DENIED = 3 + BLOB_NOT_FOUND = 4 + DATA_INDEX_OUT_OF_RANGE = 5 + BLOB_FETCH_SIZE_TOO_LARGE = 6 _ErrorCode_NAMES = { 0: "OK", 1: "INTERNAL_ERROR", 2: "URL_TOO_LONG", 3: "PERMISSION_DENIED", + 4: "BLOB_NOT_FOUND", + 5: "DATA_INDEX_OUT_OF_RANGE", + 6: "BLOB_FETCH_SIZE_TOO_LARGE", } def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "") @@ -348,6 +354,240 @@ class DeleteBlobRequest(ProtocolBuffer.ProtocolMessage): 1: ProtocolBuffer.Encoder.STRING, }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" +class FetchDataRequest(ProtocolBuffer.ProtocolMessage): + has_blob_key_ = 0 + blob_key_ = "" + has_start_index_ = 0 + start_index_ = 0 + has_end_index_ = 0 + end_index_ = 0 + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def blob_key(self): return self.blob_key_ + + def set_blob_key(self, x): + self.has_blob_key_ = 1 + self.blob_key_ = x + + def clear_blob_key(self): + if self.has_blob_key_: + self.has_blob_key_ = 0 + self.blob_key_ = "" + + def has_blob_key(self): return self.has_blob_key_ + + def start_index(self): return self.start_index_ + + def 
set_start_index(self, x): + self.has_start_index_ = 1 + self.start_index_ = x + + def clear_start_index(self): + if self.has_start_index_: + self.has_start_index_ = 0 + self.start_index_ = 0 + + def has_start_index(self): return self.has_start_index_ + + def end_index(self): return self.end_index_ + + def set_end_index(self, x): + self.has_end_index_ = 1 + self.end_index_ = x + + def clear_end_index(self): + if self.has_end_index_: + self.has_end_index_ = 0 + self.end_index_ = 0 + + def has_end_index(self): return self.has_end_index_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_blob_key()): self.set_blob_key(x.blob_key()) + if (x.has_start_index()): self.set_start_index(x.start_index()) + if (x.has_end_index()): self.set_end_index(x.end_index()) + + def Equals(self, x): + if x is self: return 1 + if self.has_blob_key_ != x.has_blob_key_: return 0 + if self.has_blob_key_ and self.blob_key_ != x.blob_key_: return 0 + if self.has_start_index_ != x.has_start_index_: return 0 + if self.has_start_index_ and self.start_index_ != x.start_index_: return 0 + if self.has_end_index_ != x.has_end_index_: return 0 + if self.has_end_index_ and self.end_index_ != x.end_index_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_blob_key_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: blob_key not set.') + if (not self.has_start_index_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: start_index not set.') + if (not self.has_end_index_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: end_index not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.blob_key_)) + n += self.lengthVarInt64(self.start_index_) + n += self.lengthVarInt64(self.end_index_) + return n + 3 + + def Clear(self): + self.clear_blob_key() + self.clear_start_index() + self.clear_end_index() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.blob_key_) + out.putVarInt32(16) + out.putVarInt64(self.start_index_) + out.putVarInt32(24) + out.putVarInt64(self.end_index_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_blob_key(d.getPrefixedString()) + continue + if tt == 16: + self.set_start_index(d.getVarInt64()) + continue + if tt == 24: + self.set_end_index(d.getVarInt64()) + continue + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_blob_key_: res+=prefix+("blob_key: %s\n" % self.DebugFormatString(self.blob_key_)) + if self.has_start_index_: res+=prefix+("start_index: %s\n" % self.DebugFormatInt64(self.start_index_)) + if self.has_end_index_: res+=prefix+("end_index: %s\n" % self.DebugFormatInt64(self.end_index_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kblob_key = 1 + kstart_index = 2 + kend_index = 3 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "blob_key", + 2: "start_index", + 3: "end_index", + }, 3) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + 2: ProtocolBuffer.Encoder.NUMERIC, + 3: ProtocolBuffer.Encoder.NUMERIC, + }, 3, ProtocolBuffer.Encoder.MAX_TYPE) + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" +class 
FetchDataResponse(ProtocolBuffer.ProtocolMessage): + has_data_ = 0 + data_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def data(self): return self.data_ + + def set_data(self, x): + self.has_data_ = 1 + self.data_ = x + + def clear_data(self): + if self.has_data_: + self.has_data_ = 0 + self.data_ = "" + + def has_data(self): return self.has_data_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_data()): self.set_data(x.data()) + + def Equals(self, x): + if x is self: return 1 + if self.has_data_ != x.has_data_: return 0 + if self.has_data_ and self.data_ != x.data_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_data_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: data not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.data_)) + return n + 2 + + def Clear(self): + self.clear_data() + + def OutputUnchecked(self, out): + out.putVarInt32(8002) + out.putPrefixedString(self.data_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 8002: + self.set_data(d.getPrefixedString()) + continue + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_data_: res+=prefix+("data: %s\n" % self.DebugFormatString(self.data_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kdata = 1000 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1000: "data", + }, 1000) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1000: ProtocolBuffer.Encoder.STRING, + }, 1000, ProtocolBuffer.Encoder.MAX_TYPE) + _STYLE = """""" _STYLE_CONTENT_TYPE = """""" class DecodeBlobKeyRequest(ProtocolBuffer.ProtocolMessage): @@ -529,4 +769,4 @@ class DecodeBlobKeyResponse(ProtocolBuffer.ProtocolMessage): _STYLE = """""" _STYLE_CONTENT_TYPE = """""" -__all__ = ['BlobstoreServiceError','CreateUploadURLRequest','CreateUploadURLResponse','DeleteBlobRequest','DecodeBlobKeyRequest','DecodeBlobKeyResponse'] +__all__ = ['BlobstoreServiceError','CreateUploadURLRequest','CreateUploadURLResponse','DeleteBlobRequest','FetchDataRequest','FetchDataResponse','DecodeBlobKeyRequest','DecodeBlobKeyResponse'] diff --git a/google-appengine/google/appengine/api/blobstore/blobstore_stub.py b/google-appengine/google/appengine/api/blobstore/blobstore_stub.py index 3150bd6..3855341 100755 --- a/google-appengine/google/appengine/api/blobstore/blobstore_stub.py +++ b/google-appengine/google/appengine/api/blobstore/blobstore_stub.py @@ -34,6 +34,7 @@ from google.appengine.api import datastore from google.appengine.api import datastore_errors from google.appengine.api import datastore_types from google.appengine.api import users +from google.appengine.api import blobstore from google.appengine.api.blobstore import blobstore_service_pb from google.appengine.runtime import apiproxy_errors @@ -232,7 +233,54 @@ class BlobstoreServiceStub(apiproxy_stub.APIProxyStub): response: Not used but should be a VoidProto. 
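"""

With the FetchDataRequest/FetchDataResponse plumbing above, callers read blob fragments through blobstore.fetch_data. A hedged usage sketch (both indexes are inclusive and each call is capped at MAX_BLOB_FETCH_SIZE bytes; read_blob is an illustrative helper, not SDK API):

    from google.appengine.api import blobstore

    def read_blob(blob_key, size):
        # Read a blob of known size in maximum-sized fragments.
        chunks = []
        for start in xrange(0, size, blobstore.MAX_BLOB_FETCH_SIZE):
            end = min(size, start + blobstore.MAX_BLOB_FETCH_SIZE) - 1
            chunks.append(blobstore.fetch_data(blob_key, start, end))
        return ''.join(chunks)
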
""" for blob_key in request.blob_key_list(): - key = datastore_types.Key.from_path('__BlobInfo__', str(blob_key)) + key = datastore_types.Key.from_path(blobstore.BLOB_INFO_KIND, + str(blob_key)) datastore.Delete(key) self.__storage.DeleteBlob(blob_key) + + def _Dynamic_FetchData(self, request, response): + """Fetch a blob fragment from a blob by its blob-key. + + Fetches a blob fragment using its blob-key. Start index is inclusive, + end index is inclusive. Valid requests for information outside of + the range of the blob return a partial string or empty string if entirely + out of range. + + Args: + request: A fully initialized FetchDataRequest instance. + response: A FetchDataResponse instance. + + Raises: + ApplicationError when application has the following errors: + INDEX_OUT_OF_RANGE: Index is negative or end > start. + BLOB_FETCH_SIZE_TOO_LARGE: Request blob fragment is larger than + MAX_BLOB_FRAGMENT_SIZE. + BLOB_NOT_FOUND: If invalid blob-key is provided or is not found. + """ + start_index = request.start_index() + if start_index < 0: + raise apiproxy_errors.ApplicationError( + blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE) + + end_index = request.end_index() + if end_index < start_index: + raise apiproxy_errors.ApplicationError( + blobstore_service_pb.BlobstoreServiceError.DATA_INDEX_OUT_OF_RANGE) + + fetch_size = end_index - start_index + 1 + if fetch_size > blobstore.MAX_BLOB_FETCH_SIZE: + raise apiproxy_errors.ApplicationError( + blobstore_service_pb.BlobstoreServiceError.BLOB_FETCH_SIZE_TOO_LARGE) + + blob_key = request.blob_key() + blob_info_key = datastore.Key.from_path(blobstore.BLOB_INFO_KIND, blob_key) + try: + datastore.Get(blob_info_key) + except datastore_errors.EntityNotFoundError, err: + raise apiproxy_errors.ApplicationError( + blobstore_service_pb.BlobstoreServiceError.BLOB_NOT_FOUND) + + blob_file = self.__storage.OpenBlob(blob_key) + blob_file.seek(start_index) + response.set_data(blob_file.read(fetch_size)) diff --git a/google-appengine/google/appengine/api/capabilities/capability_service_pb.py b/google-appengine/google/appengine/api/capabilities/capability_service_pb.py old mode 100644 new mode 100755 diff --git a/google-appengine/google/appengine/api/croninfo.py b/google-appengine/google/appengine/api/croninfo.py index 0eab26e..6967d06 100755 --- a/google-appengine/google/appengine/api/croninfo.py +++ b/google-appengine/google/appengine/api/croninfo.py @@ -33,6 +33,7 @@ except ImportError: pytz = None from google.appengine.cron import groc +from google.appengine.cron import groctimespecification from google.appengine.api import validation from google.appengine.api import yaml_builder from google.appengine.api import yaml_listener @@ -46,15 +47,14 @@ _DESCRIPTION_REGEX = r'^.{0,499}$' class GrocValidator(validation.Validator): """Checks that a schedule is in valid groc format.""" - def Validate(self, value): + def Validate(self, value, key=None): """Validates a schedule.""" if value is None: raise validation.MissingAttribute('schedule must be specified') if not isinstance(value, basestring): raise TypeError('schedule must be a string, not \'%r\''%type(value)) - schedule = groc.CreateParser(value) try: - schedule.timespec() + groctimespecification.GrocTimeSpecification(value) except groc.GrocException, e: raise validation.ValidationError('schedule \'%s\' failed to parse: %s'%( value, e.args[0])) @@ -64,7 +64,7 @@ class GrocValidator(validation.Validator): class TimezoneValidator(validation.Validator): """Checks that a timezone can be 
correctly parsed and is known.""" - def Validate(self, value): + def Validate(self, value, key=None): """Validates a timezone.""" if value is None: return diff --git a/google-appengine/google/appengine/api/datastore.py b/google-appengine/google/appengine/api/datastore.py index bcd4aea..0d2e83e 100755 --- a/google-appengine/google/appengine/api/datastore.py +++ b/google-appengine/google/appengine/api/datastore.py @@ -81,6 +81,15 @@ _txes = {} _ALLOWED_API_KWARGS = frozenset(['rpc']) +_ALLOWED_FAILOVER_READ_METHODS = set( + ('Get', 'RunQuery', 'RunCompiledQuery', 'Count', 'Next')) + +ARBITRARY_FAILOVER_MS = -1 + +STRONG_CONSISTENCY = 0 +EVENTUAL_CONSISTENCY = 1 + + def NormalizeAndTypeCheck(arg, types): """Normalizes and type checks the given argument. @@ -178,18 +187,21 @@ def _MakeSyncCall(service, call, request, response, rpc=None): return response -def CreateRPC(service='datastore_v3', deadline=None, callback=None): +def CreateRPC(service='datastore_v3', deadline=None, callback=None, + read_policy=STRONG_CONSISTENCY): """Create an rpc for use in configuring datastore calls. Args: deadline: float, deadline for calls in seconds. callback: callable, a callback triggered when this rpc completes, accepts one argument: the returned rpc. + read_policy: flag, set to EVENTUAL_CONSISTENCY to enable eventually + consistent reads Returns: A datastore.DatastoreRPC instance. """ - return DatastoreRPC(service, deadline, callback) + return DatastoreRPC(service, deadline, callback, read_policy) class DatastoreRPC(apiproxy_stub_map.UserRPC): @@ -203,6 +215,20 @@ class DatastoreRPC(apiproxy_stub_map.UserRPC): deadline, on API calls. It will be used to make the actual call. """ + def __init__(self, service='datastore_v3', deadline=None, callback=None, + read_policy=STRONG_CONSISTENCY): + super(DatastoreRPC, self).__init__(service, deadline, callback) + self.read_policy = read_policy + + def make_call(self, call, request, response): + if self.read_policy == EVENTUAL_CONSISTENCY: + if call not in _ALLOWED_FAILOVER_READ_METHODS: + raise datastore_errors.BadRequestError( + 'read_policy is only supported on read operations.') + if call != 'Next': + request.set_failover_ms(ARBITRARY_FAILOVER_MS) + super(DatastoreRPC, self).make_call(call, request, response) + def clone(self): """Make a shallow copy of this instance. @@ -211,7 +237,8 @@ class DatastoreRPC(apiproxy_stub_map.UserRPC): developer's easy control. """ assert self.state == apiproxy_rpc.RPC.IDLE - return self.__class__(self.service, self.deadline, self.callback) + return self.__class__( + self.service, self.deadline, self.callback, self.read_policy) def Put(entities, **kwargs): @@ -368,7 +395,7 @@ class Entity(dict): provides dictionary-style access to properties. """ def __init__(self, kind, parent=None, _app=None, name=None, id=None, - unindexed_properties=[], _namespace=None): + unindexed_properties=[], namespace=None, **kwds): """Constructor. Takes the kind and transaction root, which cannot be changed after the entity is constructed, and an optional parent. Raises BadArgumentError or BadKeyError if kind is invalid or parent is not an @@ -386,12 +413,23 @@ class Entity(dict): # if provided, a sequence of property names that should not be indexed # by the built-in single property indices. unindexed_properties: list or tuple of strings + namespace: string + # if provided, overrides the default namespace_manager setting. 
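
The read_policy argument threaded through CreateRPC and DatastoreRPC above is exercised by passing the rpc into a read call. A minimal sketch (key stands for an existing datastore.Key and is illustrative):

    from google.appengine.api import datastore

    rpc = datastore.CreateRPC(deadline=5,
                              read_policy=datastore.EVENTUAL_CONSISTENCY)
    entity = datastore.Get(key, rpc=rpc)  # key: an existing datastore.Key
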
""" ref = entity_pb.Reference() _app = datastore_types.ResolveAppId(_app) - _namespace = datastore_types.ResolveNamespace(_namespace) ref.set_app(_app) - datastore_types.SetNamespace(ref, _namespace) + + _namespace = kwds.pop('_namespace', None) + if kwds: + raise datastore_errors.BadArgumentError( + 'Excess keyword arguments ' + repr(kwds)) + + if namespace is None: + namespace = _namespace + elif _namespace is not None: + raise datastore_errors.BadArgumentError( + "Must not set both _namespace and namespace parameters.") datastore_types.ValidateString(kind, 'kind', datastore_errors.BadArgumentError) @@ -401,12 +439,17 @@ class Entity(dict): raise datastore_errors.BadArgumentError( " %s doesn't match parent's app %s" % (_app, parent.app())) - if _namespace != parent.namespace(): + if namespace is None: + namespace = parent.namespace() + elif namespace != parent.namespace(): raise datastore_errors.BadArgumentError( " %s doesn't match parent's namespace %s" % - (_namespace, parent.namespace())) + (namespace, parent.namespace())) ref.CopyFrom(parent._Key__reference) + namespace = datastore_types.ResolveNamespace(namespace) + datastore_types.SetNamespace(ref, namespace) + last_path = ref.mutable_path().add_element() last_path.set_type(kind.encode('utf-8')) @@ -699,12 +742,13 @@ class Entity(dict): unindexed_properties = [p.name() for p in pb.raw_property_list()] - namespace = pb.key().name_space() - if not namespace: - namespace = None + if pb.key().has_name_space(): + namespace = pb.key().name_space() + else: + namespace = '' e = Entity(unicode(last_path.type().decode('utf-8')), unindexed_properties=unindexed_properties, - _app=pb.key().app(), _namespace=namespace) + _app=pb.key().app(), namespace=namespace) ref = e.__key._Key__reference ref.CopyFrom(pb.key()) @@ -716,8 +760,8 @@ class Entity(dict): value = datastore_types.FromPropertyPb(prop) except (AssertionError, AttributeError, TypeError, ValueError), e: raise datastore_errors.Error( - 'Property %s is corrupt in the datastore. %s: %s' % - (e.__class__, prop.name(), e)) + 'Property %s is corrupt in the datastore:\n%s' % + (prop.name(), traceback.format_exc())) multiple = prop.multiple() if multiple: @@ -727,7 +771,7 @@ class Entity(dict): cur_value = temporary_values.get(name) if cur_value is None: temporary_values[name] = value - elif not multiple: + elif not multiple or not isinstance(cur_value, list): raise datastore_errors.Error( 'Property %s is corrupt in the datastore; it has multiple ' 'values, but is not marked as multiply valued.' % name) @@ -858,7 +902,7 @@ class Query(dict): __inequality_count = 0 def __init__(self, kind=None, filters={}, _app=None, keys_only=False, - compile=True, cursor=None, _namespace=None): + compile=True, cursor=None, namespace=None, **kwds): """Constructor. Raises BadArgumentError if kind is not a string. 
Raises BadValueError or @@ -870,7 +914,20 @@ class Query(dict): kind: string filters: dict keys_only: boolean + namespace: string """ + + _namespace = kwds.pop('_namespace', None) + if kwds: + raise datastore_errors.BadArgumentError( + 'Excess keyword arguments ' + repr(kwds)) + + if namespace is None: + namespace = _namespace + elif _namespace is not None: + raise datastore_errors.BadArgumentError( + "Must not set both _namespace and namespace parameters.") + if kind is not None: datastore_types.ValidateString(kind, 'kind', datastore_errors.BadArgumentError) @@ -881,7 +938,7 @@ class Query(dict): self.update(filters) self.__app = datastore_types.ResolveAppId(_app) - self.__namespace = datastore_types.ResolveNamespace(_namespace) + self.__namespace = datastore_types.ResolveNamespace(namespace) self.__keys_only = keys_only self.__compile = compile self.__cursor = cursor @@ -2172,9 +2229,8 @@ def RunInTransactionCustomRetries(retries, function, *args, **kwargs): _MakeSyncCall('datastore_v3', 'Rollback', tx.handle, api_base_pb.VoidProto()) except: - exc_info = sys.exc_info() logging.info('Exception sending Rollback:\n' + - ''.join(traceback.format_exception(*exc_info))) + traceback.format_exc()) type, value, trace = original_exception if type is datastore_errors.Rollback: @@ -2410,16 +2466,35 @@ def _ToDatastoreError(err): Returns: a subclass of datastore_errors.Error """ - errors = { - datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError, - datastore_pb.Error.CONCURRENT_TRANSACTION: - datastore_errors.TransactionFailedError, - datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError, - datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError, - datastore_pb.Error.TIMEOUT: datastore_errors.Timeout, - } - - if err.application_error in errors: - return errors[err.application_error](err.error_detail) + return _DatastoreExceptionFromErrorCodeAndDetail(err.application_error, + err.error_detail) + + +def _DatastoreExceptionFromErrorCodeAndDetail(error, detail): + """Converts a datastore_pb.Error into a datastore_errors.Error. + + Args: + error: A member of the datastore_pb.Error enumeration. + detail: A string providing extra details about the error. + + Returns: + A subclass of datastore_errors.Error. 
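
The namespace keyword threaded through Entity and Query in the hunks above replaces the private _namespace argument (passing both raises BadArgumentError). A minimal sketch of the new spelling:

    from google.appengine.api import datastore

    entity = datastore.Entity('Greeting', namespace='demo')
    query = datastore.Query('Greeting', namespace='demo')
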
+ """ + exception_class = { + datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError, + datastore_pb.Error.CONCURRENT_TRANSACTION: + datastore_errors.TransactionFailedError, + datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError, + datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError, + datastore_pb.Error.TIMEOUT: datastore_errors.Timeout, + datastore_pb.Error.BIGTABLE_ERROR: datastore_errors.Timeout, + datastore_pb.Error.COMMITTED_BUT_STILL_APPLYING: + datastore_errors.CommittedButStillApplying, + datastore_pb.Error.CAPABILITY_DISABLED: + apiproxy_errors.CapabilityDisabledError, + }.get(error, datastore_errors.Error) + + if detail is None: + return exception_class() else: - return datastore_errors.Error(err.error_detail) + return exception_class(detail) diff --git a/google-appengine/google/appengine/api/datastore_admin.py b/google-appengine/google/appengine/api/datastore_admin.py index 4b26081..d6a80d4 100755 --- a/google-appengine/google/appengine/api/datastore_admin.py +++ b/google-appengine/google/appengine/api/datastore_admin.py @@ -29,7 +29,8 @@ from google.appengine.datastore import datastore_index from google.appengine.datastore import datastore_pb from google.appengine.runtime import apiproxy_errors -def GetSchema(_app=None, properties=True, start_kind=None, end_kind=None): +def GetSchema(_app=None, namespace=None, properties=True, start_kind=None, + end_kind=None): """Infers an app's schema from the entities in the datastore. Note that the PropertyValue PBs in the returned EntityProtos are empty @@ -42,12 +43,16 @@ def GetSchema(_app=None, properties=True, start_kind=None, end_kind=None): properties: boolean, whether to include property names and types start_kind, end_kind: optional range endpoints for the kinds to return, compared lexicographically + namespace: string, specified namespace of schema to be fetched Returns: list of entity_pb.EntityProto, with kind and property names and types """ req = datastore_pb.GetSchemaRequest() req.set_app(datastore_types.ResolveAppId(_app)) + namespace = datastore_types.ResolveNamespace(namespace) + if namespace: + req.set_name_space(namespace) req.set_properties(properties) if start_kind is not None: req.set_start_kind(start_kind) diff --git a/google-appengine/google/appengine/api/datastore_errors.py b/google-appengine/google/appengine/api/datastore_errors.py index ff53ba2..f1acdf3 100755 --- a/google-appengine/google/appengine/api/datastore_errors.py +++ b/google-appengine/google/appengine/api/datastore_errors.py @@ -99,7 +99,15 @@ class NeedIndexError(Error): """ class Timeout(Error): - """The datastore operation timed out. This can happen when you attempt to - put, get, or delete too many entities or an entity with too many properties, - or if the datastore is overloaded or having trouble. + """The datastore operation timed out, or the data was temporarily + unavailable. This can happen when you attempt to put, get, or delete too + many entities or an entity with too many properties, or if the datastore is + overloaded or having trouble. + """ + +class CommittedButStillApplying(Timeout): + """The write or transaction was committed, but some entities or index rows + may not have been fully updated. Those updates should automatically be + applied soon. You can roll them forward immediately by reading one of the + entities inside a transaction. 
""" diff --git a/google-appengine/google/appengine/api/datastore_file_stub.py b/google-appengine/google/appengine/api/datastore_file_stub.py index 611624b..f0a809b 100755 --- a/google-appengine/google/appengine/api/datastore_file_stub.py +++ b/google-appengine/google/appengine/api/datastore_file_stub.py @@ -162,7 +162,7 @@ class _Cursor(object): offset += query.offset() if offset > 0: - self.__last_result = results[offset - 1] + self.__last_result = results[min(len(results), offset) - 1] else: self.__last_result = cursor_entity @@ -208,7 +208,7 @@ class _Cursor(object): while lo < hi: mid = (lo + hi) // 2 if compare(results[mid], cursor_entity) < 0: - lo = mid + 1 + lo = mid + 1 else: hi = mid else: @@ -313,8 +313,6 @@ class _Cursor(object): self.__last_result.ToPb().Encode())) position.set_start_key(str(start_key)) position.set_start_inclusive(False) - elif self.__query.has_compiled_cursor: - compiled_cursor.CopyFrom(self.__query.compiled_cursor()) def PopulateQueryResult(self, result, count, compile=False): """Populates a QueryResult with this cursor and the given number of results. @@ -342,7 +340,8 @@ class _Cursor(object): result.set_more_results(self.__offset < self.count) if compile: - self._EncodeCompiledCursor(self.__query, result.mutable_compiled_cursor()) + self._EncodeCompiledCursor( + self.__query, result.mutable_compiled_cursor()) class DatastoreFileStub(apiproxy_stub.APIProxyStub): @@ -1063,18 +1062,30 @@ class DatastoreFileStub(apiproxy_stub.APIProxyStub): self.__tx_snapshot = dict(snapshot) self.__tx_actions = [] - def _Dynamic_AddAction(self, request, void): - self.__ValidateTransaction(request.transaction()) + def _Dynamic_AddActions(self, request, _): + """Associates the creation of one or more tasks with a transaction. + + Args: + request: A taskqueue_service_pb.TaskQueueBulkAddRequest containing the + tasks that should be created when the transaction is comitted. 
+ """ + - if len(self.__tx_actions) >= _MAX_ACTIONS_PER_TXN: + if ((len(self.__tx_actions) + request.add_request_size()) > + _MAX_ACTIONS_PER_TXN): raise apiproxy_errors.ApplicationError( datastore_pb.Error.BAD_REQUEST, 'Too many messages, maximum allowed %s' % _MAX_ACTIONS_PER_TXN) - clone = taskqueue_service_pb.TaskQueueAddRequest() - clone.CopyFrom(request) - clone.clear_transaction() - self.__tx_actions.append(clone) + new_actions = [] + for add_request in request.add_request_list(): + self.__ValidateTransaction(add_request.transaction()) + clone = taskqueue_service_pb.TaskQueueAddRequest() + clone.CopyFrom(add_request) + clone.clear_transaction() + new_actions.append(clone) + + self.__tx_actions.extend(new_actions) def _Dynamic_Commit(self, transaction, transaction_response): self.__ValidateTransaction(transaction) @@ -1108,15 +1119,18 @@ class DatastoreFileStub(apiproxy_stub.APIProxyStub): app_str = req.app() self.__ValidateAppId(app_str) + namespace_str = req.name_space() + app_namespace_str = datastore_types.EncodeAppIdNamespace(app_str, + namespace_str) kinds = [] - for app, kind in self.__entities: - if (app != app_str or + for app_namespace, kind in self.__entities: + if (app_namespace != app_namespace_str or (req.has_start_kind() and kind < req.start_kind()) or (req.has_end_kind() and kind > req.end_kind())): continue - app_kind = (app, kind) + app_kind = (app_namespace_str, kind) if app_kind in self.__schema_cache: kinds.append(self.__schema_cache[app_kind]) continue diff --git a/google-appengine/google/appengine/api/datastore_types.py b/google-appengine/google/appengine/api/datastore_types.py index c7a7c20..9c80104 100755 --- a/google-appengine/google/appengine/api/datastore_types.py +++ b/google-appengine/google/appengine/api/datastore_types.py @@ -175,10 +175,11 @@ def ResolveNamespace(namespace): Raises: BadArgumentError if the value is not a string. """ - if not namespace: - namespace = namespace_manager.get_namespace(); - ValidateString( - namespace, 'namespace', datastore_errors.BadArgumentError, empty_ok=True) + if namespace is None: + namespace = namespace_manager.get_namespace() + else: + namespace_manager.validate_namespace( + namespace, datastore_errors.BadArgumentError) return namespace @@ -215,9 +216,9 @@ def PartitionString(value, separator): value: String to be partitioned separator: Separator string """ - index = value.find(separator); + index = value.find(separator) if index == -1: - return (value, '', value[0:0]); + return (value, '', value[0:0]) else: return (value[0:index], separator, value[index+len(separator):len(value)]) @@ -312,12 +313,9 @@ class Key(object): Args: kind: the entity kind (a str or unicode instance) id_or_name: the id (an int or long) or name (a str or unicode instance) - - Additional positional arguments are allowed and should be - alternating kind and id/name. - - Keyword args: parent: optional parent Key; default None. + namespace: optional namespace to use otherwise namespace_manager's + default namespace is used. Returns: A new Key instance whose .kind() and .id() or .name() methods return @@ -329,7 +327,8 @@ class Key(object): """ parent = kwds.pop('parent', None) app_id = ResolveAppId(kwds.pop('_app', None)) - namespace = ResolveNamespace(kwds.pop('namespace', None)) + + namespace = kwds.pop('namespace', None) if kwds: raise datastore_errors.BadArgumentError( @@ -345,14 +344,18 @@ class Key(object): raise datastore_errors.BadArgumentError( 'Expected None or a Key as parent; received %r (a %s).' 
% (parent, typename(parent))) + if namespace is None: + namespace = parent.namespace() if not parent.has_id_or_name(): raise datastore_errors.BadKeyError( 'The parent Key is incomplete.') if app_id != parent.app() or namespace != parent.namespace(): raise datastore_errors.BadArgumentError( - 'The app/namespace arguments (%r) should match ' + - 'parent.app/namespace() (%s)' % - ((app_id, namespace), (parent.app(), parent.namespace()))) + 'The app/namespace arguments (%s/%s) should match ' + 'parent.app/namespace() (%s/%s)' % + (app_id, namespace, parent.app(), parent.namespace())) + + namespace = ResolveNamespace(namespace) key = Key() ref = key.__reference @@ -394,11 +397,11 @@ class Key(object): return None def namespace(self): - """Returns this entity's app id, a string.""" + """Returns this entity's namespace, a string.""" if self.__reference.has_name_space(): return self.__reference.name_space().decode('utf-8') else: - return None + return '' def kind(self): """Returns this entity's kind, as a string.""" @@ -581,7 +584,7 @@ class Key(object): args.append('_app=%r' % self.__reference.app().decode('utf-8')) if self.__reference.has_name_space(): - args.append('_namespace=%r' % + args.append('namespace=%r' % self.__reference.name_space().decode('utf-8')) return u'datastore_types.Key.from_path(%s)' % ', '.join(args) @@ -602,10 +605,10 @@ class Key(object): if not isinstance(other, Key): return -2 - self_args = [self.__reference.app()] + self_args = [self.__reference.app(), self.__reference.name_space()] self_args += self.to_path(_default_id=0) - other_args = [other.__reference.app()] + other_args = [other.__reference.app(), other.__reference.name_space()] other_args += other.to_path(_default_id=0) for self_component, other_component in zip(self_args, other_args): diff --git a/google-appengine/google/appengine/api/dosinfo.py b/google-appengine/google/appengine/api/dosinfo.py index 6fefeb7..c1c01ae 100755 --- a/google-appengine/google/appengine/api/dosinfo.py +++ b/google-appengine/google/appengine/api/dosinfo.py @@ -21,6 +21,7 @@ Library for parsing dos.yaml files and working with these in memory. 
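
The subnet validator below now calls ipaddr.IPNetwork (the current ipaddr API, as updated elsewhere in this patch) rather than the removed ipaddr.IP. A quick sketch of the check it performs:

    import ipaddr

    ipaddr.IPNetwork('192.0.2.0/24')   # valid IPv4 subnet: parses
    ipaddr.IPNetwork('2001:db8::/32')  # valid IPv6 subnet: parses
    try:
        ipaddr.IPNetwork('not-a-subnet')
    except ValueError:
        pass  # rejected, exactly what SubnetValidator reports as an error
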
""" + import google import ipaddr @@ -39,12 +40,12 @@ SUBNET = 'subnet' class SubnetValidator(validation.Validator): """Checks that a subnet can be parsed and is a valid IPv4 or IPv6 subnet.""" - def Validate(self, value): + def Validate(self, value, key=None): """Validates a subnet.""" if value is None: raise validation.MissingAttribute('subnet must be specified') try: - ipaddr.IP(value) + ipaddr.IPNetwork(value) except ValueError: raise validation.ValidationError('%s is not a valid IPv4 or IPv6 subnet' % value) diff --git a/google-appengine/google/appengine/api/images/images_service_pb.py b/google-appengine/google/appengine/api/images/images_service_pb.py old mode 100644 new mode 100755 index f5ecb31..62bae52 --- a/google-appengine/google/appengine/api/images/images_service_pb.py +++ b/google-appengine/google/appengine/api/images/images_service_pb.py @@ -2014,7 +2014,175 @@ class ImagesHistogramResponse(ProtocolBuffer.ProtocolMessage): 1: ProtocolBuffer.Encoder.STRING, }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" +class ImagesGetUrlBaseRequest(ProtocolBuffer.ProtocolMessage): + has_blob_key_ = 0 + blob_key_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def blob_key(self): return self.blob_key_ + + def set_blob_key(self, x): + self.has_blob_key_ = 1 + self.blob_key_ = x + + def clear_blob_key(self): + if self.has_blob_key_: + self.has_blob_key_ = 0 + self.blob_key_ = "" + + def has_blob_key(self): return self.has_blob_key_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_blob_key()): self.set_blob_key(x.blob_key()) + + def Equals(self, x): + if x is self: return 1 + if self.has_blob_key_ != x.has_blob_key_: return 0 + if self.has_blob_key_ and self.blob_key_ != x.blob_key_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_blob_key_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: blob_key not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.blob_key_)) + return n + 1 + + def Clear(self): + self.clear_blob_key() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.blob_key_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_blob_key(d.getPrefixedString()) + continue + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_blob_key_: res+=prefix+("blob_key: %s\n" % self.DebugFormatString(self.blob_key_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kblob_key = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "blob_key", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" +class ImagesGetUrlBaseResponse(ProtocolBuffer.ProtocolMessage): + has_url_ = 0 + url_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def url(self): return self.url_ + + def set_url(self, x): + self.has_url_ = 1 + self.url_ = x + + def clear_url(self): + if self.has_url_: + self.has_url_ = 0 + self.url_ = "" + + def has_url(self): return self.has_url_ + + + def 
MergeFrom(self, x): + assert x is not self + if (x.has_url()): self.set_url(x.url()) + + def Equals(self, x): + if x is self: return 1 + if self.has_url_ != x.has_url_: return 0 + if self.has_url_ and self.url_ != x.url_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_url_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: url not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthString(len(self.url_)) + return n + 1 + + def Clear(self): + self.clear_url() + + def OutputUnchecked(self, out): + out.putVarInt32(10) + out.putPrefixedString(self.url_) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + self.set_url(d.getPrefixedString()) + continue + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_)) + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kurl = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "url", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + _STYLE = """""" _STYLE_CONTENT_TYPE = """""" -__all__ = ['ImagesServiceError','ImagesServiceTransform','Transform','ImageData','OutputSettings','ImagesTransformRequest','ImagesTransformResponse','CompositeImageOptions','ImagesCanvas','ImagesCompositeRequest','ImagesCompositeResponse','ImagesHistogramRequest','ImagesHistogram','ImagesHistogramResponse'] +__all__ = ['ImagesServiceError','ImagesServiceTransform','Transform','ImageData','OutputSettings','ImagesTransformRequest','ImagesTransformResponse','CompositeImageOptions','ImagesCanvas','ImagesCompositeRequest','ImagesCompositeResponse','ImagesHistogramRequest','ImagesHistogram','ImagesHistogramResponse','ImagesGetUrlBaseRequest','ImagesGetUrlBaseResponse'] diff --git a/google-appengine/google/appengine/api/images/images_stub.py b/google-appengine/google/appengine/api/images/images_stub.py index a9b0b4d..be7ceab 100755 --- a/google-appengine/google/appengine/api/images/images_stub.py +++ b/google-appengine/google/appengine/api/images/images_stub.py @@ -57,6 +57,27 @@ def _ArgbToRgbaTuple(argb): (unsigned_argb >> 24) & 0xFF) +def _BackendPremultiplication(color): + """Apply premultiplication and unpremultiplication to match production. + + Args: + color: color tuple as returned by _ArgbToRgbaTuple. + + Returns: + RGBA tuple. 
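
A worked trace of the premultiplication arithmetic in the function body below, for a fully transparent pixel (pure arithmetic, following the code):

    color = (10, 20, 30, 0)
    alpha, rgb = color[3], color[0:3]
    multiplied = [(x * (alpha + 1)) >> 8 for x in rgb]  # [0, 0, 0]
    # alpha == 0, so the unmultiplied result is [0, 0, 0]: a fully
    # transparent pixel keeps no color information, matching production.
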
+ """ + alpha = color[3] + rgb = color[0:3] + multiplied = [(x * (alpha + 1)) >> 8 for x in rgb] + if alpha: + alpha_inverse = 0xffffff / alpha + unmultiplied = [(x * alpha_inverse) >> 16 for x in multiplied] + else: + unmultiplied = [0] * 3 + + return tuple(unmultiplied + [alpha]) + + class ImagesServiceStub(apiproxy_stub.APIProxyStub): """Stub version of images API to be used with the dev_appserver.""" @@ -82,6 +103,7 @@ class ImagesServiceStub(apiproxy_stub.APIProxyStub): width = request.canvas().width() height = request.canvas().height() color = _ArgbToRgbaTuple(request.canvas().color()) + color = _BackendPremultiplication(color) canvas = Image.new("RGBA", (width, height), color) sources = [] if (not request.canvas().width() or request.canvas().width() > 4000 or diff --git a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue.py b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue.py index 213e3f8..fc2774e 100755 --- a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue.py +++ b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue.py @@ -30,6 +30,7 @@ base path. A default queue is also provided for simple usage. import datetime +import os import re import time import urllib @@ -109,10 +110,22 @@ class PermissionDeniedError(Error): """The requested operation is not allowed for this app.""" +class DuplicateTaskNameError(Error): + """The add arguments contain tasks with identical names.""" + + +class TooManyTasksError(Error): + """Too many tasks were present in a single function call.""" + + class DatastoreError(Error): """There was a datastore error while accessing the queue.""" +class BadTransactionState(Error): + """The state of the current transaction does not permit this operation.""" + + MAX_QUEUE_NAME_LENGTH = 100 MAX_TASK_NAME_LENGTH = 500 @@ -145,6 +158,42 @@ _QUEUE_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_QUEUE_NAME_LENGTH _QUEUE_NAME_RE = re.compile(_QUEUE_NAME_PATTERN) +_ERROR_MAPPING = { + taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE: UnknownQueueError, + taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR: + TransientError, + taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR: InternalError, + taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE: + TaskTooLargeError, + taskqueue_service_pb.TaskQueueServiceError.INVALID_TASK_NAME: + InvalidTaskNameError, + taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME: + InvalidQueueNameError, + taskqueue_service_pb.TaskQueueServiceError.INVALID_URL: InvalidUrlError, + taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_RATE: + InvalidQueueError, + taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED: + PermissionDeniedError, + taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS: + TaskAlreadyExistsError, + taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK: + TombstonedTaskError, + taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA: InvalidTaskError, + taskqueue_service_pb.TaskQueueServiceError.INVALID_REQUEST: Error, + taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_TASK: Error, + taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE: Error, + taskqueue_service_pb.TaskQueueServiceError.DUPLICATE_TASK_NAME: + DuplicateTaskNameError, + + taskqueue_service_pb.TaskQueueServiceError.TOO_MANY_TASKS: + TooManyTasksError, + +} + +_PRESERVE_ENVIRONMENT_HEADERS = ( + ('X-AppEngine-Default-Namespace', 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'), + ('X-AppEngine-Current-Namespace', 'HTTP_X_APPENGINE_CURRENT_NAMESPACE')) 
+
 
 class _UTCTimeZone(datetime.tzinfo):
   """UTC timezone."""
@@ -284,6 +333,11 @@ class Task(object):
     self.__payload = None
     params = kwargs.get('params', {})
 
+    for header_name, environ_name in _PRESERVE_ENVIRONMENT_HEADERS:
+      value = os.environ.get(environ_name)
+      if value is not None:
+        self.__headers.setdefault(header_name, value)
+
     if query and params:
       raise InvalidTaskError('Query string and parameters both present; '
                              'only one of these may be supplied')
@@ -509,65 +563,140 @@ class Queue(object):
       self.__url = '%s/%s' % (_DEFAULT_QUEUE_PATH, self.__name)
 
   def add(self, task, transactional=False):
-    """Adds a Task to this Queue.
+    """Adds a Task or list of Tasks to this Queue.
+
+    If a list of more than one Task is given, a raised exception does not
+    guarantee that no tasks were added to the queue (unless transactional is
+    set to True). To determine which tasks were successfully added when an
+    exception is raised, check the Task.was_enqueued property.
 
     Args:
-      task: The Task to add.
-      transactional: If false adds the task to a queue irrespectively to the
-        enclosing transaction success or failure. (optional)
+      task: A Task instance or a list of Task instances that will be added to
+        the queue.
+      transactional: If False, adds the Task(s) to the queue irrespective of
+        the enclosing transaction's success or failure. An exception is
+        raised if this is True and the call is made outside of a transaction.
+        (optional)
 
     Returns:
-      The Task that was supplied to this method.
+      The Task or list of Tasks that was supplied to this method.
 
     Raises:
-      BadTaskStateError if the Task has already been added to a queue.
+      BadTaskStateError: if any Task has already been added to a queue.
+      BadTransactionState: if the transactional argument is True but this
+        call is being made outside of the context of a transaction.
      Error-subclass on application errors.
""" + try: + tasks = list(iter(task)) + except TypeError: + tasks = [task] + multiple = False + else: + multiple = True + + self.__AddTasks(tasks, transactional) + + if multiple: + return tasks + else: + assert len(tasks) == 1 + return tasks[0] + + def __AddTasks(self, tasks, transactional): + """Internal implementation of .add() where tasks must be a list.""" + + request = taskqueue_service_pb.TaskQueueBulkAddRequest() + response = taskqueue_service_pb.TaskQueueBulkAddResponse() + + task_names = set() + for task in tasks: + if task.name: + if task.name in task_names: + raise DuplicateTaskNameError( + 'The task name %r is used more than once in the request' % + task.name) + task_names.add(task.name) + + self.__FillAddRequest(task, request.add_add_request(), transactional) + + try: + apiproxy_stub_map.MakeSyncCall('taskqueue', 'BulkAdd', request, response) + except apiproxy_errors.ApplicationError, e: + raise self.__TranslateError(e.application_error, e.error_detail) + + assert response.taskresult_size() == len(tasks), ( + 'expected %d results from BulkAdd(), got %d' % ( + len(tasks), response.taskresult_size())) + + exception = None + for task, task_result in zip(tasks, response.taskresult_list()): + if task_result.result() == taskqueue_service_pb.TaskQueueServiceError.OK: + if task_result.has_chosen_task_name(): + task._Task__name = task_result.chosen_task_name() + task._Task__enqueued = True + elif (task_result.result() == + taskqueue_service_pb.TaskQueueServiceError.SKIPPED): + pass + elif exception is None: + exception = self.__TranslateError(task_result.result()) + + if exception is not None: + raise exception + + return tasks + + def __FillAddRequest(self, task, task_request, transactional): + """Populates a TaskQueueAddRequest with the data from a Task instance. + + Args: + task: The Task instance to use as a source for the data to be added to + task_request. + task_request: The taskqueue_service_pb.TaskQueueAddRequest to populate. + transactional: If true then populates the task_request.transaction message + with information from the enclosing transaction (if any). + + Raises: + BadTaskStateError: If the task was already added to a Queue. + BadTransactionState: If the transactional argument is True and there is no + enclosing transaction. + InvalidTaskNameError: If the transactional argument is True and the task + is named. 
+ """ if task.was_enqueued: raise BadTaskStateError('Task has already been enqueued') - request = taskqueue_service_pb.TaskQueueAddRequest() - response = taskqueue_service_pb.TaskQueueAddResponse() - adjusted_url = task.url if task.on_queue_url: adjusted_url = self.__url + task.url - request.set_queue_name(self.__name) - request.set_eta_usec(int(time.mktime(task.eta.utctimetuple())) * 10**6) - request.set_method(_METHOD_MAP.get(task.method)) - request.set_url(adjusted_url) + task_request.set_queue_name(self.__name) + task_request.set_eta_usec( + int(time.mktime(task.eta.utctimetuple())) * 10**6) + task_request.set_method(_METHOD_MAP.get(task.method)) + task_request.set_url(adjusted_url) if task.name: - request.set_task_name(task.name) + task_request.set_task_name(task.name) else: - request.set_task_name('') + task_request.set_task_name('') if task.payload: - request.set_body(task.payload) + task_request.set_body(task.payload) for key, value in _flatten_params(task.headers): - header = request.add_header() + header = task_request.add_header() header.set_key(key) header.set_value(value) if transactional: from google.appengine.api import datastore - datastore._MaybeSetupTransaction(request, []) - - if request.has_transaction() and task.name: - raise InvalidTaskNameError('Task bound to a transaction cannot be named.') - - call_tuple = ('taskqueue', 'Add', request, response) - try: - apiproxy_stub_map.MakeSyncCall(*call_tuple) - except apiproxy_errors.ApplicationError, e: - self.__TranslateError(e) + if not datastore._MaybeSetupTransaction(task_request, []): + raise BadTransactionState( + 'Transactional adds are not allowed outside of transactions') - if response.has_chosen_task_name(): - task._Task__name = response.chosen_task_name() - task._Task__enqueued = True - return task + if task_request.has_transaction() and task.name: + raise InvalidTaskNameError( + 'Task bound to a transaction cannot be named.') @property def name(self): @@ -575,70 +704,37 @@ class Queue(object): return self.__name @staticmethod - def __TranslateError(error): + def __TranslateError(error, detail=''): """Translates a TaskQueueServiceError into an exception. Args: error: Value from TaskQueueServiceError enum. + detail: A human-readable description of the error. - Raises: + Returns: The corresponding Exception sub-class for that error code. 
""" - if (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE): - raise UnknownQueueError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR): - raise TransientError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR): - raise InternalError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE): - raise TaskTooLargeError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.INVALID_TASK_NAME): - raise InvalidTaskNameError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME): - raise InvalidQueueNameError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.INVALID_URL): - raise InvalidUrlError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_RATE): - raise InvalidQueueError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED): - raise PermissionDeniedError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS): - raise TaskAlreadyExistsError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK): - raise TombstonedTaskError(error.error_detail) - elif (error.application_error == - taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA): - raise InvalidTaskError(error.error_detail) - elif ((error.application_error >= - taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR) and - isinstance(error.application_error, int)): + if (error >= taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR + and isinstance(error, int)): from google.appengine.api import datastore - error.application_error = (error.application_error - - taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR) - datastore_exception = datastore._ToDatastoreError(error) + datastore_exception = datastore._DatastoreExceptionFromErrorCodeAndDetail( + error - taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR, + detail) class JointException(datastore_exception.__class__, DatastoreError): """There was a datastore error while accessing the queue.""" __msg = (u'taskqueue.DatastoreError caused by: %s %s' % - (datastore_exception.__class__, error.error_detail)) + (datastore_exception.__class__, detail)) def __str__(self): return JointException.__msg - raise JointException + return JointException() else: - raise Error('Application error %s: %s' % - (error.application_error, error.error_detail)) + exception_class = _ERROR_MAPPING.get(error, None) + if exception_class: + return exception_class(detail) + else: + return Error('Application error %s: %s' % (error, detail)) def add(*args, **kwargs): diff --git a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py old mode 100644 new mode 100755 index 4fe5c89..af28df3 --- a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py +++ b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py @@ -23,6 +23,7 @@ __pychecker__ = """maxreturns=0 maxbranches=0 no-callinit unusednames=printElemNumber,debug_strs 
no-special""" from google.appengine.datastore.datastore_v3_pb import * +from google.net.proto.message_set import MessageSet class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage): OK = 0 @@ -41,6 +42,10 @@ class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage): INVALID_REQUEST = 13 UNKNOWN_TASK = 14 TOMBSTONED_QUEUE = 15 + DUPLICATE_TASK_NAME = 16 + SKIPPED = 17 + TOO_MANY_TASKS = 18 + INVALID_PAYLOAD = 19 DATASTORE_ERROR = 10000 _ErrorCode_NAMES = { @@ -60,6 +65,10 @@ class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage): 13: "INVALID_REQUEST", 14: "UNKNOWN_TASK", 15: "TOMBSTONED_QUEUE", + 16: "DUPLICATE_TASK_NAME", + 17: "SKIPPED", + 18: "TOO_MANY_TASKS", + 19: "INVALID_PAYLOAD", 10000: "DATASTORE_ERROR", } @@ -352,6 +361,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): crontimetable_ = None has_description_ = 0 description_ = "" + has_payload_ = 0 + payload_ = None def __init__(self, contents=None): self.header_ = [] @@ -514,6 +525,24 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): def has_description(self): return self.has_description_ + def payload(self): + if self.payload_ is None: + self.lazy_init_lock_.acquire() + try: + if self.payload_ is None: self.payload_ = MessageSet() + finally: + self.lazy_init_lock_.release() + return self.payload_ + + def mutable_payload(self): self.has_payload_ = 1; return self.payload() + + def clear_payload(self): + if self.has_payload_: + self.has_payload_ = 0; + if self.payload_ is not None: self.payload_.Clear() + + def has_payload(self): return self.has_payload_ + def MergeFrom(self, x): assert x is not self @@ -528,6 +557,7 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): if (x.has_app_id()): self.set_app_id(x.app_id()) if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable()) if (x.has_description()): self.set_description(x.description()) + if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload()) def Equals(self, x): if x is self: return 1 @@ -554,6 +584,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): if self.has_crontimetable_ and self.crontimetable_ != x.crontimetable_: return 0 if self.has_description_ != x.has_description_: return 0 if self.has_description_ and self.description_ != x.description_: return 0 + if self.has_payload_ != x.has_payload_: return 0 + if self.has_payload_ and self.payload_ != x.payload_: return 0 return 1 def IsInitialized(self, debug_strs=None): @@ -570,14 +602,11 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: eta_usec not set.') - if (not self.has_url_): - initialized = 0 - if debug_strs is not None: - debug_strs.append('Required field: url not set.') for p in self.header_: if not p.IsInitialized(debug_strs): initialized=0 if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0 if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0 + if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): @@ -586,7 +615,7 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): n += self.lengthString(len(self.task_name_)) n += self.lengthVarInt64(self.eta_usec_) if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_) - n += self.lengthString(len(self.url_)) + if (self.has_url_): n += 1 + self.lengthString(len(self.url_)) n += 2 * len(self.header_) for i in 
xrange(len(self.header_)): n += self.header_[i].ByteSize() if (self.has_body_): n += 1 + self.lengthString(len(self.body_)) @@ -594,7 +623,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_)) if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize() if (self.has_description_): n += 1 + self.lengthString(len(self.description_)) - return n + 4 + if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize()) + return n + 3 def Clear(self): self.clear_queue_name() @@ -608,6 +638,7 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): self.clear_app_id() self.clear_crontimetable() self.clear_description() + self.clear_payload() def OutputUnchecked(self, out): out.putVarInt32(10) @@ -616,8 +647,9 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): out.putPrefixedString(self.task_name_) out.putVarInt32(24) out.putVarInt64(self.eta_usec_) - out.putVarInt32(34) - out.putPrefixedString(self.url_) + if (self.has_url_): + out.putVarInt32(34) + out.putPrefixedString(self.url_) if (self.has_method_): out.putVarInt32(40) out.putVarInt32(self.method_) @@ -642,6 +674,10 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): if (self.has_description_): out.putVarInt32(122) out.putPrefixedString(self.description_) + if (self.has_payload_): + out.putVarInt32(130) + out.putVarInt32(self.payload_.ByteSize()) + self.payload_.OutputUnchecked(out) def TryMerge(self, d): while d.avail() > 0: @@ -682,6 +718,12 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): if tt == 122: self.set_description(d.getPrefixedString()) continue + if tt == 130: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_payload().TryMerge(tmp) + continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) @@ -712,6 +754,10 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): res+=self.crontimetable_.__str__(prefix + " ", printElemNumber) res+=prefix+"}\n" if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_)) + if self.has_payload_: + res+=prefix+"payload <\n" + res+=self.payload_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" return res @@ -733,6 +779,7 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): kCronTimetableschedule = 13 kCronTimetabletimezone = 14 kdescription = 15 + kpayload = 16 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", @@ -751,7 +798,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): 13: "schedule", 14: "timezone", 15: "description", - }, 15) + 16: "payload", + }, 16) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, @@ -770,7 +818,8 @@ class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage): 13: ProtocolBuffer.Encoder.STRING, 14: ProtocolBuffer.Encoder.STRING, 15: ProtocolBuffer.Encoder.STRING, - }, 15, ProtocolBuffer.Encoder.MAX_TYPE) + 16: ProtocolBuffer.Encoder.STRING, + }, 16, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" @@ -853,6 +902,299 @@ class TaskQueueAddResponse(ProtocolBuffer.ProtocolMessage): 1: ProtocolBuffer.Encoder.STRING, }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" +class TaskQueueBulkAddRequest(ProtocolBuffer.ProtocolMessage): + + def __init__(self, contents=None): + self.add_request_ = [] + if contents is not None: self.MergeFromString(contents) + + def add_request_size(self): 
return len(self.add_request_) + def add_request_list(self): return self.add_request_ + + def add_request(self, i): + return self.add_request_[i] + + def mutable_add_request(self, i): + return self.add_request_[i] + + def add_add_request(self): + x = TaskQueueAddRequest() + self.add_request_.append(x) + return x + + def clear_add_request(self): + self.add_request_ = [] + + def MergeFrom(self, x): + assert x is not self + for i in xrange(x.add_request_size()): self.add_add_request().CopyFrom(x.add_request(i)) + + def Equals(self, x): + if x is self: return 1 + if len(self.add_request_) != len(x.add_request_): return 0 + for e1, e2 in zip(self.add_request_, x.add_request_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + for p in self.add_request_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += 1 * len(self.add_request_) + for i in xrange(len(self.add_request_)): n += self.lengthString(self.add_request_[i].ByteSize()) + return n + 0 + + def Clear(self): + self.clear_add_request() + + def OutputUnchecked(self, out): + for i in xrange(len(self.add_request_)): + out.putVarInt32(10) + out.putVarInt32(self.add_request_[i].ByteSize()) + self.add_request_[i].OutputUnchecked(out) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 10: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.add_add_request().TryMerge(tmp) + continue + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + cnt=0 + for e in self.add_request_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("add_request%s <\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) + + kadd_request = 1 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "add_request", + }, 1) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STRING, + }, 1, ProtocolBuffer.Encoder.MAX_TYPE) + + _STYLE = """""" + _STYLE_CONTENT_TYPE = """""" +class TaskQueueBulkAddResponse_TaskResult(ProtocolBuffer.ProtocolMessage): + has_result_ = 0 + result_ = 0 + has_chosen_task_name_ = 0 + chosen_task_name_ = "" + + def __init__(self, contents=None): + if contents is not None: self.MergeFromString(contents) + + def result(self): return self.result_ + + def set_result(self, x): + self.has_result_ = 1 + self.result_ = x + + def clear_result(self): + if self.has_result_: + self.has_result_ = 0 + self.result_ = 0 + + def has_result(self): return self.has_result_ + + def chosen_task_name(self): return self.chosen_task_name_ + + def set_chosen_task_name(self, x): + self.has_chosen_task_name_ = 1 + self.chosen_task_name_ = x + + def clear_chosen_task_name(self): + if self.has_chosen_task_name_: + self.has_chosen_task_name_ = 0 + self.chosen_task_name_ = "" + + def has_chosen_task_name(self): return self.has_chosen_task_name_ + + + def MergeFrom(self, x): + assert x is not self + if (x.has_result()): self.set_result(x.result()) + if (x.has_chosen_task_name()): self.set_chosen_task_name(x.chosen_task_name()) + + def Equals(self, x): + if x is self: return 1 + if self.has_result_ != x.has_result_: return 0 + if self.has_result_ and 
self.result_ != x.result_: return 0 + if self.has_chosen_task_name_ != x.has_chosen_task_name_: return 0 + if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + if (not self.has_result_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: result not set.') + return initialized + + def ByteSize(self): + n = 0 + n += self.lengthVarInt64(self.result_) + if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_)) + return n + 1 + + def Clear(self): + self.clear_result() + self.clear_chosen_task_name() + + def OutputUnchecked(self, out): + out.putVarInt32(16) + out.putVarInt32(self.result_) + if (self.has_chosen_task_name_): + out.putVarInt32(26) + out.putPrefixedString(self.chosen_task_name_) + + def TryMerge(self, d): + while 1: + tt = d.getVarInt32() + if tt == 12: break + if tt == 16: + self.set_result(d.getVarInt32()) + continue + if tt == 26: + self.set_chosen_task_name(d.getPrefixedString()) + continue + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + if self.has_result_: res+=prefix+("result: %s\n" % self.DebugFormatInt32(self.result_)) + if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_)) + return res + +class TaskQueueBulkAddResponse(ProtocolBuffer.ProtocolMessage): + + def __init__(self, contents=None): + self.taskresult_ = [] + if contents is not None: self.MergeFromString(contents) + + def taskresult_size(self): return len(self.taskresult_) + def taskresult_list(self): return self.taskresult_ + + def taskresult(self, i): + return self.taskresult_[i] + + def mutable_taskresult(self, i): + return self.taskresult_[i] + + def add_taskresult(self): + x = TaskQueueBulkAddResponse_TaskResult() + self.taskresult_.append(x) + return x + + def clear_taskresult(self): + self.taskresult_ = [] + + def MergeFrom(self, x): + assert x is not self + for i in xrange(x.taskresult_size()): self.add_taskresult().CopyFrom(x.taskresult(i)) + + def Equals(self, x): + if x is self: return 1 + if len(self.taskresult_) != len(x.taskresult_): return 0 + for e1, e2 in zip(self.taskresult_, x.taskresult_): + if e1 != e2: return 0 + return 1 + + def IsInitialized(self, debug_strs=None): + initialized = 1 + for p in self.taskresult_: + if not p.IsInitialized(debug_strs): initialized=0 + return initialized + + def ByteSize(self): + n = 0 + n += 2 * len(self.taskresult_) + for i in xrange(len(self.taskresult_)): n += self.taskresult_[i].ByteSize() + return n + 0 + + def Clear(self): + self.clear_taskresult() + + def OutputUnchecked(self, out): + for i in xrange(len(self.taskresult_)): + out.putVarInt32(11) + self.taskresult_[i].OutputUnchecked(out) + out.putVarInt32(12) + + def TryMerge(self, d): + while d.avail() > 0: + tt = d.getVarInt32() + if tt == 11: + self.add_taskresult().TryMerge(d) + continue + if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError + d.skipData(tt) + + + def __str__(self, prefix="", printElemNumber=0): + res="" + cnt=0 + for e in self.taskresult_: + elm="" + if printElemNumber: elm="(%d)" % cnt + res+=prefix+("TaskResult%s {\n" % elm) + res+=e.__str__(prefix + " ", printElemNumber) + res+=prefix+"}\n" + cnt+=1 + return res + + + def _BuildTagLookupTable(sparse, maxtag, default=None): + return tuple([sparse.get(i, default) for i in xrange(0, 
1+maxtag)]) + + kTaskResultGroup = 1 + kTaskResultresult = 2 + kTaskResultchosen_task_name = 3 + + _TEXT = _BuildTagLookupTable({ + 0: "ErrorCode", + 1: "TaskResult", + 2: "result", + 3: "chosen_task_name", + }, 3) + + _TYPES = _BuildTagLookupTable({ + 0: ProtocolBuffer.Encoder.NUMERIC, + 1: ProtocolBuffer.Encoder.STARTGROUP, + 2: ProtocolBuffer.Encoder.NUMERIC, + 3: ProtocolBuffer.Encoder.STRING, + }, 3, ProtocolBuffer.Encoder.MAX_TYPE) + _STYLE = """""" _STYLE_CONTENT_TYPE = """""" class TaskQueueDeleteRequest(ProtocolBuffer.ProtocolMessage): @@ -3222,6 +3564,8 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): runlog_ = None has_description_ = 0 description_ = "" + has_payload_ = 0 + payload_ = None def __init__(self, contents=None): self.header_ = [] @@ -3397,6 +3741,24 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): def has_description(self): return self.has_description_ + def payload(self): + if self.payload_ is None: + self.lazy_init_lock_.acquire() + try: + if self.payload_ is None: self.payload_ = MessageSet() + finally: + self.lazy_init_lock_.release() + return self.payload_ + + def mutable_payload(self): self.has_payload_ = 1; return self.payload() + + def clear_payload(self): + if self.has_payload_: + self.has_payload_ = 0; + if self.payload_ is not None: self.payload_.Clear() + + def has_payload(self): return self.has_payload_ + def MergeFrom(self, x): assert x is not self @@ -3412,6 +3774,7 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): if (x.has_crontimetable()): self.mutable_crontimetable().MergeFrom(x.crontimetable()) if (x.has_runlog()): self.mutable_runlog().MergeFrom(x.runlog()) if (x.has_description()): self.set_description(x.description()) + if (x.has_payload()): self.mutable_payload().MergeFrom(x.payload()) def Equals(self, x): if x is self: return 1 @@ -3440,6 +3803,8 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): if self.has_runlog_ and self.runlog_ != x.runlog_: return 0 if self.has_description_ != x.has_description_: return 0 if self.has_description_ and self.description_ != x.description_: return 0 + if self.has_payload_ != x.has_payload_: return 0 + if self.has_payload_ and self.payload_ != x.payload_: return 0 return 1 def IsInitialized(self, debug_strs=None): @@ -3452,10 +3817,6 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): initialized = 0 if debug_strs is not None: debug_strs.append('Required field: eta_usec not set.') - if (not self.has_url_): - initialized = 0 - if debug_strs is not None: - debug_strs.append('Required field: url not set.') if (not self.has_method_): initialized = 0 if debug_strs is not None: @@ -3468,13 +3829,14 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): debug_strs.append('Required field: creation_time_usec not set.') if (self.has_crontimetable_ and not self.crontimetable_.IsInitialized(debug_strs)): initialized = 0 if (self.has_runlog_ and not self.runlog_.IsInitialized(debug_strs)): initialized = 0 + if (self.has_payload_ and not self.payload_.IsInitialized(debug_strs)): initialized = 0 return initialized def ByteSize(self): n = 0 n += self.lengthString(len(self.task_name_)) n += self.lengthVarInt64(self.eta_usec_) - n += self.lengthString(len(self.url_)) + if (self.has_url_): n += 1 + self.lengthString(len(self.url_)) n += self.lengthVarInt64(self.method_) if (self.has_retry_count_): n += 1 + self.lengthVarInt64(self.retry_count_) n += 2 * len(self.header_) @@ -3485,7 
+3847,8 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): if (self.has_crontimetable_): n += 2 + self.crontimetable_.ByteSize() if (self.has_runlog_): n += 4 + self.runlog_.ByteSize() if (self.has_description_): n += 2 + self.lengthString(len(self.description_)) - return n + 5 + if (self.has_payload_): n += 2 + self.lengthString(self.payload_.ByteSize()) + return n + 4 def Clear(self): self.clear_task_name() @@ -3500,14 +3863,16 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): self.clear_crontimetable() self.clear_runlog() self.clear_description() + self.clear_payload() def OutputUnchecked(self, out): out.putVarInt32(18) out.putPrefixedString(self.task_name_) out.putVarInt32(24) out.putVarInt64(self.eta_usec_) - out.putVarInt32(34) - out.putPrefixedString(self.url_) + if (self.has_url_): + out.putVarInt32(34) + out.putPrefixedString(self.url_) out.putVarInt32(40) out.putVarInt32(self.method_) if (self.has_retry_count_): @@ -3536,6 +3901,10 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): if (self.has_description_): out.putVarInt32(170) out.putPrefixedString(self.description_) + if (self.has_payload_): + out.putVarInt32(178) + out.putVarInt32(self.payload_.ByteSize()) + self.payload_.OutputUnchecked(out) def TryMerge(self, d): while 1: @@ -3577,6 +3946,12 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): if tt == 170: self.set_description(d.getPrefixedString()) continue + if tt == 178: + length = d.getVarInt32() + tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) + d.skip(length) + self.mutable_payload().TryMerge(tmp) + continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) @@ -3608,6 +3983,10 @@ class TaskQueueQueryTasksResponse_Task(ProtocolBuffer.ProtocolMessage): res+=self.runlog_.__str__(prefix + " ", printElemNumber) res+=prefix+"}\n" if self.has_description_: res+=prefix+("description: %s\n" % self.DebugFormatString(self.description_)) + if self.has_payload_: + res+=prefix+"payload <\n" + res+=self.payload_.__str__(prefix + " ", printElemNumber) + res+=prefix+">\n" return res class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage): @@ -3712,6 +4091,7 @@ class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage): kTaskRunLogelapsed_usec = 19 kTaskRunLogresponse_code = 20 kTaskdescription = 21 + kTaskpayload = 22 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", @@ -3736,7 +4116,8 @@ class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage): 19: "elapsed_usec", 20: "response_code", 21: "description", - }, 21) + 22: "payload", + }, 22) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, @@ -3761,9 +4142,10 @@ class TaskQueueQueryTasksResponse(ProtocolBuffer.ProtocolMessage): 19: ProtocolBuffer.Encoder.NUMERIC, 20: ProtocolBuffer.Encoder.NUMERIC, 21: ProtocolBuffer.Encoder.STRING, - }, 21, ProtocolBuffer.Encoder.MAX_TYPE) + 22: ProtocolBuffer.Encoder.STRING, + }, 22, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" -__all__ = 
['TaskQueueServiceError','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddRequest_CronTimetable','TaskQueueAddResponse','TaskQueueDeleteRequest','TaskQueueDeleteResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueScannerQueueInfo','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats','TaskQueuePurgeQueueRequest','TaskQueuePurgeQueueResponse','TaskQueueDeleteQueueRequest','TaskQueueDeleteQueueResponse','TaskQueueQueryTasksRequest','TaskQueueQueryTasksResponse','TaskQueueQueryTasksResponse_TaskHeader','TaskQueueQueryTasksResponse_TaskCronTimetable','TaskQueueQueryTasksResponse_TaskRunLog','TaskQueueQueryTasksResponse_Task'] +__all__ = ['TaskQueueServiceError','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddRequest_CronTimetable','TaskQueueAddResponse','TaskQueueBulkAddRequest','TaskQueueBulkAddResponse','TaskQueueBulkAddResponse_TaskResult','TaskQueueDeleteRequest','TaskQueueDeleteResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueScannerQueueInfo','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats','TaskQueuePurgeQueueRequest','TaskQueuePurgeQueueResponse','TaskQueueDeleteQueueRequest','TaskQueueDeleteQueueResponse','TaskQueueQueryTasksRequest','TaskQueueQueryTasksResponse','TaskQueueQueryTasksResponse_TaskHeader','TaskQueueQueryTasksResponse_TaskCronTimetable','TaskQueueQueryTasksResponse_TaskRunLog','TaskQueueQueryTasksResponse_Task'] diff --git a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py index c9de267..1ab31f8 100755 --- a/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py +++ b/google-appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py @@ -221,6 +221,17 @@ class _DummyTaskStore(object): task.set_method( taskqueue_service_pb.TaskQueueQueryTasksResponse_Task.GET) task.set_retry_count(max(0, random.randint(-10, 5))) + if random.random() < 0.3: + random_headers = [('nexus', 'one'), + ('foo', 'bar'), + ('content-type', 'text/plain'), + ('from', 'user@email.com')] + for _ in xrange(random.randint(1, 4)): + elem = random.randint(0, len(random_headers)-1) + key, value = random_headers.pop(elem) + header_proto = task.add_header() + header_proto.set_key(key) + header_proto.set_value(value) return task for _ in range(num_tasks): @@ -312,55 +323,164 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub): self._app_queues = {} - def _Dynamic_Add(self, request, response): - """Local implementation of the Add RPC in TaskQueueService. + def _ChooseTaskName(self): + """Returns a string containing a unique task name.""" + self._next_task_id += 1 + return 'task%d' % (self._next_task_id - 1) - Must adhere to the '_Dynamic_' naming convention for stubbing to work. - See taskqueue_service.proto for a full description of the RPC. + def _VerifyTaskQueueAddRequest(self, request): + """Checks that a TaskQueueAddRequest is valid. + + Checks that a TaskQueueAddRequest specifies a valid eta and a valid queue. Args: - request: A taskqueue_service_pb.TaskQueueAddRequest. - response: A taskqueue_service_pb.TaskQueueAddResponse. 
+ request: The taskqueue_service_pb.TaskQueueAddRequest to validate. + + Returns: + A taskqueue_service_pb.TaskQueueServiceError indicating any problems with + the request or taskqueue_service_pb.TaskQueueServiceError.OK if it is + valid. """ if request.eta_usec() < 0: - raise apiproxy_errors.ApplicationError( - taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA) + return taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA eta = datetime.datetime.utcfromtimestamp(request.eta_usec() / 1e6) max_eta = (datetime.datetime.utcnow() + datetime.timedelta(days=MAX_ETA_DELTA_DAYS)) if eta > max_eta: - raise apiproxy_errors.ApplicationError( - taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA) + return taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA + + return taskqueue_service_pb.TaskQueueServiceError.OK + + def _Dynamic_Add(self, request, response): + bulk_request = taskqueue_service_pb.TaskQueueBulkAddRequest() + bulk_response = taskqueue_service_pb.TaskQueueBulkAddResponse() - if not self._IsValidQueue(request.queue_name()): + bulk_request.add_add_request().CopyFrom(request) + self._Dynamic_BulkAdd(bulk_request, bulk_response) + + assert bulk_response.taskresult_size() == 1 + result = bulk_response.taskresult(0).result() + + if result != taskqueue_service_pb.TaskQueueServiceError.OK: + raise apiproxy_errors.ApplicationError(result) + elif bulk_response.taskresult(0).has_chosen_task_name(): + response.set_chosen_task_name( + bulk_response.taskresult(0).chosen_task_name()) + + def _Dynamic_BulkAdd(self, request, response): + """Add many tasks to a queue using a single request. + + Args: + request: The taskqueue_service_pb.TaskQueueBulkAddRequest. See + taskqueue_service.proto. + response: The taskqueue_service_pb.TaskQueueBulkAddResponse. See + taskqueue_service.proto. 
+ """ + + assert request.add_request_size(), 'taskqueue should prevent empty requests' + + if not self._IsValidQueue(request.add_request(0).queue_name()): raise apiproxy_errors.ApplicationError( taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE) - if not request.task_name(): - request.set_task_name('task%d' % self._next_task_id) - response.set_chosen_task_name(request.task_name()) - self._next_task_id += 1 + error_found = False + task_results_with_chosen_names = [] + + for add_request in request.add_request_list(): + task_result = response.add_taskresult() + error = self._VerifyTaskQueueAddRequest(add_request) + if error == taskqueue_service_pb.TaskQueueServiceError.OK: + if not add_request.task_name(): + chosen_name = self._ChooseTaskName() + add_request.set_task_name(chosen_name) + task_results_with_chosen_names.append(task_result) + task_result.set_result( + taskqueue_service_pb.TaskQueueServiceError.SKIPPED) + else: + error_found = True + task_result.set_result(error) + + if error_found: + return + + if request.add_request(0).has_transaction(): + self._TransactionalBulkAdd(request) + elif request.add_request(0).has_app_id(): + self._DummyTaskStoreBulkAdd(request, response) + else: + self._NonTransactionalBulkAdd(request, response) + + for add_request, task_result in zip(request.add_request_list(), + response.taskresult_list()): + if (task_result.result() == + taskqueue_service_pb.TaskQueueServiceError.SKIPPED): + task_result.set_result(taskqueue_service_pb.TaskQueueServiceError.OK) + if task_result in task_results_with_chosen_names: + task_result.set_chosen_task_name(add_request.task_name()) - if request.has_transaction(): + def _TransactionalBulkAdd(self, request): + """Uses datastore.AddActions to associate tasks with a transaction. + + Args: + request: The taskqueue_service_pb.TaskQueueBulkAddRequest containing the + tasks to add. N.B. all tasks in the request have been validated and + assigned unique names. + """ + try: + apiproxy_stub_map.MakeSyncCall( + 'datastore_v3', 'AddActions', request, api_base_pb.VoidProto()) + except apiproxy_errors.ApplicationError, e: + raise apiproxy_errors.ApplicationError( + e.application_error + + taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR, + e.error_detail) + + def _DummyTaskStoreBulkAdd(self, request, response): + """Adds tasks to the appropriate DummyTaskStore. + + Args: + request: The taskqueue_service_pb.TaskQueueBulkAddRequest containing the + tasks to add. N.B. all tasks in the request have been validated and + those with empty names have been assigned unique names. + response: The taskqueue_service_pb.TaskQueueBulkAddResponse to populate + with the results. N.B. the chosen_task_name field in the response will + not be filled-in. 
+ """ + store = self.GetDummyTaskStore(request.add_request(0).app_id(), + request.add_request(0).queue_name()) + for add_request, task_result in zip(request.add_request_list(), + response.taskresult_list()): try: - apiproxy_stub_map.MakeSyncCall( - 'datastore_v3', 'AddAction', request, api_base_pb.VoidProto()) + store.Add(add_request) except apiproxy_errors.ApplicationError, e: - e.application_error = (e.application_error + - taskqueue_service_pb.TaskQueueServiceError.DATASTORE_ERROR) - raise e - elif request.has_app_id(): - store = self.GetDummyTaskStore(request.app_id(), request.queue_name()) - store.Add(request) - else: - tasks = self._taskqueues.setdefault(request.queue_name(), []) - for task in tasks: - if task.task_name() == request.task_name(): - raise apiproxy_errors.ApplicationError( - taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS) - tasks.append(request) - tasks.sort(_CompareTasksByEta) + task_result.set_result(e.application_error) + else: + task_result.set_result(taskqueue_service_pb.TaskQueueServiceError.OK) + + def _NonTransactionalBulkAdd(self, request, response): + """Adds tasks to the appropriate list in in self._taskqueues. + + Args: + request: The taskqueue_service_pb.TaskQueueBulkAddRequest containing the + tasks to add. N.B. all tasks in the request have been validated and + those with empty names have been assigned unique names. + response: The taskqueue_service_pb.TaskQueueBulkAddResponse to populate + with the results. N.B. the chosen_task_name field in the response will + not be filled-in. + """ + existing_tasks = self._taskqueues.setdefault( + request.add_request(0).queue_name(), []) + existing_task_names = set(task.task_name() for task in existing_tasks) + + for add_request, task_result in zip(request.add_request_list(), + response.taskresult_list()): + if add_request.task_name() in existing_task_names: + task_result.set_result( + taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS) + else: + existing_tasks.append(add_request) + existing_tasks.sort(_CompareTasksByEta) def _IsValidQueue(self, queue_name): """Determines whether a queue is valid, i.e. tasks can be added to it. @@ -536,6 +656,10 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub): Not used. """ queues = self._app_queues.setdefault(request.app_id(), {}) + if request.queue_name() in queues and queues[request.queue_name()] is None: + raise apiproxy_errors.ApplicationError( + taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE) + defensive_copy = taskqueue_service_pb.TaskQueueUpdateQueueRequest() defensive_copy.CopyFrom(request) queues[request.queue_name()] = defensive_copy @@ -551,7 +675,13 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub): response: A taskqueue_service_pb.TaskQueueFetchQueuesResponse. 
""" queues = self._app_queues.get(request.app_id(), {}) - for unused_key, queue in sorted(queues.items()[:request.max_rows()]): + for unused_key, queue in sorted(queues.items()): + if request.max_rows() == response.queue_size(): + break + + if queue is None: + continue + response_queue = response.add_queue() response_queue.set_queue_name(queue.queue_name()) response_queue.set_bucket_refill_per_second( @@ -668,7 +798,11 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub): if request.queue_name() not in queues: raise apiproxy_errors.ApplicationError( taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE) - del queues[request.queue_name()] + elif queues[request.queue_name()] is None: + raise apiproxy_errors.ApplicationError( + taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE) + + queues[request.queue_name()] = None def _Dynamic_PurgeQueue(self, request, response): """Local purge implementation of TaskQueueService.PurgeQueue. @@ -682,10 +816,13 @@ class TaskQueueServiceStub(apiproxy_stub.APIProxyStub): taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME) queues = self._app_queues.get(request.app_id(), {}) - if (request.queue_name() != DEFAULT_QUEUE_NAME and - request.queue_name() not in queues): - raise apiproxy_errors.ApplicationError( - taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE) + if request.queue_name() != DEFAULT_QUEUE_NAME: + if request.queue_name() not in queues: + raise apiproxy_errors.ApplicationError( + taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE) + elif queues[request.queue_name()] is None: + raise apiproxy_errors.ApplicationError( + taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_QUEUE) store = self.GetDummyTaskStore(request.app_id(), request.queue_name()) for task in store.Lookup(store.Count()): diff --git a/google-appengine/google/appengine/api/mail.py b/google-appengine/google/appengine/api/mail.py index 2895bdc..10ee784 100755 --- a/google-appengine/google/appengine/api/mail.py +++ b/google-appengine/google/appengine/api/mail.py @@ -64,6 +64,7 @@ EXTENSION_MIME_MAP = { 'bmp': 'image/x-ms-bmp', 'css': 'text/css', 'csv': 'text/csv', + 'doc': 'application/msword', 'diff': 'text/plain', 'flac': 'audio/flac', 'gif': 'image/gif', @@ -81,16 +82,23 @@ EXTENSION_MIME_MAP = { 'mpe': 'video/mpeg', 'mpeg': 'video/mpeg', 'mpg': 'video/mpeg', + 'odp': 'application/vnd.oasis.opendocument.presentation', + 'ods': 'application/vnd.oasis.opendocument.spreadsheet', + 'odt': 'application/vnd.oasis.opendocument.text', 'oga': 'audio/ogg', 'ogg': 'audio/ogg', 'ogv': 'video/ogg', 'pdf': 'application/pdf', 'png': 'image/png', 'pot': 'text/plain', + 'pps': 'application/vnd.ms-powerpoint', + 'ppt': 'application/vnd.ms-powerpoint', 'qt': 'video/quicktime', 'rmi': 'audio/mid', 'rss': 'text/rss+xml', 'snd': 'audio/basic', + 'sxc': 'application/vnd.sun.xml.calc', + 'sxw': 'application/vnd.sun.xml.writer', 'text': 'text/plain', 'tif': 'image/tiff', 'tiff': 'image/tiff', @@ -98,6 +106,7 @@ EXTENSION_MIME_MAP = { 'vcf': 'text/directory', 'wav': 'audio/x-wav', 'wbmp': 'image/vnd.wap.wbmp', + 'xls': 'application/vnd.ms-excel', } EXTENSION_WHITELIST = frozenset(EXTENSION_MIME_MAP.iterkeys()) @@ -831,7 +840,8 @@ class _EmailMessageBase(object): filename = mime_message.get_param('name') payload = EncodedPayload(payload, - mime_message.get_charset(), + (mime_message.get_content_charset() or + mime_message.get_charset()), mime_message['content-transfer-encoding']) if filename: diff --git a/google-appengine/google/appengine/api/mail_service_pb.py 
b/google-appengine/google/appengine/api/mail_service_pb.py old mode 100644 new mode 100755 diff --git a/google-appengine/google/appengine/api/memcache/__init__.py b/google-appengine/google/appengine/api/memcache/__init__.py index 4ae08fa..6a9f92a 100755 --- a/google-appengine/google/appengine/api/memcache/__init__.py +++ b/google-appengine/google/appengine/api/memcache/__init__.py @@ -85,6 +85,23 @@ TYPE_BOOL = 5 CAPABILITY = capabilities.CapabilitySet('memcache') +def _add_name_space(message, namespace=None): + """Populate the name_space field in a messagecol buffer. + + Args: + message: A messagecol buffer supporting the set_name_space() operation. + namespace: The name of the namespace part. If None, use the + default namespace. The empty namespace (i.e. '') will clear + the name_space field. + """ + if namespace is None: + namespace = namespace_manager.get_namespace() + if not namespace: + message.clear_name_space() + else: + message.set_name_space(namespace) + + def _key_string(key, key_prefix='', server_to_user_dict=None): """Utility function to handle different ways of requesting keys. @@ -405,7 +422,7 @@ class Client(object): """ request = MemcacheGetRequest() request.add_key(_key_string(key)) - namespace_manager._add_name_space(request, namespace) + _add_name_space(request, namespace) response = MemcacheGetResponse() try: self._make_sync_call('memcache', 'Get', request, response) @@ -441,7 +458,7 @@ class Client(object): the keys in the returned dictionary. """ request = MemcacheGetRequest() - namespace_manager._add_name_space(request, namespace) + _add_name_space(request, namespace) response = MemcacheGetResponse() user_key = {} for key in keys: @@ -486,7 +503,7 @@ class Client(object): raise ValueError('Delete timeout must be non-negative.') request = MemcacheDeleteRequest() - namespace_manager._add_name_space(request, namespace) + _add_name_space(request, namespace) response = MemcacheDeleteResponse() delete_item = request.add_item() @@ -530,7 +547,7 @@ class Client(object): raise ValueError('Delete timeout must not be negative.') request = MemcacheDeleteRequest() - namespace_manager._add_name_space(request, namespace) + _add_name_space(request, namespace) response = MemcacheDeleteResponse() for key in keys: @@ -641,7 +658,7 @@ class Client(object): item.set_flags(flags) item.set_set_policy(policy) item.set_expiration_time(int(math.ceil(time))) - namespace_manager._add_name_space(request, namespace) + _add_name_space(request, namespace) response = MemcacheSetResponse() try: self._make_sync_call('memcache', 'Set', request, response) @@ -682,6 +699,7 @@ class Client(object): raise ValueError('Expiration must not be negative.') request = MemcacheSetRequest() + _add_name_space(request, namespace) user_key = {} server_keys = [] for key, value in mapping.iteritems(): @@ -695,7 +713,6 @@ class Client(object): item.set_flags(flags) item.set_set_policy(policy) item.set_expiration_time(int(math.ceil(time))) - namespace_manager._add_name_space(request, namespace) response = MemcacheSetResponse() try: @@ -902,7 +919,7 @@ class Client(object): pass request = MemcacheIncrementRequest() - namespace_manager._add_name_space(request, namespace) + _add_name_space(request, namespace) response = MemcacheIncrementResponse() request.set_key(_key_string(key)) request.set_delta(delta) @@ -950,7 +967,7 @@ class Client(object): request = MemcacheBatchIncrementRequest() response = MemcacheBatchIncrementResponse() - namespace_manager._add_name_space(request, namespace) + _add_name_space(request, namespace) 
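[Editor's sketch, not part of the patch: the memcache-local _add_name_space()
helper introduced above distinguishes None from the empty string. This
assumes it runs inside this module, where _add_name_space and the request
classes such as MemcacheIncrementRequest are in scope:

    req = MemcacheIncrementRequest()
    _add_name_space(req)                        # None: fall back to namespace_manager.get_namespace()
    _add_name_space(req, namespace='')          # '': clear_name_space(), i.e. the root namespace
    _add_name_space(req, namespace='tenant1')   # explicit: set_name_space('tenant1')
]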
for key, delta in mapping.iteritems(): if not isinstance(delta, (int, long)): diff --git a/google-appengine/google/appengine/api/memcache/memcache_service_pb.py b/google-appengine/google/appengine/api/memcache/memcache_service_pb.py old mode 100644 new mode 100755 diff --git a/google-appengine/google/appengine/api/namespace_manager/__init__.py b/google-appengine/google/appengine/api/namespace_manager/__init__.py index 3d00b7b..fe3e795 100755 --- a/google-appengine/google/appengine/api/namespace_manager/__init__.py +++ b/google-appengine/google/appengine/api/namespace_manager/__init__.py @@ -15,73 +15,7 @@ # limitations under the License. # -"""Control the namespacing system used by various APIs. +"""Namespace Manager Module.""" -Each API call can specify an alternate namespace, but the functions -here can be used to change the default namespace. The default is set -before user code begins executing. -""" - - -import os - -ENV_DEFAULT_NAMESPACE = 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE' -ENV_CURRENT_NAMESPACE = '__INTERNAL_CURRENT_NAMESPACE' - - -def set_namespace(namespace): - """Set the default namespace to use for future calls, for this request only. - - Args: - namespace: A string naming the new namespace to use. None - string specifies the root namespace for this app. - """ - if namespace: - os.environ[ENV_CURRENT_NAMESPACE] = namespace - else: - os.environ.pop(ENV_CURRENT_NAMESPACE, None) - -def set_request_namespace(namespace): - """Deprecated. Use set_namespace(namespace).""" - return set_namespace(namespace) - -def get_namespace(): - """Get the name of the current default namespace. - - None indicates that the root namespace is the default. - """ - return os.getenv(ENV_CURRENT_NAMESPACE, None) - -def get_request_namespace(): - """Deprecated. Use get_namespace().""" - return get_namespace() - -def _enable_request_namespace(): - """Automatically enable namespace to default for domain. - - Calling this function will automatically default the namespace to the - chosen Google Apps domain for the current request. - """ - if ENV_CURRENT_NAMESPACE not in os.environ: - if ENV_DEFAULT_NAMESPACE in os.environ: - os.environ[ENV_CURRENT_NAMESPACE] = os.environ[ENV_DEFAULT_NAMESPACE] - else: - os.environ.pop(ENV_CURRENT_NAMESPACE, None) - - -def _add_name_space(request, namespace=None): - """Add a name_space field to a request. - - Args: - request: A protocol buffer supporting the set_name_space() operation. - namespace: The name of the namespace part. If None, use the - default namespace. - """ - _ns = namespace - if not _ns: - _ns = get_namespace() - if not _ns: - request.clear_name_space() - else: - request.set_name_space(_ns) +from namespace_manager import * diff --git a/google-appengine/google/appengine/api/namespace_manager/namespace_manager.py b/google-appengine/google/appengine/api/namespace_manager/namespace_manager.py new file mode 100755 index 0000000..cb9fe54 --- /dev/null +++ b/google-appengine/google/appengine/api/namespace_manager/namespace_manager.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Control the namespacing system used by various APIs.
+
+A namespace may be specified in various API calls exemplified
+by the datastore and memcache interfaces. The default can be
+specified using this module.
+"""
+
+
+
+import os
+import re
+
+__all__ = ['BadValueError',
+           'set_namespace',
+           'get_namespace',
+           'enable_request_namespace',
+           'validate_namespace',
+          ]
+
+
+_ENV_DEFAULT_NAMESPACE = 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'
+_ENV_CURRENT_NAMESPACE = 'HTTP_X_APPENGINE_CURRENT_NAMESPACE'
+
+_NAMESPACE_MAX_LENGTH = 100
+_NAMESPACE_PATTERN = r'^[0-9A-Za-z._-]{0,%s}$' % _NAMESPACE_MAX_LENGTH
+_NAMESPACE_RE = re.compile(_NAMESPACE_PATTERN)
+
+def set_namespace(namespace):
+  """Set the default namespace for the current HTTP request.
+
+  Args:
+    namespace: A string naming the new namespace to use. A value of None
+      will unset the default namespace value.
+  """
+  if namespace is None:
+    os.environ.pop(_ENV_CURRENT_NAMESPACE, None)
+  else:
+    validate_namespace(namespace)
+    os.environ[_ENV_CURRENT_NAMESPACE] = namespace
+
+
+def get_namespace():
+  """Get the current default namespace, or the empty ('') namespace if unset."""
+  return os.environ.get(_ENV_CURRENT_NAMESPACE, '')
+
+
+def enable_request_namespace():
+  """Set the default namespace to the Google Apps domain that referred this request.
+
+  Calling this function sets the default namespace to the Google Apps
+  domain that was used to create the URL for this request. It applies only
+  to the current request, and only if the current default namespace is
+  unset.
+  """
+  if _ENV_CURRENT_NAMESPACE not in os.environ:
+    if _ENV_DEFAULT_NAMESPACE in os.environ:
+      os.environ[_ENV_CURRENT_NAMESPACE] = os.environ[_ENV_DEFAULT_NAMESPACE]
+
+
+class BadValueError(Exception):
+  """Raised by validate_namespace()."""
+
+
+def validate_namespace(value, exception=BadValueError):
+  """Raises an exception if value is not a valid namespace string.
+
+  A namespace must be a string and may contain only letters, digits,
+  and the characters '.', '_' and '-', up to a maximum of 100 characters
+  ([0-9A-Za-z._-]{0,100}).
+
+  Args:
+    value: the value to validate.
+    exception: exception type to raise.
+  """
+  if not isinstance(value, basestring):
+    raise exception('value should be a string; received %r (a %s):' %
+                    (value, type(value)))
+  if not _NAMESPACE_RE.match(value):
+    raise exception('value does not match pattern "%s"' % _NAMESPACE_PATTERN)
diff --git a/google-appengine/google/appengine/api/queueinfo.py b/google-appengine/google/appengine/api/queueinfo.py
index bdaa358..b8d0dc5 100755
--- a/google-appengine/google/appengine/api/queueinfo.py
+++ b/google-appengine/google/appengine/api/queueinfo.py
@@ -41,6 +41,19 @@ queue:
 If this queue had been idle for a while before some jobs were submitted to
 it, the first 10 jobs submitted would be run immediately, then subsequent
 ones would be run once every 40s or so. The limit of 2000 per day would
 still apply.
+
+An app's queues are also subject to storage quota limits for their stored
+tasks, i.e. those tasks that have been added to queues but not yet executed.
+This quota is part of the app's total storage quota (which also covers the
+datastore and blobstore). An app can override the default portion of this
+quota available for taskqueue storage (100M) with a top-level field
+"total_storage_limit":
+
+total_storage_limit: 1.2G
+
+If no suffix is specified, the number is interpreted as bytes. Supported
+suffixes are B (bytes), K (kilobytes), M (megabytes), G (gigabytes) and
+T (terabytes). If total_storage_limit exceeds the total storage quota
+available to an app, it is clamped.
 """
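[Editor's sketch, not part of the patch: a worked example of the suffix
arithmetic performed by ParseTotalStorageLimit, added further down in this
file. Each suffix step past B multiplies the number by another factor of
1024:

    # '1000' -> 1000 bytes (no suffix means bytes)
    # '500K' -> 500 * 1024 = 512000 bytes
    # '1.2G' -> int(1.2 * 1024 ** 3) = 1288490188 bytes
    assert int(1.2 * 1024 ** 3) == 1288490188
]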
@@ -52,12 +65,16 @@ from google.appengine.api import yaml_object
 
 _NAME_REGEX = r'^[A-Za-z0-9-]{0,499}$'
 _RATE_REGEX = r'^(0|[0-9]+(\.[0-9]*)?/[smhd])'
+_TOTAL_STORAGE_LIMIT_REGEX = r'^([0-9]+(\.[0-9]*)?[BKMGT]?)'
 
 QUEUE = 'queue'
 
 NAME = 'name'
 RATE = 'rate'
 BUCKET_SIZE = 'bucket_size'
+TOTAL_STORAGE_LIMIT = 'total_storage_limit'
+
+BYTE_SUFFIXES = 'BKMGT'
 
 
 class MalformedQueueConfiguration(Exception):
@@ -76,7 +93,8 @@ class QueueEntry(validation.Validated):
 class QueueInfoExternal(validation.Validated):
   """QueueInfoExternal describes all queue entries for an application."""
   ATTRIBUTES = {
-      QUEUE: validation.Optional(validation.Repeated(QueueEntry))
+      TOTAL_STORAGE_LIMIT: validation.Optional(_TOTAL_STORAGE_LIMIT_REGEX),
+      QUEUE: validation.Optional(validation.Repeated(QueueEntry)),
   }
 
 
@@ -141,3 +159,34 @@ def ParseRate(rate):
     return number/(60 * 60)
   if unit == 'd':
     return number/(24 * 60 * 60)
+
+
+def ParseTotalStorageLimit(limit):
+  """Parses a string representing the storage bytes limit.
+
+  Optional limit suffixes are:
+      B (bytes), K (kilobytes), M (megabytes), G (gigabytes), T (terabytes)
+
+  Args:
+    limit: The storage bytes limit string.
+
+  Returns:
+    An int representing the storage limit in bytes.
+
+  Raises:
+    MalformedQueueConfiguration: if the limit argument isn't a valid Python
+      float followed by an optional suffix.
+  """
+  try:
+    if limit[-1] in BYTE_SUFFIXES:
+      number = float(limit[0:-1])
+      for c in BYTE_SUFFIXES:
+        if limit[-1] != c:
+          number = number * 1024
+        else:
+          return int(number)
+    else:
+      return int(limit)
+  except ValueError:
+    raise MalformedQueueConfiguration('Total Storage Limit "%s" is invalid.'
% + limit) + diff --git a/google-appengine/google/appengine/api/urlfetch_errors.py b/google-appengine/google/appengine/api/urlfetch_errors.py index e71ca5d..13bfac0 100755 --- a/google-appengine/google/appengine/api/urlfetch_errors.py +++ b/google-appengine/google/appengine/api/urlfetch_errors.py @@ -55,6 +55,3 @@ class ResponseTooLargeError(Error): class InvalidMethodError(Error): """Raised when an invalid value for 'method' is provided""" - -class InvalidMethodError(Error): - """Raised when an invalid value for 'method' is provided""" diff --git a/google-appengine/google/appengine/api/urlfetch_service_pb.py b/google-appengine/google/appengine/api/urlfetch_service_pb.py old mode 100644 new mode 100755 index 0254f09..34b6d5c --- a/google-appengine/google/appengine/api/urlfetch_service_pb.py +++ b/google-appengine/google/appengine/api/urlfetch_service_pb.py @@ -578,6 +578,12 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): externalbytesreceived_ = 0 has_finalurl_ = 0 finalurl_ = "" + has_apicpumilliseconds_ = 0 + apicpumilliseconds_ = 0 + has_apibytessent_ = 0 + apibytessent_ = 0 + has_apibytesreceived_ = 0 + apibytesreceived_ = 0 def __init__(self, contents=None): self.header_ = [] @@ -677,6 +683,45 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): def has_finalurl(self): return self.has_finalurl_ + def apicpumilliseconds(self): return self.apicpumilliseconds_ + + def set_apicpumilliseconds(self, x): + self.has_apicpumilliseconds_ = 1 + self.apicpumilliseconds_ = x + + def clear_apicpumilliseconds(self): + if self.has_apicpumilliseconds_: + self.has_apicpumilliseconds_ = 0 + self.apicpumilliseconds_ = 0 + + def has_apicpumilliseconds(self): return self.has_apicpumilliseconds_ + + def apibytessent(self): return self.apibytessent_ + + def set_apibytessent(self, x): + self.has_apibytessent_ = 1 + self.apibytessent_ = x + + def clear_apibytessent(self): + if self.has_apibytessent_: + self.has_apibytessent_ = 0 + self.apibytessent_ = 0 + + def has_apibytessent(self): return self.has_apibytessent_ + + def apibytesreceived(self): return self.apibytesreceived_ + + def set_apibytesreceived(self, x): + self.has_apibytesreceived_ = 1 + self.apibytesreceived_ = x + + def clear_apibytesreceived(self): + if self.has_apibytesreceived_: + self.has_apibytesreceived_ = 0 + self.apibytesreceived_ = 0 + + def has_apibytesreceived(self): return self.has_apibytesreceived_ + def MergeFrom(self, x): assert x is not self @@ -687,6 +732,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): if (x.has_externalbytessent()): self.set_externalbytessent(x.externalbytessent()) if (x.has_externalbytesreceived()): self.set_externalbytesreceived(x.externalbytesreceived()) if (x.has_finalurl()): self.set_finalurl(x.finalurl()) + if (x.has_apicpumilliseconds()): self.set_apicpumilliseconds(x.apicpumilliseconds()) + if (x.has_apibytessent()): self.set_apibytessent(x.apibytessent()) + if (x.has_apibytesreceived()): self.set_apibytesreceived(x.apibytesreceived()) def Equals(self, x): if x is self: return 1 @@ -705,6 +753,12 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): if self.has_externalbytesreceived_ and self.externalbytesreceived_ != x.externalbytesreceived_: return 0 if self.has_finalurl_ != x.has_finalurl_: return 0 if self.has_finalurl_ and self.finalurl_ != x.finalurl_: return 0 + if self.has_apicpumilliseconds_ != x.has_apicpumilliseconds_: return 0 + if self.has_apicpumilliseconds_ and self.apicpumilliseconds_ != x.apicpumilliseconds_: return 0 + if self.has_apibytessent_ != 
x.has_apibytessent_: return 0 + if self.has_apibytessent_ and self.apibytessent_ != x.apibytessent_: return 0 + if self.has_apibytesreceived_ != x.has_apibytesreceived_: return 0 + if self.has_apibytesreceived_ and self.apibytesreceived_ != x.apibytesreceived_: return 0 return 1 def IsInitialized(self, debug_strs=None): @@ -727,6 +781,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): if (self.has_externalbytessent_): n += 1 + self.lengthVarInt64(self.externalbytessent_) if (self.has_externalbytesreceived_): n += 1 + self.lengthVarInt64(self.externalbytesreceived_) if (self.has_finalurl_): n += 1 + self.lengthString(len(self.finalurl_)) + if (self.has_apicpumilliseconds_): n += 1 + self.lengthVarInt64(self.apicpumilliseconds_) + if (self.has_apibytessent_): n += 1 + self.lengthVarInt64(self.apibytessent_) + if (self.has_apibytesreceived_): n += 1 + self.lengthVarInt64(self.apibytesreceived_) return n + 1 def Clear(self): @@ -737,6 +794,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): self.clear_externalbytessent() self.clear_externalbytesreceived() self.clear_finalurl() + self.clear_apicpumilliseconds() + self.clear_apibytessent() + self.clear_apibytesreceived() def OutputUnchecked(self, out): if (self.has_content_): @@ -760,6 +820,15 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): if (self.has_finalurl_): out.putVarInt32(74) out.putPrefixedString(self.finalurl_) + if (self.has_apicpumilliseconds_): + out.putVarInt32(80) + out.putVarInt64(self.apicpumilliseconds_) + if (self.has_apibytessent_): + out.putVarInt32(88) + out.putVarInt64(self.apibytessent_) + if (self.has_apibytesreceived_): + out.putVarInt32(96) + out.putVarInt64(self.apibytesreceived_) def TryMerge(self, d): while d.avail() > 0: @@ -785,6 +854,15 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): if tt == 74: self.set_finalurl(d.getPrefixedString()) continue + if tt == 80: + self.set_apicpumilliseconds(d.getVarInt64()) + continue + if tt == 88: + self.set_apibytessent(d.getVarInt64()) + continue + if tt == 96: + self.set_apibytesreceived(d.getVarInt64()) + continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) @@ -805,6 +883,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): if self.has_externalbytessent_: res+=prefix+("ExternalBytesSent: %s\n" % self.DebugFormatInt64(self.externalbytessent_)) if self.has_externalbytesreceived_: res+=prefix+("ExternalBytesReceived: %s\n" % self.DebugFormatInt64(self.externalbytesreceived_)) if self.has_finalurl_: res+=prefix+("FinalUrl: %s\n" % self.DebugFormatString(self.finalurl_)) + if self.has_apicpumilliseconds_: res+=prefix+("ApiCpuMilliseconds: %s\n" % self.DebugFormatInt64(self.apicpumilliseconds_)) + if self.has_apibytessent_: res+=prefix+("ApiBytesSent: %s\n" % self.DebugFormatInt64(self.apibytessent_)) + if self.has_apibytesreceived_: res+=prefix+("ApiBytesReceived: %s\n" % self.DebugFormatInt64(self.apibytesreceived_)) return res @@ -820,6 +901,9 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): kExternalBytesSent = 7 kExternalBytesReceived = 8 kFinalUrl = 9 + kApiCpuMilliseconds = 10 + kApiBytesSent = 11 + kApiBytesReceived = 12 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", @@ -832,7 +916,10 @@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): 7: "ExternalBytesSent", 8: "ExternalBytesReceived", 9: "FinalUrl", - }, 9) + 10: "ApiCpuMilliseconds", + 11: "ApiBytesSent", + 12: "ApiBytesReceived", + }, 12) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, @@ -845,7 +932,10 
@@ class URLFetchResponse(ProtocolBuffer.ProtocolMessage): 7: ProtocolBuffer.Encoder.NUMERIC, 8: ProtocolBuffer.Encoder.NUMERIC, 9: ProtocolBuffer.Encoder.STRING, - }, 9, ProtocolBuffer.Encoder.MAX_TYPE) + 10: ProtocolBuffer.Encoder.NUMERIC, + 11: ProtocolBuffer.Encoder.NUMERIC, + 12: ProtocolBuffer.Encoder.NUMERIC, + }, 12, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = """""" _STYLE_CONTENT_TYPE = """""" diff --git a/google-appengine/google/appengine/api/urlfetch_stub.py b/google-appengine/google/appengine/api/urlfetch_stub.py index 0684c7c..a7775f2 100755 --- a/google-appengine/google/appengine/api/urlfetch_stub.py +++ b/google-appengine/google/appengine/api/urlfetch_stub.py @@ -45,10 +45,6 @@ REDIRECT_STATUSES = frozenset([ httplib.TEMPORARY_REDIRECT, ]) -PORTS_ALLOWED_IN_PRODUCTION = ( - None, '80', '443', '4443', '8080', '8081', '8082', '8083', '8084', '8085', - '8086', '8087', '8088', '8089', '8188', '8444', '8990') - _API_CALL_DEADLINE = 5.0 @@ -60,6 +56,21 @@ _UNTRUSTED_REQUEST_HEADERS = frozenset([ 'x-forwarded-for', ]) + +def _IsAllowedPort(port): + if port is None: + return True + try: + port = int(port) + except ValueError, e: + return False + if ((port >= 80 and port <= 90) or + (port >= 440 and port <= 450) or + port >= 1024): + return True + return False + + class URLFetchServiceStub(apiproxy_stub.APIProxyStub): """Stub version of the urlfetch API to be used with apiproxy_stub_map.""" @@ -151,7 +162,7 @@ class URLFetchServiceStub(apiproxy_stub.APIProxyStub): port = urllib.splitport(urllib.splituser(host)[1])[1] - if port not in PORTS_ALLOWED_IN_PRODUCTION: + if not _IsAllowedPort(port): logging.warning( 'urlfetch received %s ; port %s is not allowed in production!' % (url, port)) diff --git a/google-appengine/google/appengine/api/user_service_pb.py b/google-appengine/google/appengine/api/user_service_pb.py old mode 100644 new mode 100755 index 2037cdd..12c7dfe --- a/google-appengine/google/appengine/api/user_service_pb.py +++ b/google-appengine/google/appengine/api/user_service_pb.py @@ -552,6 +552,10 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage): email_ = "" has_user_id_ = 0 user_id_ = "" + has_auth_domain_ = 0 + auth_domain_ = "" + has_user_organization_ = 0 + user_organization_ = "" def __init__(self, contents=None): if contents is not None: self.MergeFromString(contents) @@ -582,11 +586,39 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage): def has_user_id(self): return self.has_user_id_ + def auth_domain(self): return self.auth_domain_ + + def set_auth_domain(self, x): + self.has_auth_domain_ = 1 + self.auth_domain_ = x + + def clear_auth_domain(self): + if self.has_auth_domain_: + self.has_auth_domain_ = 0 + self.auth_domain_ = "" + + def has_auth_domain(self): return self.has_auth_domain_ + + def user_organization(self): return self.user_organization_ + + def set_user_organization(self, x): + self.has_user_organization_ = 1 + self.user_organization_ = x + + def clear_user_organization(self): + if self.has_user_organization_: + self.has_user_organization_ = 0 + self.user_organization_ = "" + + def has_user_organization(self): return self.has_user_organization_ + def MergeFrom(self, x): assert x is not self if (x.has_email()): self.set_email(x.email()) if (x.has_user_id()): self.set_user_id(x.user_id()) + if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain()) + if (x.has_user_organization()): self.set_user_organization(x.user_organization()) def Equals(self, x): if x is self: return 1 @@ -594,29 +626,52 @@ class 
GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage): if self.has_email_ and self.email_ != x.email_: return 0 if self.has_user_id_ != x.has_user_id_: return 0 if self.has_user_id_ and self.user_id_ != x.user_id_: return 0 + if self.has_auth_domain_ != x.has_auth_domain_: return 0 + if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0 + if self.has_user_organization_ != x.has_user_organization_: return 0 + if self.has_user_organization_ and self.user_organization_ != x.user_organization_: return 0 return 1 def IsInitialized(self, debug_strs=None): initialized = 1 + if (not self.has_email_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: email not set.') + if (not self.has_user_id_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: user_id not set.') + if (not self.has_auth_domain_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: auth_domain not set.') return initialized def ByteSize(self): n = 0 - if (self.has_email_): n += 1 + self.lengthString(len(self.email_)) - if (self.has_user_id_): n += 1 + self.lengthString(len(self.user_id_)) - return n + 0 + n += self.lengthString(len(self.email_)) + n += self.lengthString(len(self.user_id_)) + n += self.lengthString(len(self.auth_domain_)) + if (self.has_user_organization_): n += 1 + self.lengthString(len(self.user_organization_)) + return n + 3 def Clear(self): self.clear_email() self.clear_user_id() + self.clear_auth_domain() + self.clear_user_organization() def OutputUnchecked(self, out): - if (self.has_email_): - out.putVarInt32(10) - out.putPrefixedString(self.email_) - if (self.has_user_id_): - out.putVarInt32(18) - out.putPrefixedString(self.user_id_) + out.putVarInt32(10) + out.putPrefixedString(self.email_) + out.putVarInt32(18) + out.putPrefixedString(self.user_id_) + out.putVarInt32(26) + out.putPrefixedString(self.auth_domain_) + if (self.has_user_organization_): + out.putVarInt32(34) + out.putPrefixedString(self.user_organization_) def TryMerge(self, d): while d.avail() > 0: @@ -627,6 +682,12 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage): if tt == 18: self.set_user_id(d.getPrefixedString()) continue + if tt == 26: + self.set_auth_domain(d.getPrefixedString()) + continue + if tt == 34: + self.set_user_organization(d.getPrefixedString()) + continue if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError d.skipData(tt) @@ -635,6 +696,8 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage): res="" if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_)) if self.has_user_id_: res+=prefix+("user_id: %s\n" % self.DebugFormatString(self.user_id_)) + if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_)) + if self.has_user_organization_: res+=prefix+("user_organization: %s\n" % self.DebugFormatString(self.user_organization_)) return res @@ -643,18 +706,24 @@ class GetOAuthUserResponse(ProtocolBuffer.ProtocolMessage): kemail = 1 kuser_id = 2 + kauth_domain = 3 + kuser_organization = 4 _TEXT = _BuildTagLookupTable({ 0: "ErrorCode", 1: "email", 2: "user_id", - }, 2) + 3: "auth_domain", + 4: "user_organization", + }, 4) _TYPES = _BuildTagLookupTable({ 0: ProtocolBuffer.Encoder.NUMERIC, 1: ProtocolBuffer.Encoder.STRING, 2: ProtocolBuffer.Encoder.STRING, - }, 2, ProtocolBuffer.Encoder.MAX_TYPE) + 3: ProtocolBuffer.Encoder.STRING, + 4: ProtocolBuffer.Encoder.STRING, + }, 4, ProtocolBuffer.Encoder.MAX_TYPE) _STYLE = 
"""""" _STYLE_CONTENT_TYPE = """""" @@ -745,20 +814,23 @@ class CheckOAuthSignatureResponse(ProtocolBuffer.ProtocolMessage): def IsInitialized(self, debug_strs=None): initialized = 1 + if (not self.has_oauth_consumer_key_): + initialized = 0 + if debug_strs is not None: + debug_strs.append('Required field: oauth_consumer_key not set.') return initialized def ByteSize(self): n = 0 - if (self.has_oauth_consumer_key_): n += 1 + self.lengthString(len(self.oauth_consumer_key_)) - return n + 0 + n += self.lengthString(len(self.oauth_consumer_key_)) + return n + 1 def Clear(self): self.clear_oauth_consumer_key() def OutputUnchecked(self, out): - if (self.has_oauth_consumer_key_): - out.putVarInt32(10) - out.putPrefixedString(self.oauth_consumer_key_) + out.putVarInt32(10) + out.putPrefixedString(self.oauth_consumer_key_) def TryMerge(self, d): while d.avail() > 0: diff --git a/google-appengine/google/appengine/api/validation.py b/google-appengine/google/appengine/api/validation.py index 00833e6..db969a7 100755 --- a/google-appengine/google/appengine/api/validation.py +++ b/google-appengine/google/appengine/api/validation.py @@ -218,7 +218,7 @@ class Validated(object): """ if key in self.ATTRIBUTES: - value = self.GetAttribute(key)(value) + value = self.GetAttribute(key)(value, key) object.__setattr__(self, key, value) else: raise ValidationError('Class \'%s\' does not have attribute \'%s\'' @@ -358,15 +358,16 @@ class Validator(object): """ self.default = default - def __call__(self, value): + def __call__(self, value, key='???'): """Main interface to validator is call mechanism.""" - return self.Validate(value) + return self.Validate(value, key) - def Validate(self, value): + def Validate(self, value, key='???'): """Override this method to customize sub-class behavior. Args: value: Value to validate. + key: Name of the field being validated. Returns: Value if value is valid, or a valid representation of value. @@ -425,11 +426,12 @@ class Type(Validator): self.expected_type = expected_type self.convert = convert - def Validate(self, value): + def Validate(self, value, key): """Validate that value is correct type. Args: value: Value to validate. + key: Name of the field being validated. Returns: None if value is None, value if value is of correct type, converted @@ -444,12 +446,11 @@ class Type(Validator): try: return self.expected_type(value) except ValueError, e: - raise ValidationError('Type conversion failed for value \'%s\'.' - % value, - e) + raise ValidationError('Type conversion failed for value \'%s\' ' + 'key %s.' % (value, key), e) except TypeError, e: - raise ValidationError('Expected value of type %s, but got \'%s\'.' - % (self.expected_type, value)) + raise ValidationError('Expected value of type %s for key %s, but got ' + '\'%s\'.' % (self.expected_type, key, value)) else: raise MissingAttribute('Missing value is required.') else: @@ -534,7 +535,7 @@ class Options(Validator): super(Options, self).__init__(default) self.options = alias_map - def Validate(self, value): + def Validate(self, value, key): """Validate options. Returns: @@ -547,8 +548,8 @@ class Options(Validator): raise ValidationError('Value for options field must not be None.') value = str(value) if value not in self.options: - raise ValidationError('Value \'%s\' not in %s.' - % (value, self.options)) + raise ValidationError('Value \'%s\' for key %s not in %s.' 
+ % (value, key, self.options)) return self.options[value] @@ -583,7 +584,7 @@ class Optional(Validator): self.expected_type = self.validator.expected_type self.default = default - def Validate(self, value): + def Validate(self, value, key): """Optionally require a value. Normal validators do not accept None. This will accept none on @@ -591,13 +592,20 @@ class Optional(Validator): Args: value: Value to be validated as optional. + key: Name of the field being validated. Returns: None if value is None, else results of contained validation. """ if value is None: return None - return self.validator(value) + return self.validator(value, key) + + def ToValue(self, value): + """Convert 'value' to a simplified collection or basic type.""" + if value is None: + return None + return self.validator.ToValue(value) class Regex(Validator): @@ -647,11 +655,12 @@ class Regex(Validator): self.expected_type = string_type - def Validate(self, value): + def Validate(self, value, key): """Does validation of a string against a regular expression. Args: value: String to match against regular expression. + key: Name of the field being validated. Raises: ValidationError when value does not match regular expression or @@ -663,8 +672,8 @@ class Regex(Validator): cast_value = TYPE_UNICODE(value) if self.re.match(cast_value) is None: - raise ValidationError('Value \'%s\' does not match expression \'%s\'' - % (value, self.re.pattern)) + raise ValidationError('Value \'%s\' for key %s does not match expression ' + '\'%s\'' % (value, key, self.re.pattern)) return cast_value @@ -680,17 +689,19 @@ class _RegexStrValue(object): is a list of strings, the strings are joined in to a single 'or' expression. """ - def __init__(self, attribute, value): + def __init__(self, attribute, value, key): """Initialize recompilable regex value. Args: attribute: Attribute validator associated with this regex value. value: Initial underlying python value for regex string. Either a single regex string or a list of regex strings. + key: Name of the field. """ self.__attribute = attribute self.__value = value self.__regex = None + self.__key = key def __AsString(self, value): """Convert a value to appropriate string. @@ -741,7 +752,8 @@ class _RegexStrValue(object): try: return re.compile(regex) except re.error, e: - raise ValidationError('Value \'%s\' does not compile: %s' % (regex, e), e) + raise ValidationError('Value \'%s\' for key %s does not compile: %s' % + (regex, self.__key, e), e) @property def regex(self): @@ -790,7 +802,7 @@ class RegexStr(Validator): AttributeDefinitionError if string_type is not a kind of string. """ if default is not None: - default = _RegexStrValue(self, default) + default = _RegexStrValue(self, default, None) re.compile(str(default)) super(RegexStr, self).__init__(default) if (not issubclass(string_type, basestring) or @@ -800,7 +812,7 @@ class RegexStr(Validator): self.expected_type = string_type - def Validate(self, value): + def Validate(self, value, key): """Validates that the string compiles as a regular expression. Because the regular expression might have been expressed as a multiline @@ -808,6 +820,7 @@ class RegexStr(Validator): Args: value: String to compile as a regular expression. + key: Name of the field being validated. Raises: ValueError when value does not compile as a regular expression. 
      TypeError
@@ -815,7 +828,7 @@
     """
     if isinstance(value, _RegexStrValue):
       return value
-    value = _RegexStrValue(self, value)
+    value = _RegexStrValue(self, value, key)
     value.Validate()
     return value
@@ -862,22 +875,24 @@ class Range(Validator):
     self.expected_type = range_type
     self._type_validator = Type(range_type)
 
-  def Validate(self, value):
+  def Validate(self, value, key):
     """Validate that value is within range.
 
     Validates against range-type then checks the range.
 
     Args:
       value: Value to validate.
+      key: Name of the field being validated.
 
     Raises:
       ValidationError when value is out of range.
       ValidationError when value is not of the same range type.
     """
-    cast_value = self._type_validator.Validate(value)
+    cast_value = self._type_validator.Validate(value, key)
     if cast_value < self.minimum or cast_value > self.maximum:
-      raise ValidationError('Value \'%s\' is out of range %s - %s'
+      raise ValidationError('Value \'%s\' for %s is out of range %s - %s'
                             % (str(value),
+                               key,
                                str(self.minimum),
                                str(self.maximum)))
     return cast_value
@@ -902,27 +917,29 @@ class Repeated(Validator):
     self.constructor = constructor
     self.expected_type = list
 
-  def Validate(self, value):
+  def Validate(self, value, key):
     """Do validation of sequence.
 
     Value must be a list and all elements must be of type 'constructor'.
 
    Args:
      value: Value to validate.
+      key: Name of the field being validated.
 
    Raises:
      ValidationError if value is None, not a list or one of its elements
      is the wrong type.
    """
    if not isinstance(value, list):
-      raise ValidationError('Repeated fields must be sequence, '
-                            'but found \'%s\'.' % value)
+      raise ValidationError('Repeated fields for %s must be sequence, '
+                            'but found \'%s\'.' % (key, value))
    for item in value:
      if isinstance(self.constructor, Validator):
-        item = self.constructor.Validate(item)
+        item = self.constructor.Validate(item, key)
      elif not isinstance(item, self.constructor):
-        raise ValidationError('Repeated items must be %s, but found \'%s\'.'
-                              % (str(self.constructor), str(item)))
+        raise ValidationError('Repeated items for %s must be %s, but found '
+                              '\'%s\'.' %
+                              (key, str(self.constructor), str(item)))
    return value
diff --git a/google-appengine/google/appengine/api/xmpp/xmpp_service_pb.py b/google-appengine/google/appengine/api/xmpp/xmpp_service_pb.py
old mode 100644
new mode 100755
diff --git a/google-appengine/google/appengine/base/capabilities_pb.py b/google-appengine/google/appengine/base/capabilities_pb.py
old mode 100644
new mode 100755
diff --git a/google-appengine/google/appengine/cron/groctimespecification.py b/google-appengine/google/appengine/cron/groctimespecification.py
index 37d168f..9c030b3 100755
--- a/google-appengine/google/appengine/cron/groctimespecification.py
+++ b/google-appengine/google/appengine/cron/groctimespecification.py
@@ -57,20 +57,26 @@ except ImportError:
   pass
 
 
-def GrocTimeSpecification(schedule):
+def GrocTimeSpecification(schedule, timezone=None):
   """Factory function.
 
   Turns a schedule specification into a TimeSpecification.
 
   Arguments:
     schedule: the schedule specification, as a string
-
+    timezone: the optional timezone as a string for this specification.
+        Defaults to 'UTC' - valid entries are things like 'Australia/Victoria'
+        or 'PST8PDT'.
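+        For example, GrocTimeSpecification('every monday 09:00',
+        timezone='Australia/Victoria') describes 09:00 Melbourne local time
+        (an illustrative schedule; any string accepted by the groc grammar
+        works here).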
Returns: a TimeSpecification instance """ parser = groc.CreateParser(schedule) parser.timespec() + if parser.getTokenStream().LT(1).getText(): + raise groc.GrocException( + 'Extra token %r' % parser.getTokenStream().LT(1).getText()) + if parser.period_string: return IntervalTimeSpecification(parser.interval_mins, parser.period_string, @@ -79,7 +85,8 @@ def GrocTimeSpecification(schedule): return SpecificTimeSpecification(parser.ordinal_set, parser.weekday_set, parser.month_set, parser.monthday_set, - parser.time_string) + parser.time_string, + timezone) class TimeSpecification(object): @@ -186,7 +193,7 @@ class SpecificTimeSpecification(TimeSpecification): def __init__(self, ordinals=None, weekdays=None, months=None, monthdays=None, timestr='00:00', timezone=None): - super(SpecificTimeSpecification, self).__init__(self) + super(SpecificTimeSpecification, self).__init__() if weekdays and monthdays: raise ValueError('cannot supply both monthdays and weekdays') if ordinals is None: @@ -304,8 +311,6 @@ class SpecificTimeSpecification(TimeSpecification): day_matches.pop(0) while day_matches: out = candidate_month.replace(day=day_matches[0], hour=self.time.hour, - - minute=self.time.minute, second=0, microsecond=0) if self.timezone and pytz is not None: diff --git a/google-appengine/google/appengine/datastore/datastore_pb.py b/google-appengine/google/appengine/datastore/datastore_pb.py old mode 100644 new mode 100755 index 58e2400..f8d2424 --- a/google-appengine/google/appengine/datastore/datastore_pb.py +++ b/google-appengine/google/appengine/datastore/datastore_pb.py @@ -2350,194 +2350,6 @@ class RunCompiledQueryRequest(ProtocolBuffer.ProtocolMessage): 6: ProtocolBuffer.Encoder.STRING, }, 6, ProtocolBuffer.Encoder.MAX_TYPE) - _STYLE = """""" - _STYLE_CONTENT_TYPE = """""" -class QueryExplanation(ProtocolBuffer.ProtocolMessage): - has_native_ancestor_ = 0 - native_ancestor_ = 0 - has_native_offset_ = 0 - native_offset_ = 0 - has_native_limit_ = 0 - native_limit_ = 0 - - def __init__(self, contents=None): - self.native_index_ = [] - if contents is not None: self.MergeFromString(contents) - - def native_ancestor(self): return self.native_ancestor_ - - def set_native_ancestor(self, x): - self.has_native_ancestor_ = 1 - self.native_ancestor_ = x - - def clear_native_ancestor(self): - if self.has_native_ancestor_: - self.has_native_ancestor_ = 0 - self.native_ancestor_ = 0 - - def has_native_ancestor(self): return self.has_native_ancestor_ - - def native_index_size(self): return len(self.native_index_) - def native_index_list(self): return self.native_index_ - - def native_index(self, i): - return self.native_index_[i] - - def mutable_native_index(self, i): - return self.native_index_[i] - - def add_native_index(self): - x = Index() - self.native_index_.append(x) - return x - - def clear_native_index(self): - self.native_index_ = [] - def native_offset(self): return self.native_offset_ - - def set_native_offset(self, x): - self.has_native_offset_ = 1 - self.native_offset_ = x - - def clear_native_offset(self): - if self.has_native_offset_: - self.has_native_offset_ = 0 - self.native_offset_ = 0 - - def has_native_offset(self): return self.has_native_offset_ - - def native_limit(self): return self.native_limit_ - - def set_native_limit(self, x): - self.has_native_limit_ = 1 - self.native_limit_ = x - - def clear_native_limit(self): - if self.has_native_limit_: - self.has_native_limit_ = 0 - self.native_limit_ = 0 - - def has_native_limit(self): return self.has_native_limit_ - - - def MergeFrom(self, 
x): - assert x is not self - if (x.has_native_ancestor()): self.set_native_ancestor(x.native_ancestor()) - for i in xrange(x.native_index_size()): self.add_native_index().CopyFrom(x.native_index(i)) - if (x.has_native_offset()): self.set_native_offset(x.native_offset()) - if (x.has_native_limit()): self.set_native_limit(x.native_limit()) - - def Equals(self, x): - if x is self: return 1 - if self.has_native_ancestor_ != x.has_native_ancestor_: return 0 - if self.has_native_ancestor_ and self.native_ancestor_ != x.native_ancestor_: return 0 - if len(self.native_index_) != len(x.native_index_): return 0 - for e1, e2 in zip(self.native_index_, x.native_index_): - if e1 != e2: return 0 - if self.has_native_offset_ != x.has_native_offset_: return 0 - if self.has_native_offset_ and self.native_offset_ != x.native_offset_: return 0 - if self.has_native_limit_ != x.has_native_limit_: return 0 - if self.has_native_limit_ and self.native_limit_ != x.native_limit_: return 0 - return 1 - - def IsInitialized(self, debug_strs=None): - initialized = 1 - for p in self.native_index_: - if not p.IsInitialized(debug_strs): initialized=0 - return initialized - - def ByteSize(self): - n = 0 - if (self.has_native_ancestor_): n += 2 - n += 1 * len(self.native_index_) - for i in xrange(len(self.native_index_)): n += self.lengthString(self.native_index_[i].ByteSize()) - if (self.has_native_offset_): n += 1 + self.lengthVarInt64(self.native_offset_) - if (self.has_native_limit_): n += 1 + self.lengthVarInt64(self.native_limit_) - return n + 0 - - def Clear(self): - self.clear_native_ancestor() - self.clear_native_index() - self.clear_native_offset() - self.clear_native_limit() - - def OutputUnchecked(self, out): - if (self.has_native_ancestor_): - out.putVarInt32(8) - out.putBoolean(self.native_ancestor_) - for i in xrange(len(self.native_index_)): - out.putVarInt32(18) - out.putVarInt32(self.native_index_[i].ByteSize()) - self.native_index_[i].OutputUnchecked(out) - if (self.has_native_offset_): - out.putVarInt32(24) - out.putVarInt32(self.native_offset_) - if (self.has_native_limit_): - out.putVarInt32(32) - out.putVarInt32(self.native_limit_) - - def TryMerge(self, d): - while d.avail() > 0: - tt = d.getVarInt32() - if tt == 8: - self.set_native_ancestor(d.getBoolean()) - continue - if tt == 18: - length = d.getVarInt32() - tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length) - d.skip(length) - self.add_native_index().TryMerge(tmp) - continue - if tt == 24: - self.set_native_offset(d.getVarInt32()) - continue - if tt == 32: - self.set_native_limit(d.getVarInt32()) - continue - if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError - d.skipData(tt) - - - def __str__(self, prefix="", printElemNumber=0): - res="" - if self.has_native_ancestor_: res+=prefix+("native_ancestor: %s\n" % self.DebugFormatBool(self.native_ancestor_)) - cnt=0 - for e in self.native_index_: - elm="" - if printElemNumber: elm="(%d)" % cnt - res+=prefix+("native_index%s <\n" % elm) - res+=e.__str__(prefix + " ", printElemNumber) - res+=prefix+">\n" - cnt+=1 - if self.has_native_offset_: res+=prefix+("native_offset: %s\n" % self.DebugFormatInt32(self.native_offset_)) - if self.has_native_limit_: res+=prefix+("native_limit: %s\n" % self.DebugFormatInt32(self.native_limit_)) - return res - - - def _BuildTagLookupTable(sparse, maxtag, default=None): - return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)]) - - knative_ancestor = 1 - knative_index = 2 - knative_offset = 3 - knative_limit = 4 - - _TEXT = 
_BuildTagLookupTable({
-    0: "ErrorCode",
-    1: "native_ancestor",
-    2: "native_index",
-    3: "native_offset",
-    4: "native_limit",
-  }, 4)
-
-  _TYPES = _BuildTagLookupTable({
-    0: ProtocolBuffer.Encoder.NUMERIC,
-    1: ProtocolBuffer.Encoder.NUMERIC,
-    2: ProtocolBuffer.Encoder.STRING,
-    3: ProtocolBuffer.Encoder.NUMERIC,
-    4: ProtocolBuffer.Encoder.NUMERIC,
-  }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
-
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 class Cursor(ProtocolBuffer.ProtocolMessage):
@@ -2661,6 +2473,9 @@ class Error(ProtocolBuffer.ProtocolMessage):
   NEED_INDEX = 4
   TIMEOUT = 5
   PERMISSION_DENIED = 6
+  BIGTABLE_ERROR = 7
+  COMMITTED_BUT_STILL_APPLYING = 8
+  CAPABILITY_DISABLED = 9
 
   _ErrorCode_NAMES = {
     1: "BAD_REQUEST",
@@ -2669,6 +2484,9 @@
     4: "NEED_INDEX",
     5: "TIMEOUT",
     6: "PERMISSION_DENIED",
+    7: "BIGTABLE_ERROR",
+    8: "COMMITTED_BUT_STILL_APPLYING",
+    9: "CAPABILITY_DISABLED",
   }
 
   def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
@@ -5343,4 +5161,4 @@ class CommitResponse(ProtocolBuffer.ProtocolMessage):
   _STYLE = """"""
   _STYLE_CONTENT_TYPE = """"""
 
-__all__ = ['Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_Position','RunCompiledQueryRequest','QueryExplanation','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','GetSchemaRequest','Schema','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse']
+__all__ = ['Transaction','Query','Query_Filter','Query_Order','CompiledQuery','CompiledQuery_PrimaryScan','CompiledQuery_MergeJoinScan','CompiledQuery_EntityFilter','CompiledCursor','CompiledCursor_Position','RunCompiledQueryRequest','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','GetSchemaRequest','Schema','AllocateIdsRequest','AllocateIdsResponse','CompositeIndices','AddActionsRequest','AddActionsResponse','BeginTransactionRequest','CommitResponse']
diff --git a/google-appengine/google/appengine/datastore/datastore_sqlite_stub.py b/google-appengine/google/appengine/datastore/datastore_sqlite_stub.py
new file mode 100644
index 0000000..8994d85
--- /dev/null
+++ b/google-appengine/google/appengine/datastore/datastore_sqlite_stub.py
@@ -0,0 +1,1501 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""SQLite-based stub for the Python datastore API.
+
+Entities are stored in an SQLite database in a similar fashion to the
+production datastore.
+
+Transactions are serialized through __tx_lock. Each transaction acquires it
+when it begins and releases it when it commits or rolls back.
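+
+A minimal registration sketch (illustrative only: the app id and file path
+below are placeholders, and dev_appserver performs this wiring itself when
+the --use_sqlite flag is passed):
+
+  from google.appengine.api import apiproxy_stub_map
+  from google.appengine.datastore import datastore_sqlite_stub
+
+  stub = datastore_sqlite_stub.DatastoreSqliteStub(
+      'my-app', '/tmp/my-app.sqlite')
+  apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)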
+""" + + + + + + +import array +import itertools +import logging +import md5 +import sys +import threading + +from google.appengine.datastore import entity_pb +from google.appengine.api import api_base_pb +from google.appengine.api import apiproxy_stub +from google.appengine.api import apiproxy_stub_map +from google.appengine.api import datastore_errors +from google.appengine.datastore import datastore_index +from google.appengine.datastore import datastore_pb +from google.appengine.datastore import sortable_pb_encoder +from google.appengine.runtime import apiproxy_errors + +try: + import pysqlite2.dbapi2 as sqlite3 +except ImportError: + import sqlite3 + +try: + __import__('google.appengine.api.labs.taskqueue.taskqueue_service_pb') + taskqueue_service_pb = sys.modules.get( + 'google.appengine.api.labs.taskqueue.taskqueue_service_pb') +except ImportError: + from google.appengine.api.taskqueue import taskqueue_service_pb + + +import __builtin__ +buffer = __builtin__.buffer + + +entity_pb.Reference.__hash__ = lambda self: hash(self.Encode()) +datastore_pb.Query.__hash__ = lambda self: hash(self.Encode()) +datastore_pb.Transaction.__hash__ = lambda self: hash(self.Encode()) +datastore_pb.Cursor.__hash__ = lambda self: hash(self.Encode()) + + +_MAXIMUM_RESULTS = 1000 + + +_MAX_QUERY_COMPONENTS = 63 + + +_BATCH_SIZE = 20 + + +_MAX_ACTIONS_PER_TXN = 5 + + +_MAX_TIMEOUT = 5.0 + + +_OPERATOR_MAP = { + datastore_pb.Query_Filter.LESS_THAN: '<', + datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=', + datastore_pb.Query_Filter.EQUAL: '=', + datastore_pb.Query_Filter.GREATER_THAN: '>', + datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=', +} + + +_ORDER_MAP = { + datastore_pb.Query_Order.ASCENDING: 'ASC', + datastore_pb.Query_Order.DESCENDING: 'DESC', +} + +_CORE_SCHEMA = """ +CREATE TABLE IF NOT EXISTS Apps ( + app_id TEXT NOT NULL PRIMARY KEY, + indexes BLOB); + +CREATE TABLE IF NOT EXISTS Namespaces ( + app_id TEXT NOT NULL, + name_space TEXT NOT NULL, + PRIMARY KEY (app_id, name_space)); + +CREATE TABLE IF NOT EXISTS IdSeq ( + prefix TEXT NOT NULL PRIMARY KEY, + next_id INT NOT NULL); +""" + +_NAMESPACE_SCHEMA = """ +CREATE TABLE "%(prefix)s!Entities" ( + __path__ BLOB NOT NULL PRIMARY KEY, + kind TEXT NOT NULL, + entity BLOB NOT NULL); +CREATE INDEX "%(prefix)s!EntitiesByKind" ON "%(prefix)s!Entities" ( + kind ASC, + __path__ ASC); + +CREATE TABLE "%(prefix)s!EntitiesByProperty" ( + kind TEXT NOT NULL, + name TEXT NOT NULL, + value BLOB NOT NULL, + __path__ BLOB NOT NULL REFERENCES Entities, + PRIMARY KEY(kind ASC, name ASC, value ASC, __path__ ASC) ON CONFLICT IGNORE); +CREATE INDEX "%(prefix)s!EntitiesByPropertyDesc" + ON "%(prefix)s!EntitiesByProperty" ( + kind ASC, + name ASC, + value DESC, + __path__ ASC); +CREATE INDEX "%(prefix)s!EntitiesByPropertyKey" + ON "%(prefix)s!EntitiesByProperty" ( + __path__ ASC); + +INSERT OR IGNORE INTO Apps (app_id) VALUES ('%(app_id)s'); +INSERT INTO Namespaces (app_id, name_space) + VALUES ('%(app_id)s', '%(name_space)s'); +INSERT OR IGNORE INTO IdSeq VALUES ('%(prefix)s', 1); +""" + + +def ReferencePropertyToReference(refprop): + ref = entity_pb.Reference() + ref.set_app(refprop.app()) + if refprop.has_name_space(): + ref.set_name_space(refprop.name_space()) + for pathelem in refprop.pathelement_list(): + ref.mutable_path().add_element().CopyFrom(pathelem) + return ref + + +class QueryCursor(object): + """Encapsulates a database cursor and provides methods to fetch results.""" + + def __init__(self, query, db_cursor): + """Constructor. 
+ + Args: + query: A Query PB. + db_cursor: An SQLite cursor returning n+2 columns. The first 2 columns + must be the path of the entity and the entity itself, while the + remaining columns must be the sort columns for the query. + """ + self.__query = query + self.app = query.app() + self.__cursor = db_cursor + self.__seen = set() + + self.__position = '' + + self.__next_result = (None, None) + + if query.has_limit(): + self.limit = query.limit() + query.offset() + else: + self.limit = None + + def Count(self): + """Counts results, up to the query's limit. + + Note this method does not deduplicate results, so the query it was generated + from should have the 'distinct' clause applied. + + Returns: + int: Result count. + """ + count = 0 + while self.limit is None or count < self.limit: + row = self.__cursor.fetchone() + if not row: + break + count += 1 + return count + + def _EncodeCompiledCursor(self, cc): + """Encodes the current position in the query as a compiled cursor. + + Args: + cc: The compiled cursor to fill out. + """ + position = cc.add_position() + position.set_start_key(self.__position) + + def _GetResult(self): + """Returns the next result from the result set, without deduplication. + + Returns: + (path, value): The path and value of the next result. + """ + if not self.__cursor: + return None, None + row = self.__cursor.fetchone() + if not row: + self.__cursor = None + return None, None + path, data, position_parts = str(row[0]), row[1], row[2:] + self.__position = ''.join(str(x) for x in position_parts) + return path, data + + def _Next(self): + """Fetches the next unique result from the result set. + + Returns: + A datastore_pb.EntityProto instance. + """ + entity = None + path, data = self.__next_result + self.__next_result = None, None + while self.__cursor and not entity: + if path and path not in self.__seen: + self.__seen.add(path) + entity = entity_pb.EntityProto(data) + else: + path, data = self._GetResult() + return entity + + def Skip(self, count): + """Skips the specified number of unique results. + + Args: + count: Number of results to skip. + """ + for unused_i in xrange(count): + self._Next() + + def ResumeFromCompiledCursor(self, cc): + """Resumes a query from a compiled cursor. + + Args: + cc: The compiled cursor to resume from. + """ + target_position = cc.position(0).start_key() + while self.__position <= target_position and self.__cursor: + self.__next_result = self._GetResult() + + def PopulateQueryResult(self, count, result): + """Populates a QueryResult PB with results from the cursor. + + Args: + count: The number of results to retrieve. + result: out: A query_result PB. + """ + if count > _MAXIMUM_RESULTS: + count = _MAXIMUM_RESULTS + + result.set_keys_only(self.__query.keys_only()) + + result_list = result.result_list() + while len(result_list) < count: + if self.limit is not None and len(self.__seen) >= self.limit: + break + entity = self._Next() + if entity is None: + break + result_list.append(entity) + + result.set_more_results(len(result_list) == count) + self._EncodeCompiledCursor(result.mutable_compiled_cursor()) + + +class DatastoreSqliteStub(apiproxy_stub.APIProxyStub): + """Persistent stub for the Python datastore API. + + Stores all entities in an SQLite database. A DatastoreSqliteStub instance + handles a single app's data. 
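+
+  Table layout sketch, as created by _CORE_SCHEMA and _NAMESPACE_SCHEMA above
+  (the app id 'guestbook' and namespace 'ns1' are illustrative):
+
+    Apps, Namespaces, IdSeq              bookkeeping shared by all namespaces
+    "guestbook!ns1!Entities"             one entity table per (app, namespace)
+    "guestbook!ns1!EntitiesByProperty"   one index table per (app, namespace)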
+ """ + + WRITE_ONLY = entity_pb.CompositeIndex.WRITE_ONLY + READ_WRITE = entity_pb.CompositeIndex.READ_WRITE + DELETED = entity_pb.CompositeIndex.DELETED + ERROR = entity_pb.CompositeIndex.ERROR + + _INDEX_STATE_TRANSITIONS = { + WRITE_ONLY: frozenset((READ_WRITE, DELETED, ERROR)), + READ_WRITE: frozenset((DELETED,)), + ERROR: frozenset((DELETED,)), + DELETED: frozenset((ERROR,)), + } + + READ_ERROR_MSG = ('Data in %s is corrupt or a different version. ' + 'Try running with the --clear_datastore flag.\n%r') + + def __init__(self, + app_id, + datastore_file, + require_indexes=False, + verbose=False, + service_name='datastore_v3', + trusted=False): + """Constructor. + + Initializes the SQLite database if necessary. + + Args: + app_id: string + datastore_file: string, path to sqlite database. Use None to create an + in-memory database. + require_indexes: bool, default False. If True, composite indexes must + exist in index.yaml for queries that need them. + verbose: bool, default False. If True, logs all select statements. + service_name: Service name expected for all calls. + trusted: bool, default False. If True, this stub allows an app to access + the data of another app. + """ + apiproxy_stub.APIProxyStub.__init__(self, service_name) + + assert isinstance(app_id, basestring) and app_id + self.__app_id = app_id + self.__datastore_file = datastore_file + self.SetTrusted(trusted) + + self.__tx_actions = [] + + self.__require_indexes = require_indexes + self.__verbose = verbose + + self.__id_map = {} + self.__id_lock = threading.Lock() + + self.__connection = sqlite3.connect( + self.__datastore_file or ':memory:', + timeout=_MAX_TIMEOUT, + check_same_thread=False) + self.__connection_lock = threading.RLock() + self.__current_transaction = None + self.__next_tx_handle = 1 + + self.__tx_writes = {} + self.__tx_deletes = set() + + self.__next_cursor_id = 1 + self.__cursor_lock = threading.Lock() + self.__cursors = {} + + self.__namespaces = set() + + self.__indexes = {} + self.__index_lock = threading.Lock() + + self.__query_history = {} + + try: + self.__Init() + except sqlite3.DatabaseError, e: + raise datastore_errors.InternalError(self.READ_ERROR_MSG % + (self.__datastore_file, e)) + + def __Init(self): + self.__connection.executescript(_CORE_SCHEMA) + self.__connection.commit() + + c = self.__connection.execute('SELECT app_id, name_space FROM Namespaces') + self.__namespaces = set(c.fetchall()) + + c = self.__connection.execute('SELECT app_id, indexes FROM Apps') + for app_id, index_proto in c.fetchall(): + index_map = self.__indexes.setdefault(app_id, {}) + if not index_proto: + continue + indexes = datastore_pb.CompositeIndices(index_proto) + for index in indexes.index_list(): + index_map.setdefault(index.definition().entity_type(), []).append(index) + + def Clear(self): + """Clears the datastore.""" + conn = self.__GetConnection(None) + try: + c = conn.execute( + "SELECT tbl_name FROM sqlite_master WHERE type = 'table'") + for row in c.fetchall(): + conn.execute('DROP TABLE "%s"' % row) + conn.commit() + finally: + self.__ReleaseConnection(conn, None) + + self.__namespaces = set() + self.__indexes = {} + self.__cursors = {} + self.__query_history = {} + + self.__Init() + + def Read(self): + """Reads the datastore from disk. + + Noop for compatibility with file stub. + """ + pass + + def Write(self): + """Writes the datastore to disk. + + Noop for compatibility with file stub. + """ + pass + + def SetTrusted(self, trusted): + """Set/clear the trusted bit in the stub. 
+
+    This bit indicates that the app calling the stub is trusted. A
+    trusted app can write to datastores of other apps.
+
+    Args:
+      trusted: boolean.
+    """
+    self.__trusted = trusted
+
+  @staticmethod
+  def __MakeParamList(size):
+    """Returns a comma separated list of sqlite substitution parameters.
+
+    Args:
+      size: Number of parameters in returned list.
+    Returns:
+      A comma separated list of substitution parameters.
+    """
+    return ','.join('?' * size)
+
+  @staticmethod
+  def __GetEntityKind(key):
+    """Returns the kind of the given key or entity protocol buffer."""
+    if isinstance(key, entity_pb.EntityProto):
+      key = key.key()
+    return key.path().element_list()[-1].type()
+
+  @staticmethod
+  def __EncodeIndexPB(pb):
+    """Encodes a PB in the sortable index format, as a buffer."""
+    if isinstance(pb, entity_pb.PropertyValue) and pb.has_uservalue():
+      # Normalize user values so that they index by email and auth domain
+      # only, regardless of the stored gaia id.
+      userval = entity_pb.PropertyValue()
+      userval.mutable_uservalue().set_email(pb.uservalue().email())
+      userval.mutable_uservalue().set_auth_domain(pb.uservalue().auth_domain())
+      userval.mutable_uservalue().set_gaiaid(0)
+      pb = userval
+    encoder = sortable_pb_encoder.Encoder()
+    pb.Output(encoder)
+    return buffer(encoder.buffer().tostring())
+
+  @staticmethod
+  def __AddQueryParam(params, param):
+    """Appends param to params and returns its 1-based index."""
+    params.append(param)
+    return len(params)
+
+  @staticmethod
+  def __CreateFilterString(filter_list, params):
+    """Transforms a filter list into an SQL WHERE clause.
+
+    Args:
+      filter_list: The list of (property, operator, value) filters
+        to transform.
+      params: out: A list of parameters to pass to the query.
+    Returns:
+      An SQL 'where' clause.
+    """
+    clauses = []
+    for prop, operator, value in filter_list:
+      sql_op = _OPERATOR_MAP[operator]
+
+      value_index = DatastoreSqliteStub.__AddQueryParam(params, value)
+      clauses.append('%s %s :%d' % (prop, sql_op, value_index))
+
+    filters = ' AND '.join(clauses)
+    if filters:
+      filters = 'WHERE ' + filters
+    return filters
+
+  @staticmethod
+  def __CreateOrderString(order_list):
+    """Returns an 'ORDER BY' clause from the given list of orders.
+
+    Args:
+      order_list: A list of (field, order) tuples.
+    Returns:
+      An SQL ORDER BY clause.
+    """
+    orders = ', '.join('%s %s' % (x[0], _ORDER_MAP[x[1]]) for x in order_list)
+    if orders:
+      orders = 'ORDER BY ' + orders
+    return orders
+
+  def __ValidateAppId(self, app_id):
+    """Verify that this is the stub for app_id.
+
+    Args:
+      app_id: An application ID.
+
+    Raises:
+      datastore_errors.BadRequestError: if this is not the stub for app_id.
+    """
+    assert app_id
+    if not self.__trusted and app_id != self.__app_id:
+      raise datastore_errors.BadRequestError(
+          'app %s cannot access app %s\'s data' % (self.__app_id, app_id))
+
+  def __ValidateTransaction(self, tx):
+    """Verify that this transaction exists and is valid.
+
+    Args:
+      tx: datastore_pb.Transaction
+
+    Raises:
+      datastore_errors.BadRequestError: if the transaction is not valid or
+      does not exist.
+    """
+    assert isinstance(tx, datastore_pb.Transaction)
+    self.__ValidateAppId(tx.app())
+    if tx.handle() != self.__current_transaction:
+      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+                                             'Transaction %s not found' % tx)
+
+  def __ValidateKey(self, key):
+    """Validate this key.
+
+    Args:
+      key: entity_pb.Reference
+
+    Raises:
+      datastore_errors.BadRequestError: if the key is invalid
+    """
+    assert isinstance(key, entity_pb.Reference)
+
+    self.__ValidateAppId(key.app())
+
+    for elem in key.path().element_list():
+      if elem.has_id() == elem.has_name():
+        raise datastore_errors.BadRequestError(
+            'each key path element should have id or name but not both: %r'
+            % key)
+
+  def __GetConnection(self, transaction):
+    """Retrieves a connection to the SQLite DB.
+
+    If a transaction is supplied, it must match the stub's current
+    transaction; the stub's single shared connection is returned either way.
+
+    Args:
+      transaction: A Transaction PB.
+    Returns:
+      An SQLite connection object.
+    """
+    self.__connection_lock.acquire()
+    request_tx = transaction and transaction.handle()
+    if request_tx == 0:
+      request_tx = None
+    if request_tx != self.__current_transaction:
+      raise apiproxy_errors.ApplicationError(
+          datastore_pb.Error.BAD_REQUEST,
+          'Only one concurrent transaction per thread is permitted.')
+    return self.__connection
+
+  def __ReleaseConnection(self, conn, transaction, rollback=False):
+    """Releases a connection for use by other operations.
+
+    If a transaction is supplied, no action is taken.
+
+    Args:
+      conn: An SQLite connection object.
+      transaction: A Transaction PB.
+      rollback: If True, roll back the database TX instead of committing it.
+    """
+    if not transaction or not transaction.has_handle():
+      if rollback:
+        conn.rollback()
+      else:
+        conn.commit()
+    self.__connection_lock.release()
+
+  def __ConfigureNamespace(self, conn, prefix, app_id, name_space):
+    """Ensures the relevant tables and indexes exist.
+
+    Args:
+      conn: An SQLite database connection.
+      prefix: The namespace prefix to configure.
+      app_id: The app ID.
+      name_space: The per-app namespace name.
+    """
+    format_args = {'app_id': app_id, 'name_space': name_space, 'prefix': prefix}
+    conn.executescript(_NAMESPACE_SCHEMA % format_args)
+    conn.commit()
+
+  def __WriteIndexData(self, conn, app):
+    """Writes index data to disk.
+
+    Args:
+      conn: An SQLite connection.
+      app: The app ID to write indexes for.
+    """
+    indices = datastore_pb.CompositeIndices()
+    for indexes in self.__indexes[app].values():
+      indices.index_list().extend(indexes)
+
+    conn.execute('UPDATE Apps SET indexes = ? WHERE app_id = ?',
+                 (indices.Encode(), app))
+
+  def __GetTablePrefix(self, data):
+    """Returns the namespace prefix for a query.
+
+    Args:
+      data: An Entity, Key or Query PB, or an (app_id, ns) tuple.
+    Returns:
+      A valid table prefix.
+    """
+    if isinstance(data, entity_pb.EntityProto):
+      data = data.key()
+    if not isinstance(data, tuple):
+      data = (data.app(), data.name_space())
+    prefix = ('%s!%s' % data).replace('"', '""')
+    if data not in self.__namespaces:
+      self.__namespaces.add(data)
+      self.__ConfigureNamespace(self.__connection, prefix, *data)
+    return prefix
+
+  def __DeleteRows(self, conn, paths, table):
+    """Deletes rows from a table.
+
+    Args:
+      conn: An SQLite connection.
+      paths: Paths to delete.
+      table: The table to delete from.
+    Returns:
+      The number of rows deleted.
+    """
+    c = conn.execute('DELETE FROM "%s" WHERE __path__ IN (%s)'
+                     % (table, self.__MakeParamList(len(paths))),
+                     paths)
+    return c.rowcount
+
+  def __DeleteEntityRows(self, conn, keys, table):
+    """Deletes rows from the specified table that index the keys provided.
+
+    Args:
+      conn: A database connection.
+      keys: A list of keys to delete index entries for.
+      table: The table to delete from.
+    Returns:
+      The number of rows deleted.
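+
+    Keys are grouped by (app id, namespace), since each namespace stores its
+    rows in a separate per-namespace table (see __GetTablePrefix above).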
+    """
+    keys = sorted((x.app(), x.name_space(), x) for x in keys)
+    rowcount = 0
+    for (app_id, ns), group in itertools.groupby(keys, lambda x: x[:2]):
+      path_strings = [self.__EncodeIndexPB(x[2].path()) for x in group]
+      prefix = self.__GetTablePrefix((app_id, ns))
+      rowcount += self.__DeleteRows(conn, path_strings,
+                                    '%s!%s' % (prefix, table))
+    return rowcount
+
+  def __DeleteIndexEntries(self, conn, keys):
+    """Deletes entities from the index.
+
+    Args:
+      conn: An SQLite connection.
+      keys: A list of keys to delete.
+    """
+    self.__DeleteEntityRows(conn, keys, 'EntitiesByProperty')
+
+  def __InsertEntities(self, conn, entities):
+    """Inserts or updates entities in the DB.
+
+    Args:
+      conn: A database connection.
+      entities: A list of entities to store.
+    """
+
+    def RowGenerator(entities):
+      for unused_prefix, e in entities:
+        yield (self.__EncodeIndexPB(e.key().path()),
+               self.__GetEntityKind(e),
+               buffer(e.Encode()))
+
+    entities = sorted((self.__GetTablePrefix(x), x) for x in entities)
+    for prefix, group in itertools.groupby(entities, lambda x: x[0]):
+      conn.executemany(
+          'INSERT OR REPLACE INTO "%s!Entities" VALUES (?, ?, ?)' % prefix,
+          RowGenerator(group))
+
+  def __InsertIndexEntries(self, conn, entities):
+    """Inserts index entries for the supplied entities.
+
+    Args:
+      conn: A database connection.
+      entities: A list of entities to create index entries for.
+    """
+
+    def RowGenerator(entities):
+      for unused_prefix, e in entities:
+        for p in e.property_list():
+          yield (self.__GetEntityKind(e),
+                 p.name(),
+                 self.__EncodeIndexPB(p.value()),
+                 self.__EncodeIndexPB(e.key().path()))
+    entities = sorted((self.__GetTablePrefix(x), x) for x in entities)
+    for prefix, group in itertools.groupby(entities, lambda x: x[0]):
+      conn.executemany(
+          'INSERT INTO "%s!EntitiesByProperty" VALUES (?, ?, ?, ?)' % prefix,
+          RowGenerator(group))
+
+  def __AllocateIds(self, conn, prefix, size):
+    """Allocates IDs.
+
+    Args:
+      conn: An SQLite connection object.
+      prefix: A table namespace prefix.
+      size: Number of IDs to allocate.
+    Returns:
+      int: The beginning of a range of size IDs.
+    """
+    self.__id_lock.acquire()
+    next_id, block_size = self.__id_map.get(prefix, (0, 0))
+    if size >= block_size:
+      block_size = max(1000, size)
+      c = conn.execute(
+          'UPDATE IdSeq SET next_id = next_id + ? WHERE prefix = ?',
+          (block_size, prefix))
+      assert c.rowcount == 1
+      c = conn.execute('SELECT next_id FROM IdSeq WHERE prefix = ? LIMIT 1',
+                       (prefix,))
+      next_id = c.fetchone()[0] - block_size
+
+    ret = next_id
+
+    next_id += size
+    block_size -= size
+    self.__id_map[prefix] = (next_id, block_size)
+    self.__id_lock.release()
+
+    return ret
+
+  def MakeSyncCall(self, service, call, request, response):
+    """The main RPC entry point.
service must be 'datastore_v3'.""" + self.AssertPbIsInitialized(request) + try: + apiproxy_stub.APIProxyStub.MakeSyncCall(self, service, call, request, + response) + except sqlite3.OperationalError, e: + if e.args[0] == 'database is locked': + raise datastore_errors.Timeout('Database is locked.') + else: + raise + self.AssertPbIsInitialized(response) + + def AssertPbIsInitialized(self, pb): + """Raises an exception if the given PB is not initialized and valid.""" + explanation = [] + assert pb.IsInitialized(explanation), explanation + pb.Encode() + + def QueryHistory(self): + """Returns a dict that maps Query PBs to times they've been run.""" + return dict((pb, times) for pb, times in self.__query_history.items() if + pb.app() == self.__app_id) + + def __PutEntities(self, conn, entities): + self.__DeleteIndexEntries(conn, [e.key() for e in entities]) + self.__InsertEntities(conn, entities) + self.__InsertIndexEntries(conn, entities) + + def __DeleteEntities(self, conn, keys): + self.__DeleteIndexEntries(conn, keys) + self.__DeleteEntityRows(conn, keys, 'Entities') + + def _Dynamic_Put(self, put_request, put_response): + conn = self.__GetConnection(put_request.transaction()) + try: + entities = put_request.entity_list() + for entity in entities: + self.__ValidateKey(entity.key()) + + for prop in itertools.chain(entity.property_list(), + entity.raw_property_list()): + if prop.value().has_uservalue(): + uid = md5.new(prop.value().uservalue().email().lower()).digest() + uid = '1' + ''.join(['%02d' % ord(x) for x in uid])[:20] + prop.mutable_value().mutable_uservalue().set_obfuscated_gaiaid(uid) + + assert entity.has_key() + assert entity.key().path().element_size() > 0 + + last_path = entity.key().path().element_list()[-1] + if last_path.id() == 0 and not last_path.has_name(): + id_ = self.__AllocateIds(conn, self.__GetTablePrefix(entity.key()), 1) + last_path.set_id(id_) + + assert entity.entity_group().element_size() == 0 + group = entity.mutable_entity_group() + root = entity.key().path().element(0) + group.add_element().CopyFrom(root) + + else: + assert (entity.has_entity_group() and + entity.entity_group().element_size() > 0) + + if put_request.transaction().handle(): + self.__tx_writes[entity.key()] = entity + self.__tx_deletes.discard(entity.key()) + + if not put_request.transaction().handle(): + self.__PutEntities(conn, entities) + put_response.key_list().extend([e.key() for e in entities]) + finally: + self.__ReleaseConnection(conn, put_request.transaction()) + + def _Dynamic_Get(self, get_request, get_response): + conn = self.__GetConnection(get_request.transaction()) + try: + for key in get_request.key_list(): + self.__ValidateAppId(key.app()) + prefix = self.__GetTablePrefix(key) + c = conn.execute( + 'SELECT entity FROM "%s!Entities" WHERE __path__ = ?' 
% (prefix,), + (self.__EncodeIndexPB(key.path()),)) + group = get_response.add_entity() + row = c.fetchone() + if row: + group.mutable_entity().ParseFromString(row[0]) + finally: + self.__ReleaseConnection(conn, get_request.transaction()) + + def _Dynamic_Delete(self, delete_request, delete_response): + conn = self.__GetConnection(delete_request.transaction()) + try: + for key in delete_request.key_list(): + self.__ValidateAppId(key.app()) + if delete_request.transaction().handle(): + self.__tx_deletes.add(key) + self.__tx_writes.pop(key, None) + + if not delete_request.transaction().handle(): + self.__DeleteEntities(conn, delete_request.key_list()) + finally: + self.__ReleaseConnection(conn, delete_request.transaction()) + + def __GenerateFilterInfo(self, filters, query): + """Transform a list of filters into a more usable form. + + Args: + filters: A list of filter PBs. + query: The query to generate filter info for. + Returns: + A dict mapping property names to lists of (op, value) tuples. + """ + filter_info = {} + for filt in filters: + assert filt.property_size() == 1 + prop = filt.property(0) + value = prop.value() + if prop.name() == '__key__': + value = ReferencePropertyToReference(value.referencevalue()) + assert value.app() == query.app() + assert value.name_space() == query.name_space() + value = value.path() + filter_info.setdefault(prop.name(), []).append( + (filt.op(), self.__EncodeIndexPB(value))) + return filter_info + + def __GenerateOrderInfo(self, orders): + """Transform a list of orders into a more usable form. + + Args: + orders: A list of order PBs. + Returns: + A list of (property, direction) tuples. + """ + orders = [(order.property(), order.direction()) for order in orders] + if orders and orders[-1] == ('__key__', datastore_pb.Query_Order.ASCENDING): + orders.pop() + return orders + + def __GetPrefixRange(self, prefix): + """Returns a (min, max) range that encompasses the given prefix. + + Args: + prefix: A string prefix to filter for. Must be a PB encodable using + __EncodeIndexPB. + Returns: + (min, max): Start and end string values to filter on. 
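# [Editorial sketch] __GenerateFilterInfo above flattens the query's filter PBs
# into {property_name: [(op, encoded_value), ...]}, with __key__ filters
# rewritten to encoded paths so every strategy below compares plain byte
# strings instead of protocol buffers. The same shape, minus PBs (illustrative):
def build_filter_info(filters, encode):
  # filters: iterable of (name, op, raw_value); encode: value -> sortable bytes.
  info = {}
  for name, op, value in filters:
    info.setdefault(name, []).append((op, encode(value)))
  return info

# build_filter_info([('count', '>=', 3), ('count', '<', 7)], encode=chr)
# -> {'count': [('>=', '\x03'), ('<', '\x07')]}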
+ """ + ancestor_min = self.__EncodeIndexPB(prefix) + ancestor_max = buffer(str(ancestor_min) + '\xfb\xff\xff\xff\x89') + return ancestor_min, ancestor_max + + def __KindQuery(self, query, filter_info, order_info): + """Performs kind only, kind and ancestor, and ancestor only queries.""" + if not (set(filter_info.keys()) | + set(x[0] for x in order_info)).issubset(['__key__']): + return None + if len(order_info) > 1: + return None + + filters = [] + filters.extend(('__path__', op, value) for op, value + in filter_info.get('__key__', [])) + if query.has_kind(): + filters.append(('kind', datastore_pb.Query_Filter.EQUAL, query.kind())) + if query.has_ancestor(): + amin, amax = self.__GetPrefixRange(query.ancestor().path()) + filters.append(('__path__', + datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL, amin)) + filters.append(('__path__', datastore_pb.Query_Filter.LESS_THAN, amax)) + + if order_info: + orders = [('__path__', order_info[0][1])] + else: + orders = [('__path__', datastore_pb.Query_Order.ASCENDING)] + + params = [] + query = ('SELECT Entities.__path__, Entities.entity, %s ' + 'FROM "%s!Entities" AS Entities %s %s' % ( + ','.join(x[0] for x in orders), + self.__GetTablePrefix(query), + self.__CreateFilterString(filters, params), + self.__CreateOrderString(orders))) + return query, params + + def __SinglePropertyQuery(self, query, filter_info, order_info): + """Performs queries satisfiable by the EntitiesByProperty table.""" + property_names = set(filter_info.keys()) + property_names.update(x[0] for x in order_info) + property_names.discard('__key__') + if len(property_names) != 1: + return None + + property_name = property_names.pop() + filter_ops = filter_info.get(property_name, []) + + if len([1 for o, _ in filter_ops + if o == datastore_pb.Query_Filter.EQUAL]) > 1: + return None + + if len(order_info) > 1 or (order_info and order_info[0][0] == '__key__'): + return None + + if query.has_ancestor(): + return None + + if not query.has_kind(): + return None + + prefix = self.__GetTablePrefix(query) + filters = [] + filters.append(('EntitiesByProperty.kind', + datastore_pb.Query_Filter.EQUAL, query.kind())) + filters.append(('name', datastore_pb.Query_Filter.EQUAL, property_name)) + for op, value in filter_ops: + if property_name == '__key__': + filters.append(('EntitiesByProperty.__path__', op, value)) + else: + filters.append(('value', op, value)) + + orders = [('EntitiesByProperty.kind', datastore_pb.Query_Order.ASCENDING), + ('name', datastore_pb.Query_Order.ASCENDING)] + if order_info: + orders.append(('value', order_info[0][1])) + else: + orders.append(('value', datastore_pb.Query_Order.ASCENDING)) + orders.append(('EntitiesByProperty.__path__', + datastore_pb.Query_Order.ASCENDING)) + + params = [] + format_args = ( + ','.join(x[0] for x in orders[2:]), + prefix, + prefix, + self.__CreateFilterString(filters, params), + self.__CreateOrderString(orders)) + query = ('SELECT Entities.__path__, Entities.entity, %s ' + 'FROM "%s!EntitiesByProperty" AS EntitiesByProperty INNER JOIN ' + '"%s!Entities" AS Entities USING (__path__) %s %s' % format_args) + return query, params + + def __StarSchemaQueryPlan(self, query, filter_info, order_info): + """Executes a query using a 'star schema' based on EntitiesByProperty. + + A 'star schema' is a join between an objects table (Entities) and multiple + instances of a facts table (EntitiesByProperty). 
Ideally, this will result
+    in a merge join if the only filters are inequalities and the sort orders
+    match those in the index for the facts table; otherwise, the DB will do its
+    best to satisfy the query efficiently.
+
+    Args:
+      query: The datastore_pb.Query PB.
+      filter_info: A dict mapping properties filtered on to (op, value) tuples.
+      order_info: A list of (property, direction) tuples.
+    Returns:
+      (query, params): An SQL query string and list of parameters for it.
+    """
+    filter_sets = []
+    for name, filter_ops in filter_info.items():
+      filter_sets.extend((name, [x]) for x in filter_ops
+                         if x[0] == datastore_pb.Query_Filter.EQUAL)
+      ineq_ops = [x for x in filter_ops
+                  if x[0] != datastore_pb.Query_Filter.EQUAL]
+      if ineq_ops:
+        filter_sets.append((name, ineq_ops))
+
+    for prop, _ in order_info:
+      if prop == '__key__':
+        continue
+      if prop not in filter_info:
+        filter_sets.append((prop, []))
+
+    prefix = self.__GetTablePrefix(query)
+
+    joins = []
+    filters = []
+    join_name_map = {}
+    for name, filter_ops in filter_sets:
+      join_name = 'ebp_%d' % (len(joins),)
+      join_name_map.setdefault(name, join_name)
+      joins.append(
+          'INNER JOIN "%s!EntitiesByProperty" AS %s '
+          'ON Entities.__path__ = %s.__path__'
+          % (prefix, join_name, join_name))
+      filters.append(('%s.kind' % join_name, datastore_pb.Query_Filter.EQUAL,
+                      query.kind()))
+      filters.append(('%s.name' % join_name, datastore_pb.Query_Filter.EQUAL,
+                      name))
+      for op, value in filter_ops:
+        filters.append(('%s.value' % join_name, op, buffer(value)))
+      if query.has_ancestor():
+        amin, amax = self.__GetPrefixRange(query.ancestor().path())
+        filters.append(('%s.__path__' % join_name,
+                        datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL, amin))
+        filters.append(('%s.__path__' % join_name,
+                        datastore_pb.Query_Filter.LESS_THAN, amax))
+
+    orders = []
+    for prop, order in order_info:
+      if prop == '__key__':
+        orders.append(('Entities.__path__', order))
+      else:
+        prop = '%s.value' % (join_name_map[prop],)
+        orders.append((prop, order))
+    if not order_info or order_info[-1][0] != '__key__':
+      orders.append(('Entities.__path__', datastore_pb.Query_Order.ASCENDING))
+
+    params = []
+    format_args = (
+        ','.join(x[0] for x in orders),
+        prefix,
+        ' '.join(joins),
+        self.__CreateFilterString(filters, params),
+        self.__CreateOrderString(orders))
+    query = ('SELECT Entities.__path__, Entities.entity, %s '
+             'FROM "%s!Entities" AS Entities %s %s %s' % format_args)
+    return query, params
+
+  def __MergeJoinQuery(self, query, filter_info, order_info):
+    if order_info:
+      return None
+    if query.has_ancestor():
+      return None
+    if not query.has_kind():
+      return None
+    for filter_ops in filter_info.values():
+      for op, _ in filter_ops:
+        if op != datastore_pb.Query_Filter.EQUAL:
+          return None
+
+    return self.__StarSchemaQueryPlan(query, filter_info, order_info)
+
+  def __LastResortQuery(self, query, filter_info, order_info):
+    """Last resort query plan that executes queries requiring composite indexes.
+
+    Args:
+      query: The datastore_pb.Query PB.
+      filter_info: A dict mapping properties filtered on to (op, value) tuples.
+      order_info: A list of (property, direction) tuples.
+    Returns:
+      (query, params): An SQL query string and list of parameters for it.
+    """
+    if self.__require_indexes:
+      index = self.__FindIndexForQuery(query)
+      if not index:
+        raise apiproxy_errors.ApplicationError(
+            datastore_pb.Error.NEED_INDEX,
+            'This query requires a composite index that is not defined. 
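# [Editorial illustration] For a query like "kind = Person AND age = 33 AND
# city = Paris" (property names hypothetical), the star-schema plan above
# joins one EntitiesByProperty alias per filter onto Entities, roughly:
#
#   SELECT Entities.__path__, Entities.entity, Entities.__path__
#   FROM "prefix!Entities" AS Entities
#   INNER JOIN "prefix!EntitiesByProperty" AS ebp_0
#           ON Entities.__path__ = ebp_0.__path__
#   INNER JOIN "prefix!EntitiesByProperty" AS ebp_1
#           ON Entities.__path__ = ebp_1.__path__
#   WHERE ebp_0.kind = ? AND ebp_0.name = ? AND ebp_0.value = ?
#     AND ebp_1.kind = ? AND ebp_1.name = ? AND ebp_1.value = ?
#   ORDER BY Entities.__path__
#
# __MergeJoinQuery admits only all-equality, no-order, no-ancestor queries to
# this plan: exactly the case SQLite can answer by intersecting the sorted
# (kind, name, value, __path__) index rows.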
' + 'You must update the index.yaml file in your application root.') + return self.__StarSchemaQueryPlan(query, filter_info, order_info) + + def __FindIndexForQuery(self, query): + """Finds an index that can be used to satisfy the provided query. + + Args: + query: A datastore_pb.Query PB. + Returns: + An entity_pb.CompositeIndex PB, if a suitable index exists; otherwise None + """ + unused_required, kind, ancestor, props, num_eq_filters = ( + datastore_index.CompositeIndexForQuery(query)) + required_key = (kind, ancestor, props) + indexes = self.__indexes.get(query.app(), {}).get(kind, []) + + eq_filters_set = set(props[:num_eq_filters]) + remaining_filters = props[num_eq_filters:] + for index in indexes: + definition = datastore_index.ProtoToIndexDefinition(index) + index_key = datastore_index.IndexToKey(definition) + if required_key == index_key: + return index + if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]: + this_props = index_key[2] + this_eq_filters_set = set(this_props[:num_eq_filters]) + this_remaining_filters = this_props[num_eq_filters:] + if (eq_filters_set == this_eq_filters_set and + remaining_filters == this_remaining_filters): + return index + + _QUERY_STRATEGIES = [ + __KindQuery, + __SinglePropertyQuery, + __MergeJoinQuery, + __LastResortQuery, + ] + + def __GetQueryCursor(self, conn, query): + """Returns an SQLite query cursor for the provided query. + + Args: + conn: The SQLite connection. + query: A datastore_pb.Query protocol buffer. + Returns: + A QueryCursor object. + """ + if query.has_transaction() and not query.has_ancestor(): + raise apiproxy_errors.ApplicationError( + datastore_pb.Error.BAD_REQUEST, + 'Only ancestor queries are allowed inside transactions.') + + num_components = len(query.filter_list()) + len(query.order_list()) + if query.has_ancestor(): + num_components += 1 + if num_components > _MAX_QUERY_COMPONENTS: + raise apiproxy_errors.ApplicationError( + datastore_pb.Error.BAD_REQUEST, + ('query is too large. 
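# [Editorial sketch] The matching rule in __FindIndexForQuery above: a stored
# composite index satisfies the query if kind and ancestor match, its leading
# equality columns equal the query's as a *set* (order irrelevant), and the
# trailing columns match exactly (order and direction matter). Hypothetical
# helper, not SDK code:
def index_matches(required_key, candidate_key, num_eq_filters):
  # Keys are (kind, ancestor, props); props is a tuple of (name, direction).
  if required_key[:2] != candidate_key[:2]:
    return False
  req_props, cand_props = required_key[2], candidate_key[2]
  return (set(req_props[:num_eq_filters]) == set(cand_props[:num_eq_filters])
          and req_props[num_eq_filters:] == cand_props[num_eq_filters:])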
may not have more than %s filters' + ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS)) + + app_id = query.app() + self.__ValidateAppId(app_id) + + filters, orders = datastore_index.Normalize(query.filter_list(), + query.order_list()) + + filter_info = self.__GenerateFilterInfo(filters, query) + order_info = self.__GenerateOrderInfo(orders) + + for strategy in DatastoreSqliteStub._QUERY_STRATEGIES: + result = strategy(self, query, filter_info, order_info) + if result: + break + else: + raise apiproxy_errors.ApplicationError( + datastore_pb.Error.BAD_REQUEST, + 'No strategy found to satisfy query.') + + sql_stmt, params = result + + if self.__verbose: + logging.info("Executing statement '%s' with arguments %r", + sql_stmt, [str(x) for x in params]) + db_cursor = conn.execute(sql_stmt, params) + cursor = QueryCursor(query, db_cursor) + if query.has_compiled_cursor() and query.compiled_cursor().position_size(): + cursor.ResumeFromCompiledCursor(query.compiled_cursor()) + if query.has_offset(): + cursor.Skip(query.offset()) + + clone = datastore_pb.Query() + clone.CopyFrom(query) + clone.clear_hint() + clone.clear_limit() + clone.clear_count() + clone.clear_offset() + self.__query_history[clone] = self.__query_history.get(clone, 0) + 1 + + return cursor + + def _Dynamic_RunQuery(self, query, query_result): + conn = self.__GetConnection(query.transaction()) + try: + cursor = self.__GetQueryCursor(conn, query) + + self.__cursor_lock.acquire() + cursor_id = self.__next_cursor_id + self.__next_cursor_id += 1 + self.__cursor_lock.release() + + cursor_pb = query_result.mutable_cursor() + cursor_pb.set_app(query.app()) + cursor_pb.set_cursor(cursor_id) + + if query.has_count(): + count = query.count() + elif query.has_limit(): + count = query.limit() + else: + count = _BATCH_SIZE + + cursor.PopulateQueryResult(count, query_result) + self.__cursors[cursor_pb] = cursor + finally: + self.__ReleaseConnection(conn, query.transaction()) + + def _Dynamic_Next(self, next_request, query_result): + self.__ValidateAppId(next_request.cursor().app()) + + try: + cursor = self.__cursors[next_request.cursor()] + except KeyError: + raise apiproxy_errors.ApplicationError( + datastore_pb.Error.BAD_REQUEST, + 'Cursor %d not found' % next_request.cursor().cursor()) + + assert cursor.app == next_request.cursor().app() + + count = _BATCH_SIZE + if next_request.has_count(): + count = next_request.count() + cursor.PopulateQueryResult(count, query_result) + + def _Dynamic_Count(self, query, integer64proto): + if query.has_limit(): + query.set_limit(min(query.limit(), _MAXIMUM_RESULTS)) + else: + query.set_limit(_MAXIMUM_RESULTS) + + conn = self.__GetConnection(query.transaction()) + try: + cursor = self.__GetQueryCursor(conn, query) + integer64proto.set_value(cursor.Count()) + finally: + self.__ReleaseConnection(conn, query.transaction()) + + def _Dynamic_BeginTransaction(self, request, transaction): + self.__ValidateAppId(request.app()) + + self.__connection_lock.acquire() + assert self.__current_transaction is None + handle = self.__next_tx_handle + self.__next_tx_handle += 1 + + transaction.set_app(request.app()) + transaction.set_handle(handle) + self.__current_transaction = handle + + def _Dynamic_AddActions(self, request, _): + + if ((len(self.__tx_actions) + request.add_request_size()) > + _MAX_ACTIONS_PER_TXN): + raise apiproxy_errors.ApplicationError( + datastore_pb.Error.BAD_REQUEST, + 'Too many messages, maximum allowed %s' % _MAX_ACTIONS_PER_TXN) + + new_actions = [] + for add_request in 
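# [Editorial sketch] RunQuery/Next above implement batching: RunQuery registers
# a live QueryCursor under a fresh integer id and returns the first batch; Next
# looks the cursor up and pulls more rows. The bookkeeping, reduced to its
# shape (illustrative, not SDK API; fetch is a hypothetical method):
import threading

class CursorRegistry(object):
  def __init__(self):
    self._lock = threading.Lock()
    self._next_id = 1
    self._cursors = {}

  def register(self, cursor):
    self._lock.acquire()            # ids are allocated under a lock,
    try:                            # mirroring __cursor_lock above
      cursor_id = self._next_id
      self._next_id += 1
    finally:
      self._lock.release()
    self._cursors[cursor_id] = cursor
    return cursor_id

  def next_batch(self, cursor_id, count, default=20):
    cursor = self._cursors[cursor_id]   # KeyError -> BAD_REQUEST in the stub
    return cursor.fetch(count or default)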
request.add_request_list(): + self.__ValidateTransaction(add_request.transaction()) + clone = taskqueue_service_pb.TaskQueueAddRequest() + clone.CopyFrom(add_request) + clone.clear_transaction() + new_actions.append(clone) + + self.__tx_actions.extend(new_actions) + + def _Dynamic_Commit(self, transaction, _): + assert self.__current_transaction == transaction.handle() + conn = self.__connection + + try: + self.__PutEntities(conn, self.__tx_writes.values()) + self.__DeleteEntities(conn, self.__tx_deletes) + + for action in self.__tx_actions: + try: + apiproxy_stub_map.MakeSyncCall( + 'taskqueue', 'Add', action, api_base_pb.VoidProto()) + except apiproxy_errors.ApplicationError, e: + logging.warning('Transactional task %s has been dropped, %s', + action, e) + finally: + self.__current_transaction = None + self.__tx_actions = [] + self.__tx_writes = {} + self.__tx_deletes = set() + self.__ReleaseConnection(conn, None) + + def _Dynamic_Rollback(self, transaction, _): + conn = self.__GetConnection(transaction) + self.__current_transaction = None + self.__tx_actions = [] + self.__tx_writes = {} + self.__tx_deletes = set() + self.__ReleaseConnection(conn, None, True) + + def _Dynamic_GetSchema(self, req, schema): + conn = self.__GetConnection(None) + try: + prefix = self.__GetTablePrefix(req) + + filters = [] + if req.has_start_kind(): + filters.append(('kind', datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL, + req.start_kind())) + if req.has_end_kind(): + filters.append(('kind', datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL, + req.end_kind())) + + params = [] + if req.properties(): + sql_stmt = ('SELECT kind, name, value FROM "%s!EntitiesByProperty" %s ' + 'GROUP BY kind, name, substr(value, 1, 1) ORDER BY kind' + % (prefix, self.__CreateFilterString(filters, params))) + else: + sql_stmt = ('SELECT kind FROM "%s!Entities" %s GROUP BY kind' + % (prefix, self.__CreateFilterString(filters, params))) + c = conn.execute(sql_stmt, params) + + kind = None + current_name = None + kind_pb = None + for row in c.fetchall(): + if row[0] != kind: + if kind_pb: + schema.kind_list().append(kind_pb) + kind = row[0].encode('utf-8') + kind_pb = entity_pb.EntityProto() + kind_pb.mutable_key().set_app('') + kind_pb.mutable_key().mutable_path().add_element().set_type(kind) + kind_pb.mutable_entity_group() + + if req.properties(): + name, value_data = row[1:] + if current_name != name: + current_name = name + prop_pb = kind_pb.add_property() + prop_pb.set_name(name.encode('utf-8')) + prop_pb.set_multiple(False) + + value_decoder = sortable_pb_encoder.Decoder( + array.array('B', str(value_data))) + value_pb = prop_pb.mutable_value() + value_pb.Merge(value_decoder) + + if value_pb.has_int64value(): + value_pb.set_int64value(0) + if value_pb.has_booleanvalue(): + value_pb.set_booleanvalue(False) + if value_pb.has_stringvalue(): + value_pb.set_stringvalue('none') + if value_pb.has_doublevalue(): + value_pb.set_doublevalue(0.0) + if value_pb.has_pointvalue(): + value_pb.mutable_pointvalue().set_x(0.0) + value_pb.mutable_pointvalue().set_y(0.0) + if value_pb.has_uservalue(): + value_pb.mutable_uservalue().set_gaiaid(0) + value_pb.mutable_uservalue().set_email('none') + value_pb.mutable_uservalue().set_auth_domain('none') + value_pb.mutable_uservalue().clear_nickname() + value_pb.mutable_uservalue().clear_obfuscated_gaiaid() + if value_pb.has_referencevalue(): + value_pb.clear_referencevalue() + value_pb.mutable_referencevalue().set_app('none') + pathelem = value_pb.mutable_referencevalue().add_pathelement() + 
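# [Editorial sketch] _Dynamic_Commit above replays the buffered mutations and
# only then fires the transaction's queued tasks; a task that fails to enqueue
# is logged and dropped without rolling back the datastore work. The shape of
# that sequence (enqueue and the other callables are injected; all names
# illustrative):
def commit(conn, put_entities, delete_entities, writes, deletes, actions,
           enqueue, log):
  put_entities(conn, writes.values())     # INSERT OR REPLACE + index rows
  delete_entities(conn, deletes)
  for action in actions:
    try:
      enqueue(action)
    except Exception, e:
      log('Transactional task %s has been dropped, %s' % (action, e))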
pathelem.set_type('none') + pathelem.set_name('none') + + if kind_pb: + schema.kind_list().append(kind_pb) + finally: + self.__ReleaseConnection(conn, None) + + def _Dynamic_AllocateIds(self, allocate_ids_request, allocate_ids_response): + conn = self.__GetConnection(None) + + model_key = allocate_ids_request.model_key() + size = allocate_ids_request.size() + + self.__ValidateAppId(model_key.app()) + + first_id = self.__AllocateIds(conn, self.__GetTablePrefix(model_key), size) + allocate_ids_response.set_start(first_id) + allocate_ids_response.set_end(first_id + size - 1) + + self.__ReleaseConnection(conn, None) + + def __FindIndex(self, index): + """Finds an existing index by definition. + + Args: + index: entity_pb.CompositeIndex + + Returns: + entity_pb.CompositeIndex, if it exists; otherwise None + """ + app_indexes = self.__indexes.get(index.app_id(), {}) + for stored_index in app_indexes.get(index.definition().entity_type(), []): + if index.definition() == stored_index.definition(): + return stored_index + + return None + + def _Dynamic_CreateIndex(self, index, id_response): + app_id = index.app_id() + kind = index.definition().entity_type() + + self.__ValidateAppId(app_id) + if index.id() != 0: + raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST, + 'New index id must be 0.') + + self.__index_lock.acquire() + try: + if self.__FindIndex(index): + raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST, + 'Index already exists.') + + next_id = max([idx.id() for x in self.__indexes.get(app_id, {}).values() + for idx in x] + [0]) + 1 + index.set_id(next_id) + id_response.set_value(next_id) + + clone = entity_pb.CompositeIndex() + clone.CopyFrom(index) + self.__indexes.setdefault(app_id, {}).setdefault(kind, []).append(clone) + + conn = self.__GetConnection(None) + try: + self.__WriteIndexData(conn, app_id) + finally: + self.__ReleaseConnection(conn, None) + finally: + self.__index_lock.release() + + def _Dynamic_GetIndices(self, app_str, composite_indices): + self.__ValidateAppId(app_str.value()) + + index_list = composite_indices.index_list() + for indexes in self.__indexes.get(app_str.value(), {}).values(): + index_list.extend(indexes) + + def _Dynamic_UpdateIndex(self, index, _): + self.__ValidateAppId(index.app_id()) + my_index = self.__FindIndex(index) + if not my_index: + raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST, + "Index doesn't exist.") + elif (index.state() != my_index.state() and + index.state() not in self._INDEX_STATE_TRANSITIONS[my_index.state()]): + raise apiproxy_errors.ApplicationError( + datastore_pb.Error.BAD_REQUEST, + 'Cannot move index state from %s to %s' % + (entity_pb.CompositeIndex.State_Name(my_index.state()), + (entity_pb.CompositeIndex.State_Name(index.state())))) + + self.__index_lock.acquire() + try: + my_index.set_state(index.state()) + finally: + self.__index_lock.release() + + def _Dynamic_DeleteIndex(self, index, _): + app_id = index.app_id() + kind = index.definition().entity_type() + self.__ValidateAppId(app_id) + + my_index = self.__FindIndex(index) + if not my_index: + raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST, + "Index doesn't exist.") + + conn = self.__GetConnection(None) + try: + self.__WriteIndexData(conn, app_id) + finally: + self.__ReleaseConnection(conn, None) + self.__index_lock.acquire() + try: + self.__indexes[app_id][kind].remove(my_index) + finally: + self.__index_lock.release() diff --git a/google-appengine/google/appengine/datastore/entity_pb.py 
b/google-appengine/google/appengine/datastore/entity_pb.py
old mode 100644
new mode 100755
diff --git a/google-appengine/google/appengine/datastore/sortable_pb_encoder.py b/google-appengine/google/appengine/datastore/sortable_pb_encoder.py
new file mode 100644
index 0000000..e1d4e65
--- /dev/null
+++ b/google-appengine/google/appengine/datastore/sortable_pb_encoder.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""An Encoder class for Protocol Buffers that preserves sorting characteristics.
+
+This is used by datastore_sqlite_stub in order to index entities in a fashion
+that preserves the datastore's sorting semantics. Broadly, there are four
+changes from regular PB encoding:
+
+ - Strings are escaped and null terminated instead of length-prefixed. The
+   escaping replaces \0 with \1\1 and \1 with \1\2, thus preserving the ordering
+   of the original string.
+ - Variable length integers are encoded using a variable length encoding that
+   preserves order. The first byte stores the value itself if it's between
+   -119 and 119; otherwise it stores the number of bytes that follow.
+ - Numbers are stored big endian instead of little endian.
+ - Negative doubles are entirely negated, while positive doubles have their sign
+   bit flipped.
+
+Warning:
+  Due to the way nested Protocol Buffers are encoded, this encoder will NOT
+  preserve sorting characteristics for embedded protocol buffers!
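[Editorial worked example, derived from the constants defined below
(_MAX_INLINE = 119, _OFFSET = 9, _POS_OFFSET = 247): the integers
-1000, -5, 0, 7 and 300 encode as

    -1000  ->  07 FC 8E   (length byte 9 - 2, then the big-endian bytes of
                           881 = -119 - (-1000), complemented)
    -5     ->  7B         (inline: 9 + (-5 + 119) = 123)
    0      ->  80         (inline: 9 + 119 = 128)
    7      ->  87
    300    ->  F8 B5      (length byte 247 + 1, then the big-endian bytes of
                           181 = 300 - 119)

so a plain bytewise comparison (07.. < 7B < 80 < 87 < F8..) reproduces numeric
order, which is what lets the SQLite stub compare encoded values directly in
SQL.]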
+""" + + + + + + + +import array +import struct + +from google.net.proto import ProtocolBuffer + + +_MAX_UNSIGNED_BYTE = 255 + +_MAX_LONG_BYTES = 8 + +_MAX_INLINE = (_MAX_UNSIGNED_BYTE - (2 * _MAX_LONG_BYTES)) / 2 +_MIN_INLINE = -_MAX_INLINE +_OFFSET = 1 + 8 +_POS_OFFSET = _OFFSET + _MAX_INLINE * 2 + + +class Encoder(ProtocolBuffer.Encoder): + """Encodes Protocol Buffers in a form that sorts nicely.""" + + def put16(self, value): + if value < 0 or value >= (1<<16): + raise ProtocolBuffer.ProtocolBufferEncodeError, 'u16 too big' + self.buf.append((value >> 8) & 0xff) + self.buf.append((value >> 0) & 0xff) + return + + def put32(self, value): + if value < 0 or value >= (1L<<32): + raise ProtocolBuffer.ProtocolBufferEncodeError, 'u32 too big' + self.buf.append((value >> 24) & 0xff) + self.buf.append((value >> 16) & 0xff) + self.buf.append((value >> 8) & 0xff) + self.buf.append((value >> 0) & 0xff) + return + + def put64(self, value): + if value < 0 or value >= (1L<<64): + raise ProtocolBuffer.ProtocolBufferEncodeError, 'u64 too big' + self.buf.append((value >> 56) & 0xff) + self.buf.append((value >> 48) & 0xff) + self.buf.append((value >> 40) & 0xff) + self.buf.append((value >> 32) & 0xff) + self.buf.append((value >> 24) & 0xff) + self.buf.append((value >> 16) & 0xff) + self.buf.append((value >> 8) & 0xff) + self.buf.append((value >> 0) & 0xff) + return + + def _PutVarInt(self, value): + if value is None: + self.buf.append(0) + return + + if value >= _MIN_INLINE and value <= _MAX_INLINE: + value = _OFFSET + (value - _MIN_INLINE) + self.buf.append(value & 0xff) + return + + negative = False + + if value < 0: + value = _MIN_INLINE - value + negative = True + else: + value = value - _MAX_INLINE + + len = 0 + w = value + while w > 0: + w >>= 8 + len += 1 + + if negative: + head = _OFFSET - len + else: + head = _POS_OFFSET + len + self.buf.append(head & 0xff) + + for i in range(len - 1, -1, -1): + b = value >> (i * 8) + if negative: + b = _MAX_UNSIGNED_BYTE - (b & 0xff) + self.buf.append(b & 0xff) + + def putVarInt32(self, value): + if value >= 0x80000000 or value < -0x80000000: + raise ProtocolBuffer.ProtocolBufferEncodeError, 'int32 too big' + self._PutVarInt(value) + + def putVarInt64(self, value): + if value >= 0x8000000000000000 or value < -0x8000000000000000: + raise ProtocolBuffer.ProtocolBufferEncodeError, 'int64 too big' + self._PutVarInt(value) + + def putVarUint64(self, value): + if value < 0 or value >= 0x10000000000000000: + raise ProtocolBuffer.ProtocolBufferEncodeError, 'uint64 too big' + self._PutVarInt(value) + + def putFloat(self, value): + encoded = array.array('B') + encoded.fromstring(struct.pack('>f', value)) + if value < 0: + encoded[0] ^= 0xFF + encoded[1] ^= 0xFF + encoded[2] ^= 0xFF + encoded[3] ^= 0xFF + else: + encoded[0] ^= 0x80 + self.buf.extend(encoded) + + def putDouble(self, value): + encoded = array.array('B') + encoded.fromstring(struct.pack('>d', value)) + if value < 0: + encoded[0] ^= 0xFF + encoded[1] ^= 0xFF + encoded[2] ^= 0xFF + encoded[3] ^= 0xFF + encoded[4] ^= 0xFF + encoded[5] ^= 0xFF + encoded[6] ^= 0xFF + encoded[7] ^= 0xFF + else: + encoded[0] ^= 0x80 + self.buf.extend(encoded) + + def putPrefixedString(self, value): + self.buf.fromstring(value.replace('\1', '\1\2').replace('\0', '\1\1') + '\0') + + +class Decoder(ProtocolBuffer.Decoder): + def __init__(self, buf, idx=0, limit=None): + if not limit: + limit = len(buf) + ProtocolBuffer.Decoder.__init__(self, buf, idx, limit) + + def get16(self): + if self.idx + 2 > self.limit: + raise 
ProtocolBuffer.ProtocolBufferDecodeError, 'truncated' + c = self.buf[self.idx] + d = self.buf[self.idx + 1] + self.idx += 2 + return (c << 8) | d + + def get32(self): + if self.idx + 4 > self.limit: + raise ProtocolBuffer.ProtocolBufferDecodeError, 'truncated' + c = long(self.buf[self.idx]) + d = self.buf[self.idx + 1] + e = self.buf[self.idx + 2] + f = self.buf[self.idx + 3] + self.idx += 4 + return (c << 24) | (d << 16) | (e << 8) | f + + def get64(self): + if self.idx + 8 > self.limit: + raise ProtocolBuffer.ProtocolBufferDecodeError, 'truncated' + c = long(self.buf[self.idx]) + d = long(self.buf[self.idx + 1]) + e = long(self.buf[self.idx + 2]) + f = long(self.buf[self.idx + 3]) + g = long(self.buf[self.idx + 4]) + h = self.buf[self.idx + 5] + i = self.buf[self.idx + 6] + j = self.buf[self.idx + 7] + self.idx += 8 + return ((c << 56) | (d << 48) | (e << 40) | (f << 32) | (g << 24) + | (h << 16) | (i << 8) | j) + + def getVarInt64(self): + b = self.get8() + if b >= _OFFSET and b <= _POS_OFFSET: + return b - _OFFSET + _MIN_INLINE + if b == 0: + return None + + if b < _OFFSET: + negative = True + bytes = _OFFSET - b + else: + negative = False + bytes = b - _POS_OFFSET + + ret = 0 + for i in range(bytes): + b = self.get8() + if negative: + b = _MAX_UNSIGNED_BYTE - b + ret = ret << 8 | b + + if negative: + return _MIN_INLINE - ret + else: + return ret + _MAX_INLINE + + def getVarInt32(self): + result = self.getVarInt64() + if result >= 0x80000000L or result < -0x80000000L: + raise ProtocolBuffer.ProtocolBufferDecodeError, 'corrupted' + return result + + def getVarUint64(self): + result = self.getVarInt64() + if result < 0: + raise ProtocolBuffer.ProtocolBufferDecodeError, 'corrupted' + return result + + def getFloat(self): + if self.idx + 4 > self.limit: + raise ProtocolBuffer.ProtocolBufferDecodeError, 'truncated' + a = self.buf[self.idx:self.idx+4] + self.idx += 4 + if a[0] & 0x80: + a[0] ^= 0x80 + else: + a = [x ^ 0xFF for x in a] + return struct.unpack('>f', array.array('B', a).tostring())[0] + + def getDouble(self): + if self.idx + 8 > self.limit: + raise ProtocolBuffer.ProtocolBufferDecodeError, 'truncated' + a = self.buf[self.idx:self.idx+8] + self.idx += 8 + if a[0] & 0x80: + a[0] ^= 0x80 + else: + a = [x ^ 0xFF for x in a] + return struct.unpack('>d', array.array('B', a).tostring())[0] + + def getPrefixedString(self): + end_idx = self.idx + while self.buf[end_idx] != 0: + end_idx += 1 + + data = array.array('B', self.buf[self.idx:end_idx]).tostring() + self.idx = end_idx + 1 + return data.replace('\1\1', '\0').replace('\1\2', '\1') diff --git a/google-appengine/google/appengine/dist/py_imp.py b/google-appengine/google/appengine/dist/py_imp.py index a6a0f38..cb097bc 100755 --- a/google-appengine/google/appengine/dist/py_imp.py +++ b/google-appengine/google/appengine/dist/py_imp.py @@ -28,7 +28,7 @@ PKG_DIRECTORY, C_BUILTIN, PY_FROZEN = 5, 6, 7 def get_magic(): """Return the magic string used to recognize byte-compiled code files.""" - return '\0\0\0\0' + return '\xb3\xf2\r\n' _PY_SOURCE_SUFFIX = ('.py', 'U', PY_SOURCE) diff --git a/google-appengine/google/appengine/ext/admin/__init__.py b/google-appengine/google/appengine/ext/admin/__init__.py index f69173e..ab9a5dd 100755 --- a/google-appengine/google/appengine/ext/admin/__init__.py +++ b/google-appengine/google/appengine/ext/admin/__init__.py @@ -64,6 +64,14 @@ from google.appengine.ext.webapp import template _DEBUG = True +def ustr(value): + """Like str(), but UTF-8-encodes Unicode instead of failing.""" + try: + return 
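# [Editorial sketch] The float path decoded above relies on a classic trick:
# pack big-endian IEEE 754, then flip every bit of negatives and only the sign
# bit of non-negatives, so bytewise order equals numeric order. Standalone:
import array
import struct

def sortable_double(value):
  encoded = array.array('B', struct.pack('>d', value))
  if value < 0:
    for i in xrange(8):
      encoded[i] ^= 0xFF
  else:
    encoded[0] ^= 0x80
  return encoded.tostring()

samples = [-2.5, -1e-300, 0.0, 1e-300, 3.14]
assert sorted(samples) == sorted(samples, key=sortable_double)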
str(value) + except UnicodeError: + return unicode(value).encode('UTF-8') + + class ImageHandler(webapp.RequestHandler): """Serves a static image. @@ -578,9 +586,12 @@ class DatastoreRequestHandler(BaseRequestHandler): set of results and 0 for the entity count. """ kind = self.request.get('kind') + namespace = self.request.get('namespace') + if not namespace: + namespace = None if not kind: return ([], 0) - query = datastore.Query(kind) + query = datastore.Query(kind, _namespace=namespace) order = self.request.get('order') order_type = self.request.get('order_type') @@ -724,6 +735,7 @@ class DatastoreQueryHandler(DatastoreRequestHandler): 'message': self.request.get('msg'), 'pages': pages, 'current_page': current_page, + 'namespace': self.request.get('namespace'), 'num': num, 'next_start': -1, 'prev_start': -1, @@ -846,6 +858,7 @@ class DatastoreEditHandler(DatastoreRequestHandler): 'key_id': entity_key_id, 'fields': fields, 'focus': self.request.get('focus'), + 'namespace': self.request.get('namespace'), 'next': self.request.get('next'), 'parent_key': parent_key, 'parent_kind': parent_kind, @@ -862,7 +875,10 @@ class DatastoreEditHandler(DatastoreRequestHandler): return entity = datastore.Get(datastore.Key(entity_key)) else: - entity = datastore.Entity(kind) + namespace = self.request.get('namespace') + if not namespace: + namespace = None + entity = datastore.Entity(kind, _namespace=namespace) args = self.request.arguments() for arg in args: @@ -874,7 +890,7 @@ class DatastoreEditHandler(DatastoreRequestHandler): data_type = DataType.get_by_name(data_type_name) if entity and entity.has_key(field_name): old_formatted_value = data_type.format(entity[field_name]) - if old_formatted_value == form_value: + if old_formatted_value == ustr(form_value): continue if len(form_value) > 0: @@ -912,7 +928,7 @@ class DataType(object): return _NAMED_DATA_TYPES[name] def format(self, value): - return str(value) + return ustr(value) def short_format(self, value): return self.format(value) @@ -922,7 +938,8 @@ class DataType(object): string_value = self.format(value) else: string_value = '' - return '' % (cgi.escape(self.name()), cgi.escape(name), self.input_field_size(), + return '' % (cgi.escape(ustr(self.name())), cgi.escape(ustr(name)), + self.input_field_size(), cgi.escape(string_value, True)) def input_field_size(self): @@ -934,11 +951,11 @@ class DataType(object): class StringType(DataType): def format(self, value): - return value + return ustr(value) def input_field(self, name, value, sample_values): - value = str(value) - sample_values = [str(s) for s in sample_values] + value = ustr(value) + sample_values = [ustr(s) for s in sample_values] multiline = False if value: multiline = len(value) > 255 or value.find('\n') >= 0 @@ -973,7 +990,7 @@ class TextType(StringType): return 'Text' def input_field(self, name, value, sample_values): - return '' % (cgi.escape(name), cgi.escape(str(value))) + return '' % (cgi.escape(ustr(name)), cgi.escape(ustr(value))) def parse(self, value): return datastore_types.Text(value) @@ -1006,7 +1023,8 @@ class TimeType(DataType): return 'datetime' def parse(self, value): - return datetime.datetime(*(time.strptime(value, TimeType._FORMAT)[0:6])) + return datetime.datetime(*(time.strptime(ustr(value), + TimeType._FORMAT)[0:6])) def python_type(self): return datetime.datetime @@ -1017,8 +1035,8 @@ class ListType(DataType): value_file = cStringIO.StringIO() try: writer = csv.writer(value_file) - writer.writerow(value) - return value_file.getvalue() + writer.writerow(map(ustr, 
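# [Editorial illustration] Why the admin console now routes values through
# ustr(): str() raises UnicodeEncodeError for non-ASCII unicode, while ustr()
# degrades to UTF-8 bytes. Self-contained copy of the helper plus a demo:
def ustr(value):
  try:
    return str(value)
  except UnicodeError:
    return unicode(value).encode('UTF-8')

assert ustr(42) == '42'
assert ustr(u'caf\xe9') == 'caf\xc3\xa9'   # UTF-8 bytes instead of a crash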
value)) + return ustr(value_file.getvalue()) finally: value_file.close() @@ -1026,10 +1044,15 @@ class ListType(DataType): return 'list' def parse(self, value): - value_file = cStringIO.StringIO(value) + value_file = cStringIO.StringIO(ustr(value)) try: reader = csv.reader(value_file) - return reader.next() + fields = [] + for field in reader.next(): + if isinstance(field, str): + field = field.decode('utf-8') + fields.append(field) + return fields finally: value_file.close() @@ -1284,6 +1307,7 @@ _DATA_TYPES = { datastore_types.PostalAddress: PostalAddressType(), datastore_types.Rating: RatingType(), datastore_types.BlobKey: BlobKeyType(), + datastore_types.ByteString: StringType(), } _NAMED_DATA_TYPES = {} diff --git a/google-appengine/google/appengine/ext/admin/templates/datastore.html b/google-appengine/google/appengine/ext/admin/templates/datastore.html index 06cc3ee..3ae084d 100644 --- a/google-appengine/google/appengine/ext/admin/templates/datastore.html +++ b/google-appengine/google/appengine/ext/admin/templates/datastore.html @@ -83,6 +83,12 @@ {% if kinds or in_production %}
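As an editorial aside, the ListType change above makes list round-tripping
Unicode-safe: values are serialized as one UTF-8 CSV row and decoded back to
unicode on parse. A self-contained sketch of that round trip (helper names
illustrative):

  import csv
  import cStringIO

  def format_list(values):
    out = cStringIO.StringIO()
    csv.writer(out).writerow([unicode(v).encode('UTF-8') for v in values])
    return out.getvalue()

  def parse_list(text):
    row = csv.reader(cStringIO.StringIO(text)).next()
    return [field.decode('utf-8') for field in row]

  assert parse_list(format_list([u'a', u'caf\xe9'])) == [u'a', u'caf\xe9']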
diff --git a/google-appengine/google/appengine/ext/admin/templates/datastore_edit.html b/google-appengine/google/appengine/ext/admin/templates/datastore_edit.html index 0621df8..0e2247a 100644 --- a/google-appengine/google/appengine/ext/admin/templates/datastore_edit.html +++ b/google-appengine/google/appengine/ext/admin/templates/datastore_edit.html @@ -109,6 +109,17 @@ {% endif %} + {% if not key %} + {% if namespace %} + + + Namespace + ({{ namespace|escape }}) + + + + {% endif %} + {% endif %} {% for field in fields %} diff --git a/google-appengine/google/appengine/ext/appstats/datamodel_pb.py b/google-appengine/google/appengine/ext/appstats/datamodel_pb.py old mode 100644 new mode 100755 diff --git a/google-appengine/google/appengine/ext/appstats/sample_appengine_config.py b/google-appengine/google/appengine/ext/appstats/sample_appengine_config.py index f85803c..661f91a 100755 --- a/google-appengine/google/appengine/ext/appstats/sample_appengine_config.py +++ b/google-appengine/google/appengine/ext/appstats/sample_appengine_config.py @@ -28,6 +28,8 @@ There are four sections: import logging +import random +import re # 0) WSGI middleware declaration. @@ -156,9 +158,9 @@ appstats_FILTER_LIST = [] # above) *and* random.random() < RECORD_FRACTION. def appstats_should_record(env): - if config.FILTER_LIST: - logging.debug('FILTER_LIST: %r', config.FILTER_LIST) - for filter_dict in config.FILTER_LIST: + if appstats_FILTER_LIST: + logging.debug('FILTER_LIST: %r', appstats_FILTER_LIST) + for filter_dict in appstats_FILTER_LIST: for key, regex in filter_dict.iteritems(): negated = isinstance(regex, str) and regex.startswith('!') if negated: diff --git a/google-appengine/google/appengine/ext/appstats/static/appstats_js.js b/google-appengine/google/appengine/ext/appstats/static/appstats_js.js index a77bdad..f9c882b 100755 --- a/google-appengine/google/appengine/ext/appstats/static/appstats_js.js +++ b/google-appengine/google/appengine/ext/appstats/static/appstats_js.js @@ -1,71 +1,76 @@ -/* Copyright 2008-9 Google Inc. All Rights Reserved. 
*/
[Old minified contents of appstats_js.js elided: machine-generated,
Closure-compiled JavaScript with no reviewable content, removed and
regenerated wholesale by this patch.]
+/* Copyright 2008-10 Google Inc. All Rights Reserved.
*/ (function(){function f(a){throw a;}var h=true,i=null,k=false,aa=Object,l=Error,ba=undefined,ca=parseInt,da=document,ea=Math;function fa(a,b){return a.currentTarget=b}function ga(a,b){return a.keyCode=b}function ha(a,b){return a.type=b}function ia(a,b){return a.length=b}function ja(a,b){return a.className=b}function ka(a,b){return a.target=b} +var la="appendChild",m="push",ma="slice",n="replace",na="nodeType",oa="preventDefault",p="indexOf",r="dispatchEvent",pa="capture",qa="nodeName",t="write",ra="charCode",u="keyCode",v="firstChild",sa="setAttribute",ta="handleEvent",w="type",ua="nextSibling",va="setActive",wa="toString",y="length",xa="propertyIsEnumerable",z="prototype",ya="ctrlKey",A="split",za="stopPropagation",Aa="style",Ba="body",Ca="removeChild",B="target",C="call",D="apply",Da="navigator",E="parentNode",Ea="join",Fa="nodeValue", +F,G=this,Ga=function(a,b,c){a=a[A](".");c=c||G;!(a[0]in c)&&c.execScript&&c.execScript("var "+a[0]);for(var d;a[y]&&(d=a.shift());)if(!a[y]&&b!==ba)c[d]=b;else c=c[d]?c[d]:(c[d]={})},Ha=function(a,b){a=a[A](".");b=b||G;for(var c;c=a.shift();)if(b[c])b=b[c];else return i;return b},Ia=function(){},Ja=function(a){a.R=function(){return a.bc||(a.bc=new a)}},Ka=function(a){var b=typeof a;if(b=="object")if(a){if(a instanceof Array||!(a instanceof aa)&&aa[z][wa][C](a)=="[object Array]"||typeof a[y]=="number"&& +typeof a.splice!="undefined"&&typeof a[xa]!="undefined"&&!a[xa]("splice"))return"array";if(!(a instanceof aa)&&(aa[z][wa][C](a)=="[object Function]"||typeof a[C]!="undefined"&&typeof a[xa]!="undefined"&&!a[xa]("call")))return"function"}else return"null";else if(b=="function"&&typeof a[C]=="undefined")return"object";return b},La=function(a){return Ka(a)=="array"},Ma=function(a){var b=Ka(a);return b=="array"||b=="object"&&typeof a[y]=="number"},H=function(a){return typeof a=="string"},K=function(a){return Ka(a)== +"function"},Na=function(a){a=Ka(a);return a=="object"||a=="array"||a=="function"},Qa=function(a){if(a.hasOwnProperty&&a.hasOwnProperty(Oa))return a[Oa];a[Oa]||(a[Oa]=++Pa);return a[Oa]},Oa="closure_uid_"+ea.floor(ea.random()*2147483648)[wa](36),Pa=0,Ra=function(a){var b=Ka(a);if(b=="object"||b=="array"){if(a.Jb)return a.Jb[C](a);b=b=="array"?[]:{};for(var c in a)b[c]=Ra(a[c]);return b}return a},Sa=function(a){var b=Array[z][ma][C](arguments,1);return function(){var c=Array[z][ma][C](arguments);c.unshift[D](c, +b);return a[D](this,c)}},L=function(a,b){function c(){}c.prototype=b[z];a.d=b[z];a.prototype=new c;a[z].constructor=a};var M=Array[z],Ta=M[p]?function(a,b,c){return M[p][C](a,b,c)}:function(a,b,c){c=c==i?0:c<0?ea.max(0,a[y]+c):c;if(H(a)){if(!H(b)||b[y]!=1)return-1;return a[p](b,c)}for(c=c;c=0},Xa=function(a,b){b=Ta(a,b);var c;if(c=b>=0)M.splice[C](a,b,1)[y]==1;return c},Ya=function(){return M.concat[D](M,arguments)},Za=function(a){if(La(a))return Ya(a);else{for(var b=[],c=0,d=a[y];c")!=-1)a=a[n](mb,">");if(a[p]('"')!=-1)a=a[n](nb,""");return a}},kb=/&/g,lb=//g,nb=/\"/g,ob=/[&<>\"]/,rb=function(a,b){var c=0;a=jb(String(a))[A](".");b=jb(String(b))[A](".");for(var d=ea.max(a[y],b[y]),e=0;c==0&&eb)return 1;return 0};var sb,tb,ub,vb,wb=function(){return G[Da]?G[Da].userAgent:i};vb=ub=tb=sb=k;var xb;if(xb=wb()){var yb=G[Da];sb=xb[p]("Opera")==0;tb=!sb&&xb[p]("MSIE")!=-1;ub=!sb&&xb[p]("WebKit")!=-1;vb=!sb&&!ub&&yb.product=="Gecko"}var zb=sb,N=tb,O=vb,Ab=ub,Bb=G[Da],Cb=(Bb&&Bb.platform||"")[p]("Mac")!=-1,Db="",Eb; +if(zb&&G.opera){var Fb=G.opera.version;Db=typeof Fb=="function"?Fb():Fb}else{if(O)Eb=/rv\:([^\);]+)(\)|;)/;else 
if(N)Eb=/MSIE\s+([^\);]+)(\)|;)/;else if(Ab)Eb=/WebKit\/(\S+)/;if(Eb){var Gb=Eb.exec(wb());Db=Gb?Gb[1]:""}}var Hb=Db,Ib={},Jb=function(a){return Ib[a]||(Ib[a]=rb(Hb,a)>=0)};var Kb,Lb=function(a){return(a=a.className)&&typeof a[A]=="function"?a[A](/\s+/):[]},Mb=function(a){var b=Lb(a),c;c=$a(arguments,1);for(var d=0,e=0;e");c=c[Ea]("")}var e=a.createElement(c);if(d)if(H(d))ja(e,d);else Ub(e,d);if(b[y]>2){d=function(j){if(j)e[la](H(j)?a.createTextNode(j):j)};for(c=2;c0)?Ua(Xb(g)?Za(g):g,d):d(g)}}return e},Yb=function(a){return a&&a[E]?a[E][Ca](a):i},Zb=function(a,b){if(a.contains&&b[na]==1)return a==b|| +a.contains(b);if(typeof a.compareDocumentPosition!="undefined")return a==b||Boolean(a.compareDocumentPosition(b)&16);for(;b&&a!=b;)b=b[E];return b==a},Pb=function(a){return a[na]==9?a:a.ownerDocument||a.document},$b=function(a,b){if("textContent"in a)a.textContent=b;else if(a[v]&&a[v][na]==3){for(;a.lastChild!=a[v];)a[Ca](a.lastChild);a[v].data=b}else{for(var c;c=a[v];)a[Ca](c);a[la](Pb(a).createTextNode(b))}},ac={SCRIPT:1,STYLE:1,HEAD:1,IFRAME:1,OBJECT:1},bc={IMG:" ",BR:"\n"},cc=function(a){var b= +a.getAttributeNode("tabindex");if(b&&b.specified){a=a.tabIndex;return typeof a=="number"&&a>=0}return k},dc=function(a,b){if(b)a.tabIndex=0;else a.removeAttribute("tabIndex")},ec=function(a,b,c){if(!(a[qa]in ac))if(a[na]==3)c?b[m](String(a[Fa])[n](/(\r\n|\r|\n)/g,"")):b[m](a[Fa]);else if(a[qa]in bc)b[m](bc[a[qa]]);else for(a=a[v];a;){ec(a,b,c);a=a[ua]}},Xb=function(a){if(a&&typeof a[y]=="number")if(Na(a))return typeof a.item=="function"||typeof a.item=="string";else if(K(a))return typeof a.item== +"function";return k},Ob=function(a){this.G=a||G.document||da};F=Ob[z];F.Fa=Qb;F.c=function(a){return H(a)?this.G.getElementById(a):a};F.m=function(){return Vb(this.G,arguments)};F.createElement=function(a){return this.G.createElement(a)};F.createTextNode=function(a){return this.G.createTextNode(a)};F.appendChild=function(a,b){a[la](b)};F.contains=Zb;var fc=function(){};fc[z].Sa=k;fc[z].K=function(){if(!this.Sa){this.Sa=h;this.f()}};fc[z].f=function(){};var gc=function(a,b){ha(this,a);ka(this,b);fa(this,this[B])};L(gc,fc);F=gc[z];F.f=function(){delete this[w];delete this[B];delete this.currentTarget};F.V=k;F.ka=h;F.stopPropagation=function(){this.V=h};F.preventDefault=function(){this.ka=k};var hc=function(a,b){a&&this.sa(a,b)};L(hc,gc);var ic=[1,4,2];F=hc[z];ka(F,i);F.relatedTarget=i;F.offsetX=0;F.offsetY=0;F.clientX=0;F.clientY=0;F.screenX=0;F.screenY=0;F.button=0;ga(F,0);F.charCode=0;F.ctrlKey=k;F.altKey=k;F.shiftKey=k;F.metaKey=k;F.ic=k;F.M=i; +F.sa=function(a,b){var c=ha(this,a[w]);ka(this,a[B]||a.srcElement);fa(this,b);if(b=a.relatedTarget){if(O)try{b=b[qa]&&b}catch(d){b=i}}else if(c=="mouseover")b=a.fromElement;else if(c=="mouseout")b=a.toElement;this.relatedTarget=b;this.offsetX=a.offsetX!==ba?a.offsetX:a.layerX;this.offsetY=a.offsetY!==ba?a.offsetY:a.layerY;this.clientX=a.clientX!==ba?a.clientX:a.pageX;this.clientY=a.clientY!==ba?a.clientY:a.pageY;this.screenX=a.screenX||0;this.screenY=a.screenY||0;this.button=a.button;ga(this,a[u]|| +0);this.charCode=a[ra]||(c=="keypress"?a[u]:0);this.ctrlKey=a[ya];this.altKey=a.altKey;this.shiftKey=a.shiftKey;this.metaKey=a.metaKey;this.ic=Cb?a.metaKey:a[ya];this.M=a;delete this.ka;delete this.V};var jc=function(a,b){return N?a[w]=="click"?b==0:!!(a.M.button&ic[b]):a.M.button==b};hc[z].stopPropagation=function(){this.V=h;if(this.M[za])this.M[za]();else this.M.cancelBubble=h};var kc=N&&!Jb("8"); +hc[z].preventDefault=function(){this.ka=k;var 
a=this.M;if(a[oa])a[oa]();else{a.returnValue=k;if(kc)try{if(a[ya]||a[u]>=112&&a[u]<=123)ga(a,-1)}catch(b){}}};hc[z].f=function(){hc.d.f[C](this);this.M=i;ka(this,i);fa(this,i);this.relatedTarget=i};var P=function(a,b){this.wb=b;this.$=[];if(a>this.wb)f(l("[goog.structs.SimplePool] Initial cannot be greater than max"));for(b=0;b=0),j;xc=function(I){j=I};if(g){sc=function(){return lc(o)};tc=function(I){nc(o,I)};uc=function(){return lc(q)};vc=function(I){nc(q,I)};wc=function(){return lc(s)};yc=function(){nc(s,c())};zc=function(){return lc(x)};Ac=function(I){nc(x,I)};Bc=function(){return lc(J)};Cc=function(I){nc(J, +I)};var o=new P(0,600);o.I=a;var q=new P(0,600);q.I=b;var s=new P(0,600);s.I=c;var x=new P(0,600);x.I=d;var J=new P(0,600);J.I=e}else{sc=a;tc=Ia;uc=b;vc=Ia;wc=c;yc=Ia;zc=d;Ac=Ia;Bc=e;Cc=Ia}})();var Dc={},Q={},Ec={},Fc={},R=function(a,b,c,d,e){if(b)if(La(b)){for(var g=0;g=0;s--){var x=q[s];if((g||b==x[w])&& +(j||c==x[pa])){Jc(x.key);d++}}});else{a=Qa(a);if(Ec[a]){a=Ec[a];for(e=a[y]-1;e>=0;e--){var o=a[e];if((g||b==o[w])&&(j||c==o[pa])){Jc(o.key);d++}}}}return d},Ic=function(a,b,c){var d=Q;if(b in d){d=d[b];if(c in d){d=d[c];a=Qa(a);if(d[a])return d[a]}}return i},Gc=function(a){if(a in Fc)return Fc[a];return Fc[a]="on"+a},Nc=function(a,b,c,d,e){var g=1;b=Qa(b);if(a[b]){a.A--;a=a[b];if(a.Ka)a.Ka++;else a.Ka=1;try{for(var j=a[y],o=0;o=0&&g.A;J--){fa(o,s[J]);e&=Nc(g,s[J],c,h,o)}if(j){g=d[k];g.A=g.F;for(J=0;!o.V&&J=0&&g.A;j--){fa(a,c[j]);b&=Nc(g,c[j],a[w],h,a)&&a.ka!=k}}if(k in e){g=e[k];g.A=g.F;if(d)for(j=0;!a.V&&jed(this))f(l("Child component index out of bounds"));if(!this.r||!this.p){this.r={};this.p=[]}if(a.h==this){this.r[$c(a)]=a;Xa(this.p,a)}else db(this.r,$c(a),a);cd(a,this);ab(this.p,b,0,a);if(a.e&&this.e&&a.h==this){c=this.N();c.insertBefore(a.c(),c.childNodes[b]||i)}else if(c){this.b||this.m();c=U(this,b+1);b=this.N();c=c?c.b:i;if(a.e)f(l("Component already rendered"));a.b||a.m();b?b.insertBefore(a.b,c||i):a.w.G[Ba][la](a.b); +if(!a.h||a.h.e)a.H()}else this.e&&!a.e&&a.b&&a.H()};F.N=function(){return this.b};var fd=function(a){if(a.ua==i)a.ua="rtl"==(Tc(a.e?a.b:a.w.G[Ba],"direction")||((a.e?a.b:a.w.G[Ba]).currentStyle?(a.e?a.b:a.w.G[Ba]).currentStyle.direction:i)||(a.e?a.b:a.w.G[Ba])[Aa].direction);return a.ua};T[z].xa=function(a){if(this.e)f(l("Component already rendered"));this.ua=a}; +var ed=function(a){return a.p?a.p[y]:0},U=function(a,b){return a.p?a.p[b]||i:i},dd=function(a,b,c){a.p&&Ua(a.p,b,c)},gd=function(a,b){return a.p&&b?Ta(a.p,b):-1};T[z].removeChild=function(a,b){if(a){var c=H(a)?a:$c(a);a=this.r&&c?eb(this.r,c)||i:i;if(c&&a){cb(this.r,c);Xa(this.p,a);if(b){a.Z();a.b&&Yb(a.b)}cd(a,i)}}if(!a)f(l("Child is not in parent component"));return a};var hd=function(a,b){if(O){a[sa]("role",b);a.mc=b}},id=function(a,b,c){O&&a[sa]("aria-"+b,c)};var kd=function(a,b,c,d,e){if(!N&&!(Ab&&Jb("525")))return h;if(Cb&&e)return jd(a);if(e&&!d)return k;if(N&&!c&&(b==17||b==18))return k;if(N&&d&&b==a)return k;switch(a){case 13:return h;case 27:return!Ab}return jd(a)},jd=function(a){if(a>=48&&a<=57)return h;if(a>=96&&a<=106)return h;if(a>=65&&a<=90)return h;switch(a){case 32:case 63:case 107:case 109:case 110:case 111:case 186:case 189:case 187:case 188:case 190:case 191:case 192:case 222:case 219:case 220:case 221:return h;default:return k}};var V=function(a){a&&ld(this,a)};L(V,Sc);F=V[z];F.b=i;F.Ia=i;F.cb=i;F.Ja=i;F.ta=-1;F.fa=-1; +var 
md={"3":13,"12":144,"63232":38,"63233":40,"63234":37,"63235":39,"63236":112,"63237":113,"63238":114,"63239":115,"63240":116,"63241":117,"63242":118,"63243":119,"63244":120,"63245":121,"63246":122,"63247":123,"63248":44,"63272":46,"63273":36,"63275":35,"63276":33,"63277":34,"63289":144,"63302":45},nd={Up:38,Down:40,Left:37,Right:39,Enter:13,F1:112,F2:113,F3:114,F4:115,F5:116,F6:117,F7:118,F8:119,F9:120,F10:121,F11:122,F12:123,"U+007F":46,Home:36,End:35,PageUp:33,PageDown:34,Insert:45},od={61:187, +59:186},pd=N||Ab&&Jb("525");V[z].Tb=function(a){if(pd&&!kd(a[u],this.ta,a.shiftKey,a[ya],a.altKey))this[ta](a);else this.fa=O&&a[u]in od?od[a[u]]:a[u]};V[z].Ub=function(){this.fa=this.ta=-1}; +V[z].handleEvent=function(a){var b=a.M,c,d;if(N&&a[w]=="keypress"){c=this.fa;d=c!=13&&c!=27?b[u]:0}else if(Ab&&a[w]=="keypress"){c=this.fa;d=b[ra]>=0&&b[ra]<63232&&jd(c)?b[ra]:0}else if(zb){c=this.fa;d=jd(c)?b[u]:0}else{c=b[u]||this.fa;d=b[ra]||0;if(Cb&&d==63&&!c)c=191}var e=c,g=b.keyIdentifier;if(c)if(c>=63232&&c in md)e=md[c];else{if(c==25&&a.shiftKey)e=9}else if(g&&g in nd)e=nd[g];a=e==this.ta;this.ta=e;b=new qd(e,d,a,b);try{this[r](b)}finally{b.K()}};V[z].c=function(){return this.b}; +var ld=function(a,b){a.Ja&&a.detach();a.b=b;a.Ia=R(a.b,"keypress",a);a.cb=R(a.b,"keydown",a.Tb,k,a);a.Ja=R(a.b,"keyup",a.Ub,k,a)};V[z].detach=function(){if(this.Ia){Jc(this.Ia);Jc(this.cb);Jc(this.Ja);this.Ja=this.cb=this.Ia=i}this.b=i;this.ta=-1};V[z].f=function(){V.d.f[C](this);this.detach()};var qd=function(a,b,c,d){d&&this.sa(d,void 0);ha(this,"key");ga(this,a);this.charCode=b;this.repeat=c};L(qd,hc);var sd=function(a){for(var b;a;){b=Qa(a);if(b=rd[b])break;a=a.d?a.d.constructor:i}if(b)return K(b.R)?b.R():new b;return i},ud=function(a,b){if(!a)f(l("Invalid class name "+a));if(!K(b))f(l("Invalid decorator function "+b));td[a]=b},rd={},td={};var vd=function(){},wd;Ja(vd);F=vd[z];F.da=function(){};F.m=function(a){return a.Fa().m("div",this.na(a)[Ea](" "),a.Ea)};F.N=function(a){return a};F.ma=function(a,b,c){if(a=a.c?a.c():a)if(N&&!Jb("7")){var d=xd(this,Lb(a),b);d[m](b);Sa(c?Mb:Nb,a)[D](i,d)}else c?Mb(a,b):Nb(a,b)};F.X=function(){return h}; +F.J=function(a,b){b.id&&ad(a,b.id);var c=this.N(b);c&&c[v]?yd(a,c[v][ua]?Za(c.childNodes):c[v]):yd(a,i);var d=0,e=this.o(),g=this.o(),j=k,o=k;c=k;var q=Lb(b);Ua(q,function(x){if(!j&&x==e){j=h;if(g==e)o=h}else if(!o&&x==g)o=h;else d|=zd(this,x)},this);a.l=d;if(!j){q[m](e);if(g==e)o=h}o||q[m](g);(a=a.z)&&q[m][D](q,a);if(N&&!Jb("7")){var s=xd(this,q);if(s[y]>0){q[m][D](q,s);c=h}}if(!j||!o||a||c)ja(b,q[Ea](" "));return b};F.bb=function(a){fd(a)&&this.xa(a.c(),h);a.j()&&this.wa(a,a.O())}; +F.Ma=function(a,b){Wc(a,!b,!N&&!zb)};F.xa=function(a,b){this.ma(a,this.o()+"-rtl",b)};F.ba=function(a){var b;if(a.v&32&&(b=a.n()))return cc(b);return k};F.wa=function(a,b){var c;if(a.v&32&&(c=a.n())){if(!b&&a.l&32){try{c.blur()}catch(d){}a.l&32&&a.oa(i)}cc(c)!=b&&dc(c,b)}};F.ya=function(a,b){Uc(a,b)};F.D=function(a,b,c){var d=a.c();if(d){var e=Ad(this,b);e&&this.ma(a,e,c);if(O){wd||(wd=ib(1,"disabled",4,"pressed",8,"selected",16,"checked",64,"expanded"));(a=wd[b])&&id(d,a,c)}}};F.n=function(a){return a.c()}; +F.o=function(){return"goog-control"};F.na=function(a){var b=this.o(),c=[b],d=this.o();d!=b&&c[m](d);b=a.l;for(d=[];b;){var e=b&-b;d[m](Ad(this,e));b&=~e}c[m][D](c,d);(a=a.z)&&c[m][D](c,a);N&&!Jb("7")&&c[m][D](c,xd(this,c));return c}; +var xd=function(a,b,c){var d=[];if(c)b=b.concat([c]);Ua([],function(e){if(Va(e,Sa(Wa,b))&&(!c||Wa(e,c)))d[m](e[Ea]("_"))});return d},Ad=function(a,b){a.Da||Bd(a);return 
a.Da[b]},zd=function(a,b){a.zb||Cd(a);a=ca(a.zb[b],10);return isNaN(a)?0:a},Bd=function(a){var b=a.o();a.Da=ib(1,b+"-disabled",2,b+"-hover",4,b+"-active",8,b+"-selected",16,b+"-checked",32,b+"-focused",64,b+"-open")},Cd=function(a){a.Da||Bd(a);a.zb=fb(a.Da)};var W=function(a,b,c){T[C](this,c);this.a=b||sd(this.constructor);this.Ea=a};L(W,T);F=W[z];F.Ea=i;F.l=0;F.v=39;F.Hb=255;F.Na=0;F.q=h;F.z=i;F.ra=h;F.Ba=k;F.n=function(){return this.a.n(this)};F.Ga=function(){return this.t||(this.t=new V)};F.qb=function(){return this.a};var Dd=function(a,b){if(b){if(a.z)Wa(a.z,b)||a.z[m](b);else a.z=[b];a.a.ma(a,b,h)}},Ed=function(a,b){if(b&&a.z){Xa(a.z,b);if(a.z[y]==0)a.z=i;a.a.ma(a,b,k)}};F=W[z];F.ma=function(a,b){b?Dd(this,a):Ed(this,a)}; +F.m=function(){var a=this.a.m(this);this.b=a;if(O){var b=this.a.da();b&&hd(a,b)}this.Ba||this.a.Ma(a,k);this.O()||this.a.ya(a,k)};F.N=function(){return this.a.N(this.c())};F.X=function(a){return this.a.X(a)};F.Ra=function(a){this.b=a=this.a.J(this,a);if(O){var b=this.a.da();b&&hd(a,b)}this.Ba||this.a.Ma(a,k);this.q=a[Aa].display!="none"}; +F.H=function(){W.d.H[C](this);this.a.bb(this);if(this.v&-2){this.ra&&Fd(this,h);if(this.v&32){var a=this.n();if(a){var b=this.Ga();ld(b,a);S(S(S(bd(this),b,"key",this.S),a,"focus",this.pa),a,"blur",this.oa)}}}};var Fd=function(a,b){var c=bd(a),d=a.c();if(b){S(S(S(S(c,d,"mouseover",a.Za),d,"mousedown",a.qa),d,"mouseup",a.$a),d,"mouseout",a.Ya);N&&S(c,d,"dblclick",a.rb)}else{Qc(Qc(Qc(Qc(c,d,"mouseover",a.Za),d,"mousedown",a.qa),d,"mouseup",a.$a),d,"mouseout",a.Ya);N&&Qc(c,d,"dblclick",a.rb)}}; +W[z].Z=function(){W.d.Z[C](this);this.t&&this.t.detach();this.O()&&this.j()&&this.a.wa(this,k)};W[z].f=function(){W.d.f[C](this);if(this.t){this.t.K();delete this.t}delete this.a;this.z=this.Ea=i};var yd=function(a,b){a.Ea=b};F=W[z];F.xa=function(a){W.d.xa[C](this,a);var b=this.c();b&&this.a.xa(b,a)};F.Ma=function(a){this.Ba=a;var b=this.c();b&&this.a.Ma(b,a)};F.O=function(){return this.q}; +F.ya=function(a,b){if(b||this.q!=a&&this[r](a?"show":"hide")){(b=this.c())&&this.a.ya(b,a);this.j()&&this.a.wa(this,a);this.q=a;return h}return k};F.j=function(){return!!!(this.l&1)};F.va=function(a){var b=this.h;if(!(b&&typeof b.j=="function"&&!b.j())&&Gd(this,1,!a)){if(!a){this[va](k);this.C(k)}this.O()&&this.a.wa(this,a);this.D(1,!a)}};F.C=function(a){Gd(this,2,a)&&this.D(2,a)};F.setActive=function(a){Gd(this,4,a)&&this.D(4,a)}; +var Hd=function(a,b){Gd(a,8,b)&&a.D(8,b)},Id=function(a,b){Gd(a,16,b)&&a.D(16,b)},Jd=function(a,b){Gd(a,32,b)&&a.D(32,b)},Kd=function(a,b){Gd(a,64,b)&&a.D(64,b)};W[z].D=function(a,b){if(this.v&a&&b!=!!(this.l&a)){this.a.D(this,a,b);this.l=b?this.l|a:this.l&~a}}; +var Ld=function(a,b,c){if(a.e&&a.l&b&&!c)f(l("Component already rendered"));!c&&a.l&b&&a.D(b,k);a.v=c?a.v|b:a.v&~b},X=function(a,b){return!!(a.Hb&b)&&!!(a.v&b)},Gd=function(a,b,c){return!!(a.v&b)&&!!(a.l&b)!=c&&(!(a.Na&b)||a[r](Zc(b,c)))&&!a.Sa};W[z].Za=function(a){!Md(a,this.c())&&this[r]("enter")&&this.j()&&X(this,2)&&this.C(h)};W[z].Ya=function(a){if(!Md(a,this.c())&&this[r]("leave")){X(this,4)&&this[va](k);X(this,2)&&this.C(k)}};var Md=function(a,b){return!!a.relatedTarget&&Zb(b,a.relatedTarget)}; +W[z].qa=function(a){if(this.j()){X(this,2)&&this.C(h);if(jc(a,0)){X(this,4)&&this[va](h);this.a.ba(this)&&this.n().focus()}}!this.Ba&&jc(a,0)&&a[oa]()};W[z].$a=function(a){if(this.j()){X(this,2)&&this.C(h);this.l&4&&Nd(this,a)&&X(this,4)&&this[va](k)}};W[z].rb=function(a){this.j()&&Nd(this,a)};var 
Nd=function(a,b){X(a,16)&&Id(a,!!!(a.l&16));X(a,8)&&Hd(a,h);X(a,64)&&Kd(a,!!!(a.l&64));var c=new gc("action",a);if(b)for(var d=["altKey","ctrlKey","metaKey","shiftKey"],e,g=0;e=d[g];g++)c[e]=b[e];return a[r](c)}; +W[z].pa=function(){X(this,32)&&Jd(this,h)};W[z].oa=function(){X(this,4)&&this[va](k);X(this,32)&&Jd(this,k)};W[z].S=function(a){if(this.O()&&this.j()&&this.Xa(a)){a[oa]();a[za]();return h}return k};W[z].Xa=function(a){return a[u]==13&&Nd(this,a)};if(!K(W))f(l("Invalid component class "+W));if(!K(vd))f(l("Invalid renderer class "+vd));var Od=Qa(W);rd[Od]=vd;ud("goog-control",function(){return new W(i)});var Pd=function(){};L(Pd,vd);Ja(Pd);Pd[z].m=function(a){return a.Fa().m("div",this.o())};Pd[z].J=function(a,b){if(b.tagName=="HR"){var c=b;b=this.m(a);c[E]&&c[E].insertBefore(b,c);Yb(c)}else Mb(b,this.o());return b};Pd[z].o=function(){return"goog-menuseparator"};var Qd=function(a,b){W[C](this,i,a||Pd.R(),b);Ld(this,1,k);Ld(this,2,k);Ld(this,4,k);Ld(this,32,k);this.l=1};L(Qd,W);Qd[z].H=function(){Qd.d.H[C](this);hd(this.c(),"separator")};ud("goog-menuseparator",function(){return new Qd});var Rd=function(){};Ja(Rd);Rd[z].da=function(){};var Sd=function(a,b,c){if(b)b.tabIndex=c?0:-1};F=Rd[z];F.m=function(a){return a.Fa().m("div",this.na(a)[Ea](" "))};F.N=function(a){return a};F.X=function(a){return a.tagName=="DIV"};F.J=function(a,b){b.id&&ad(a,b.id);var c=this.o(),d=k,e=Lb(b);e&&Ua(e,function(g){if(g==c)d=h;else g&&this.ib(a,g,c)},this);d||Mb(b,c);Td(this,a,b);return b}; +F.ib=function(a,b,c){if(b==c+"-disabled")a.va(k);else if(b==c+"-horizontal")Ud(a,"horizontal");else b==c+"-vertical"&&Ud(a,"vertical")};var Td=function(a,b,c){if(c){a=c[v];for(var d;a&&a[E]==c;){d=a[ua];if(a[na]==1){var e;a:{e=void 0;for(var g=Lb(a),j=0,o=g[y];j-1&&b!=this.k){var c=U(this,this.k);c&&c.C(k);this.k=b;c=U(this,this.k);this.ia&&c[va](h);if(this.g&&c!=this.g)c.v&64?Kd(c,h):Kd(this.g,k)}id(this.c(),"activedescendant",a[B].c().id)};F.$b=function(a){if(a[B]==U(this,this.k))this.k=-1;id(this.c(),"activedescendant","")};F.Vb=function(a){if((a=a[B])&&a!=this.g&&a.h==this){this.g&&Kd(this.g,k);this.g=a}};F.Pb=function(a){if(a[B]==this.g)this.g=i}; +F.qa=function(a){this.Y&&Yd(this,h);var b=this.n(),c;a:{if(b)if((c=b.getAttributeNode("tabindex"))&&c.specified){c=b.tabIndex;c=typeof c=="number"&&c>=0;break a}c=k}c?b.focus():a[oa]()};F.Qb=function(){this.ia=k};F.Ob=function(a){var b;a:{b=a[B];if(this.P)for(var c=this.c();b&&b[E]&&b!=c;){var d=b.id;if(d in this.P){b=this.P[d];break a}b=b[E]}b=i}if(b)switch(a[w]){case "mousedown":b.qa(a);break;case "mouseup":b.$a(a);break;case "mouseover":b.Za(a);break;case "mouseout":b.Ya(a);break}};F.pa=function(){}; +F.oa=function(){Xd(this,-1);this.ia=k;this.g&&Kd(this.g,k)};F.S=function(a){if(this.j()&&ed(this)!=0&&this.Xa(a)){a[oa]();a[za]();return h}return k}; +F.Xa=function(a){var b=U(this,this.k);if(b&&typeof b.S=="function"&&b.S(a))return h;if(this.g&&this.g!=b&&typeof this.g.S=="function"&&this.g.S(a))return h;switch(a[u]){case 27:if(this.ba())this.n().blur();else return k;break;case 36:Zd(this);break;case 35:$d(this);break;case 38:if(this.U=="vertical")be(this);else return k;break;case 37:if(this.U=="horizontal")fd(this)?de(this):be(this);else return k;break;case 40:if(this.U=="vertical")de(this);else return k;break;case 39:if(this.U=="horizontal")fd(this)? 
+be(this):de(this);else return k;break;default:return k}return h};var Vd=function(a,b){var c=b.c();c=c.id||(c.id=$c(b));if(!a.P)a.P={};a.P[c]=b};Y[z].Aa=function(a,b){Y.d.Aa[C](this,a,b)};Y[z].Pa=function(a,b,c){a.Na|=2;a.Na|=64;if(this.ba()||!this.Fb)Ld(a,32,k);a.e&&k!=a.ra&&Fd(a,k);a.ra=k;Y.d.Pa[C](this,a,b,c);c&&this.e&&Vd(this,a);b<=this.k&&this.k++}; +Y[z].removeChild=function(a,b){var c=gd(this,a);if(c!=-1)if(c==this.k)a.C(k);else c-1&&U(a,a.k).C(k)}; +Y[z].C=function(a){Xd(this,gd(this,a))};var Zd=function(a){ee(a,function(b,c){return(b+1)%c},ed(a)-1)},$d=function(a){ee(a,function(b,c){b--;return b<0?c-1:b},0)},de=function(a){ee(a,function(b,c){return(b+1)%c},a.k)},be=function(a){ee(a,function(b,c){b--;return b<0?c-1:b},a.k)},ee=function(a,b,c){c=c<0?gd(a,a.g):c;var d=ed(a);c=b(c,d);for(var e=0;e<=d;){var g=U(a,c);if(g&&g.O()&&g.j()&&g.v&2){a.gb(c);return h}e++;c=b(c,d)}return k};Y[z].gb=function(a){Xd(this,a)};var Yd=function(a,b){a.ia=b};var fe=function(){};L(fe,vd);Ja(fe);F=fe[z];F.o=function(){return"goog-tab"};F.da=function(){return"tab"};F.m=function(a){var b=fe.d.m[C](this,a);(a=a.Wa())&&this.jb(b,a);return b};F.J=function(a,b){b=fe.d.J[C](this,a,b);var c=this.Wa(b);c&&ge(a,c);if(a.l&8)if((c=a.h)&&K(c.ca)){a.D(8,k);c.ca(a)}return b};F.Wa=function(a){return a.title||""};F.jb=function(a,b){if(a)a.title=b||""};var he=function(a,b,c){W[C](this,a,b||fe.R(),c);Ld(this,8,h);this.Na|=9};L(he,W);he[z].Wa=function(){return this.Bb};he[z].jb=function(a){this.qb().jb(this.c(),a);this.Bb=a};var ge=function(a,b){a.Bb=b};ud("goog-tab",function(){return new he(i)});var ie=function(){};L(ie,Rd);Ja(ie);ie[z].o=function(){return"goog-tab-bar"};ie[z].da=function(){return"tablist"};ie[z].ib=function(a,b,c){this.vb||je(this);var d=this.vb[b];d?ke(a,d):ie.d.ib[C](this,a,b,c)};ie[z].na=function(a){var b=ie.d.na[C](this,a);this.Ca||le(this);b[m](this.Ca[a.dc]);return b};var le=function(a){var b=a.o();a.Ca=ib("top",b+"-top","bottom",b+"-bottom","start",b+"-start","end",b+"-end")},je=function(a){a.Ca||le(a);a.vb=fb(a.Ca)};var Z=function(a,b,c){ke(this,a||"top");Y[C](this,this.U,b||ie.R(),c);a=bd(this);S(a,this,"select",this.Yb);S(a,this,"unselect",this.Zb);S(a,this,"disable",this.Wb);S(a,this,"hide",this.Xb)};L(Z,Y);Z[z].Gb=h;Z[z].B=i;Z[z].f=function(){Z.d.f[C](this);this.B=i};Z[z].removeChild=function(a,b){me(this,a);return Z.d[Ca][C](this,a,b)};var ke=function(a,b){Ud(a,b=="start"||b=="end"?"vertical":"horizontal");a.dc=b};Z[z].gb=function(a){Z.d.gb[C](this,a);this.Gb&&ne(this,a)}; +Z[z].ca=function(a){if(a)Hd(a,h);else this.B&&Hd(this.B,k)};var ne=function(a,b){a.ca(U(a,b))},me=function(a,b){if(b&&b==a.B){for(var c=gd(a,b),d=c-1;b=U(a,d);d--)if(b.O()&&b.j()){a.ca(b);return}for(c=c+1;b=U(a,c);c++)if(b.O()&&b.j()){a.ca(b);return}a.ca(i)}};F=Z[z];F.Yb=function(a){this.B&&this.B!=a[B]&&Hd(this.B,k);this.B=a[B]};F.Zb=function(a){if(a[B]==this.B)this.B=i};F.Wb=function(a){me(this,a[B])};F.Xb=function(a){me(this,a[B])};F.pa=function(){U(this,this.k)||this.C(this.B||U(this,0))}; +ud("goog-tab-bar",function(){return new Z});var oe=function(a,b,c,d){function e(j){if(j){j.tabIndex=0;R(j,"click",g.gc,k,g);R(j,"keydown",g.hc,k,g)}}this.L=Rb(a)||i;this.la=Rb(d||i);this.Ta=(this.db=K(b)?b:i)||!b?i:Rb(b);this.i=c==h;var g=this;e(this.L);e(this.la);this.W(this.i)};L(oe,Sc);oe[z].f=function(){this.L&&Lc(this.L);this.la&&Lc(this.la);oe.d.f[C](this)}; +oe[z].W=function(a){if(this.Ta)Uc(this.Ta,a);else if(a&&this.db)this.Ta=this.db();if(this.la){Uc(this.L,!a);Uc(this.la,a)}else 
if(this.L)if(a){Nb(this.L,"goog-zippy-collapsed");Mb(this.L,"goog-zippy-expanded")}else{Nb(this.L,"goog-zippy-expanded");Mb(this.L,"goog-zippy-collapsed")}this.i=a;this[r](new pe("toggle",this,this.i))};oe[z].hc=function(a){if(a[u]==13||a[u]==32){this.W(!this.i);a[oa]();a[za]()}};oe[z].gc=function(){this.W(!this.i)};var pe=function(a,b,c){gc[C](this,a,b);this.lc=c};L(pe,gc);var re=function(a,b){this.kb=[];a=Rb(a);a=Sb(da,"span","ae-zippy",a);for(var c=0,d;d=a[c];c++){for(var e=d[E][E][E][ua];e&&e[na]!=1;)e=e[ua];this.kb[m](new oe(d,e,k))}this.Lb=new qe(this.kb,Rb(b))};re[z].Mb=function(){return this.Lb};re[z].Nb=function(){return this.kb}; +var qe=function(a,b){this.za=a;if(this.za[y]){a=0;for(var c;c=this.za[a];a++)R(c,"toggle",this.kc,k,this)}this.eb=0;this.i=k;a="ae-toggle ae-plus ae-action";this.za[y]||(a+=" ae-disabled");this.Q=Wb("span",{className:a},"Expand All");R(this.Q,"click",this.Ib,k,this);b[la](this.Q)};qe[z].Ib=function(){this.za[y]&&this.W(!this.i)};qe[z].kc=function(a){a=a.currentTarget;if(a.i)this.eb+=1;else this.eb-=1;if(a.i!=this.i)if(a.i){this.i=h;se(this,h)}else if(this.eb==0){this.i=k;se(this,k)}}; +qe[z].W=function(a){this.i=a;a=0;for(var b;b=this.za[a];a++)b.i!=this.i&&b.W(this.i);se(this)}; +var se=function(a,b){if(b!==ba?b:a.i){Nb(a.Q,"ae-plus");Mb(a.Q,"ae-minus");$b(a.Q,"Collapse All")}else{Nb(a.Q,"ae-minus");Mb(a.Q,"ae-plus");$b(a.Q,"Expand All")}},te=function(a){this.jc=a;this.Ab={};var b,c=Wb("div",{},b=Wb("div",{id:"ae-stats-details-tabs",className:"goog-tab-bar goog-tab-bar-top"}),Wb("div",{className:"goog-tab-bar-clear"}),a=Wb("div",{id:"ae-stats-details-tabs-content",className:"goog-tab-content"})),d=new Z;d.J(b);R(d,"select",this.mb,k,this);R(d,"unselect",this.mb,k,this);b= +0;for(var e;e=this.jc[b];b++)if(e=Rb("ae-stats-details-"+e)){var g=Sb(da,"h2",i,e)[0],j;j=void 0;if(N&&"innerText"in g)j=g.innerText[n](/(\r\n|\r|\n)/g,"\n");else{j=[];ec(g,j,h);j=j[Ea]("")}j=j[n](/\xAD/g,"");j=j[n](/ +/g," ");if(j!=" ")j=j[n](/^\s*/,"");j=j;Yb(g);g=new he(j);this.Ab[Qa(g)]=e;d.Aa(g,h);a[la](e);b==0?d.ca(g):Uc(e,k)}Rb("bd")[la](c)};te[z].mb=function(a){var b=this.Ab[Qa(a[B])];Uc(b,a[w]=="select")};Ga("ae.Stats.Details.Tabs",te,void 0);Ga("goog.ui.Zippy",oe,void 0); +oe[z].setExpanded=oe[z].W;Ga("ae.Stats.MakeZippys",re,void 0);re[z].getExpandCollapse=re[z].Mb;re[z].getZippys=re[z].Nb;qe[z].setExpanded=qe[z].W;var $=function(){this.Qa=[];this.fb=[]},ue=[[5,0.2,1],[6,0.2,1.2],[5,0.25,1.25],[6,0.25,1.5],[4,0.5,2],[5,0.5,2.5],[6,0.5,3],[4,1,4],[5,1,5],[6,1,6],[4,2,8],[5,2,10]],ve=function(a){if(a<=0)return[2,0.5,1];for(var b=1;a<1;){a*=10;b/=10}for(;a>=10;){a/=10;b*=10}for(var c=0;c');a[t]('
');for(var e=0;e<=b;e++){a[t]('');a[t]('');a[t](" "+e*c+"")}a[t]("
\n")}; +$[z].Kb=function(){this.fb=[];var a=ve(this.ab),b=a[0],c=a[1];a=100/a[2];this[t]('\n');we(this,b,c,a);for(var d=0;d\n\n")}we(this,b,c,a);this[t]("
');if(e.label[y]>0){e.ga[y]>0&&this[t]('');this[t](e.label);e.ga[y]>0&&this[t]("")}this[t]("");this[t]('
\n");return this.fb[Ea]("")}; +$[z].Eb=function(a,b,c,d,e,g){this.ab=ea.max(this.ab,ea.max(b+c,b+d));this.Qa[m]({label:a,start:b,pb:c,Ua:d,tb:e,ga:g})};Ga("Gantt",$,void 0);$[z].add_bar=$[z].Eb;$[z].draw=$[z].Kb;})(); diff --git a/google-appengine/google/appengine/ext/appstats/static/gantt.js b/google-appengine/google/appengine/ext/appstats/static/gantt.js index 4bc8a93..4c1d651 100644 --- a/google-appengine/google/appengine/ext/appstats/static/gantt.js +++ b/google-appengine/google/appengine/ext/appstats/static/gantt.js @@ -7,23 +7,28 @@ * @author schefflerjens@google.com (Jens Scheffler) */ -function Gantt() { - - // Overridable configuration constants. - this.PIX = 'stats/static/pix.gif'; // URL of a transparent 1x1 GIF. - this.PREFIX = 'ae-stats-gantt-'; // Class name this.PREFIX. - this.HEIGHT = '1em'; // Height of one bar. - this.EXTRA_HEIGHT = '0.5em'; // Height of the extra bar. - this.BG_COLOR = '#eeeeff'; // Background color for the bar. - this.COLOR = '#7777ff'; // Color of the main bar. - this.EXTRA_COLOR = '#ff6666'; // Color of the extra bar. - this.INLINE_FONT_SIZE = '80%'; // Font size of inline_label. - this.INLINE_TOP = '0.1em'; // Top of inline label text. - this.TICK_COLOR = 'grey'; // Color for ticks. - - // Internal fields used to render the chart - // Should not be modified - var SCALES = [[5, 0.2, 1.0], +/** + * @constructor + */ +var Gantt = function() { + /** + * @type {Array} + */ + this.bars = []; + + /** + * @type {Array} + */ + this.output = []; +}; + + +/** + * Internal fields used to render the chart. + * Should not be modified. + * @type {Array.} + */ +Gantt.SCALES = [[5, 0.2, 1.0], [6, 0.2, 1.2], [5, 0.25, 1.25], [6, 0.25, 1.5], @@ -35,153 +40,243 @@ function Gantt() { [6, 1.0, 6.0], [4, 2.0, 8.0], [5, 2.0, 10.0]]; - var bars = []; - var highest_duration = 0; - var output = []; - /* - * Appends text to the output array - */ - var write = function(text) { - output.push(text); - } - /* - * Internal helper to draw a table row showing the scale. - */ - var draw_scale = function(gantt, howmany, spacing, scale) { - write(''); - write('
'); - for (var i = 0; i <= howmany; i++) { - write(''); - write(''); - write(' ' + (i * spacing) + ''); // TODO: number format %4g +/** + * Helper to compute the proper X axis scale. + * Args: + * highest: the highest value in the data series. + * + * Returns: + * A tuple (howmany, spacing, limit) where howmany is the number of + * increments, spacing is the increment to be used between successive + * axis labels, and limit is the rounded-up highest value of the + * axis. Within float precision, howmany * spacing == highest will + * hold. + * + * The axis is assumed to always start at zero. + */ +Gantt.compute_scale = function(highest) { + if (highest <= 0) { + return [2, 0.5, 1.0] // Special-case if there's no data. + } + var scale = 1.0 + while (highest < 1.0) { + highest *= 10.0 + scale /= 10.0 + } + while (highest >= 10.0) { + highest /= 10.0 + scale *= 10.0 + } + // Now 1 <= highest < 10 + for (var i = 0; i < Gantt.SCALES.length; i++) { + if (highest <= Gantt.SCALES[i][2]) { + return [Gantt.SCALES[i][0], Gantt.SCALES[i][1] * scale, + Gantt.SCALES[i][2] * scale]; } - write('
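A quick illustration of the scale computation above, for the reader rather than part of the patch: the function normalizes highest into the range [1, 10), picks the first SCALES row whose limit covers it, then multiplies that row back up. Assuming the new file is loaded, these values follow directly from the code:

  // Each result is [howmany, spacing, limit]; howmany * spacing == limit.
  Gantt.compute_scale(0);     // [2, 0.5, 1.0]   no data, special case
  Gantt.compute_scale(0.43);  // [5, 0.1, 0.5]   0.43 -> 4.3, row [5, 1.0, 5.0], scale 0.1
  Gantt.compute_scale(7);     // [4, 2.0, 8.0]   already in [1, 10), row [4, 2.0, 8.0]
  Gantt.compute_scale(70);    // [4, 20, 80]     70 -> 7.0, same row, scale 10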
\n'); } + // Avoid the need for "assert False". Not actually reachable. + return [5, 2.0 * scale, 10.0 * scale]; +}; - /* - * Helper to compute the proper X axis scale. - * Args: - * highest: the highest value in the data series. - * - * Returns: - * A tuple (howmany, spacing, limit) where howmany is the number of - * increments, spacing is the increment to be used between successive - * axis labels, and limit is the rounded-up highest value of the - * axis. Within float precision, howmany * spacing == highest will - * hold. - * - * The axis is assumed to always start at zero. - */ - var compute_scale = function(highest) { - if (highest <= 0) { - return [2, 0.5, 1.0] // Special-case if there's no data. - } - var scale = 1.0 - while (highest < 1.0) { - highest *= 10.0 - scale /= 10.0 - } - while (highest >= 10.0) { - highest /= 10.0 - scale *= 10.0 - } - // Now 1 <= highest < 10 - for (var i = 0; i < SCALES.length; i++) { - if (highest <= SCALES[i][2]) { - return [SCALES[i][0], SCALES[i][1] * scale, SCALES[i][2] * scale]; - } - } - // Avoid the need for "assert False". Not actually reachable. - return [5, 2.0 * scale, 10.0 * scale]; + +/** + * URL of a transparent 1x1 GIF. + * @type {string} + */ +Gantt.prototype.PIX = 'stats/static/pix.gif'; + + +/** + * CSS class name prefix. + * @type {string} + */ +Gantt.prototype.PREFIX = 'ae-stats-gantt-'; + + +/** + * Height of one bar. + * @type {string} + */ +Gantt.prototype.HEIGHT = '1em'; + + +/** + * Height of the extra bar. + * @type {string} + */ +Gantt.prototype.EXTRA_HEIGHT = '0.5em'; + + +/** + * Background color for the bar. + * @type {string} + */ +Gantt.prototype.BG_COLOR = '#eeeeff'; + + +/** + * Color of the main bar. + * @type {string} + */ +Gantt.prototype.COLOR = '#7777ff'; + + +/** + * Color of the extra bar. + * @type {string} + */ +Gantt.prototype.EXTRA_COLOR = '#ff6666'; + + +/** + * Font size of inline_label. + * @type {string} + */ +Gantt.prototype.INLINE_FONT_SIZE = '80%'; + + +/** + * Top of inline label text. + * @type {string} + */ +Gantt.prototype.INLINE_TOP = '0.1em'; + + +/** + * Color for ticks. + * @type {string} + */ +Gantt.prototype.TICK_COLOR = 'grey'; + + +/** + * @type {number} + */ +Gantt.prototype.highest_duration = 0; + + +/* + * Appends text to the output array. + * @param {string} text The text to append to the output. + */ +Gantt.prototype.write = function(text) { + this.output.push(text); +}; + + +/* + * Internal helper to draw a table row showing the scale. + * @param {number} howmany + * @param {number} spacing + * @param {number} scale + */ +Gantt.prototype.draw_scale = function(howmany, spacing, scale) { + this.write('' + + ''); + this.write('
'); + for (var i = 0; i <= howmany; i++) { + this.write(''); + this.write(''); + this.write(' ' + (i * spacing) + ''); // TODO: number format %4g } + this.write('
\n'); +}; - /* - * Add a bar to the chart. - * Args: - * label: Valid HTML or HTML-escaped text for the left column. - * start: Start time for the event. - * duration: Duration for the event. - * extra_duration: Duration for the second bar; use 0 to suppress. - * inline_label: Valid HTML or HTML-escaped text drawn after the bars; - * use '' to suppress. - * link_target: HTML-escaped link where clicking on any element - * will take you; use '' for no linking. - * All arguments representing times or durations should be integers - * or floats expressed in seconds. The scale drawn is always - * expressed in seconds (with limited precision). - */ - this.add_bar = function(label, start, duration, extra_duration, - inline_label, link_target) { - highest_duration = Math.max( - highest_duration, Math.max(start + duration, start + extra_duration)); - bars.push({label: label, start: start, duration: duration, - extra_duration: extra_duration, inline_label: inline_label, - link_target: link_target}); - return this; - }; - - /* - * Draw the bar chart as HTML. - */ - this.draw = function() { - output = []; - var scale = compute_scale(highest_duration); - var howmany = scale[0]; - var spacing = scale[1]; - var limit = scale[2]; - scale = 100.0 / limit; - write('\n'); - draw_scale(this, howmany, spacing, scale); - for (var i = 0; i < bars.length; i++) { - var bar = bars[i]; - write('\n\n\n'); + + } + this.draw_scale(howmany, spacing, scale); + this.write('
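One detail worth spelling out about the renderer above: draw() (further down in this hunk) computes scale = 100.0 / limit, so every time value becomes a percentage of the chart width, and draw_scale() places one tick per spacing step. The markup carrying those offsets was lost to HTML stripping in this copy of the patch, so the positioning attributes are an assumption, but the visible arithmetic works out as follows (illustrative only, not part of the patch):

  var parts = Gantt.compute_scale(7);  // [4, 2.0, 8.0]
  var scale = 100.0 / parts[2];        // 12.5 percent of chart width per second
  // Ticks for i = 0..4 presumably sit at i * spacing * scale:
  // 0%, 25%, 50%, 75%, 100%.
  // A bar starting at 3s with a 2s duration would span 37.5% to 62.5%.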
-
-  /*
-   * Add a bar to the chart.
-   * Args:
-   *   label: Valid HTML or HTML-escaped text for the left column.
-   *   start: Start time for the event.
-   *   duration: Duration for the event.
-   *   extra_duration: Duration for the second bar; use 0 to suppress.
-   *   inline_label: Valid HTML or HTML-escaped text drawn after the bars;
-   *     use '' to suppress.
-   *   link_target: HTML-escaped link where clicking on any element
-   *     will take you; use '' for no linking.
-   * All arguments representing times or durations should be integers
-   * or floats expressed in seconds. The scale drawn is always
-   * expressed in seconds (with limited precision).
-   */
-  this.add_bar = function(label, start, duration, extra_duration,
-                          inline_label, link_target) {
-    highest_duration = Math.max(
-        highest_duration, Math.max(start + duration, start + extra_duration));
-    bars.push({label: label, start: start, duration: duration,
-               extra_duration: extra_duration, inline_label: inline_label,
-               link_target: link_target});
-    return this;
-  };
-
-  /*
-   * Draw the bar chart as HTML.
-   */
-  this.draw = function() {
-    output = [];
-    var scale = compute_scale(highest_duration);
-    var howmany = scale[0];
-    var spacing = scale[1];
-    var limit = scale[2];
-    scale = 100.0 / limit;
-    write('\n');
-    draw_scale(this, howmany, spacing, scale);
-    for (var i = 0; i < bars.length; i++) {
-      var bar = bars[i];
-      write('\n\n\n');
+  }
+  this.draw_scale(howmany, spacing, scale);
+  this.write('');
-      if (bar.label.length > 0) {
-        if (bar.link_target.length > 0) {
-          write('');
-        }
-        write(bar.label);
-        if (bar.link_target.length > 0) {
-          write('');
-        }
-      }
-      write('');
-      write('');
+
+/**
+ * Draw the bar chart as HTML.
+ */
+Gantt.prototype.draw = function() {
+  this.output = [];
+  var scale = Gantt.compute_scale(this.highest_duration);
+  var howmany = scale[0];
+  var spacing = scale[1];
+  var limit = scale[2];
+  scale = 100.0 / limit;
+  this.write('\n');
+  this.draw_scale(howmany, spacing, scale);
+  for (var i = 0; i < this.bars.length; i++) {
+    var bar = this.bars[i];
+    this.write('\n');
-    }
-    draw_scale(this, howmany, spacing, scale);
-    write('');
+    if (bar.label.length > 0) {
       if (bar.link_target.length > 0) {
-        write('');
-      }
-      write('');
-      if (bar.extra_duration > 0) {
-        write('');
-      }
-      if (bar.inline_label.length > 0) {
-        write(' ');
-        write(bar.inline_label);
-        write('');
+        this.write('');
       }
+      this.write(bar.label);
       if (bar.link_target.length > 0) {
-        write('');
+        this.write('');
       }
-      write('\n');
-      return output.join('');
-    };
-}
+      this.write('');
+      this.write('\n");
+  var html = this.output.join('');
+  return html;
+};
+
+
+/**
+ * Add a bar to the chart.
+ * All arguments representing times or durations should be integers
+ * or floats expressed in seconds. The scale drawn is always
+ * expressed in seconds (with limited precision).
+ * @param {string} label Valid HTML or HTML-escaped text for the left column.
+ * @param {number} start Start time for the event.
+ * @param {number} duration Duration for the event.
+ * @param {number} extra_duration Duration for the second bar; use 0 to
+ *     suppress.
+ * @param {string} inline_label Valid HTML or HTML-escaped text drawn after the
+ *     bars; use '' to suppress.
+ * @param {string} link_target HTML-escaped link where clicking on any element
+ *     will take you; use '' for no linking.
+ */
+Gantt.prototype.add_bar = function(label, start, duration, extra_duration,
+    inline_label, link_target) {
+  this.highest_duration = Math.max(
+      this.highest_duration, Math.max(start + duration,
+          start + extra_duration));
+  this.bars.push({label: label, start: start, duration: duration,
+      extra_duration: extra_duration, inline_label: inline_label,
+      link_target: link_target});
+};
+
+
+goog.exportSymbol('Gantt', Gantt);
+goog.exportProperty(Gantt.prototype, 'add_bar', Gantt.prototype.add_bar);
+goog.exportProperty(Gantt.prototype, 'draw', Gantt.prototype.draw);
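Taken together, the exported surface of the rewritten gantt.js is small: construct, add bars, render. A hypothetical caller, matching the exports above (the element id and the bar data are assumptions made for illustration):

  var gantt = new Gantt();
  // add_bar(label, start, duration, extra_duration, inline_label, link_target)
  gantt.add_bar('urlfetch.Fetch', 0.0, 0.25, 0, '250ms', '');
  gantt.add_bar('datastore_v3.Put', 0.3, 0.1, 0.05, '', '#detail');
  document.getElementById('ae-stats-chart').innerHTML = gantt.draw();

Note the design shift: the old closure-based version returned this from add_bar to allow chaining, while the rewrite keeps its state on the instance (this.bars, this.output, this.highest_duration) and returns nothing from add_bar, with draw() resetting this.output so it can be called repeatedly.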
\n'); + + var html = this.output.join(''); + return html; +}; + + +/** + * Add a bar to the chart. + * All arguments representing times or durations should be integers + * or floats expressed in seconds. The scale drawn is always + * expressed in seconds (with limited precision). + * @param {string} label Valid HTML or HTML-escaped text for the left column. + * @param {number} start Start time for the event. + * @param {number} duration Duration for the event. + * @param {number} extra_duration Duration for the second bar; use 0 to + * suppress. + * @param {string} inline_label Valid HTML or HTML-escaped text drawn after the + * bars; use '' to suppress. + * @param {string} link_target HTML-escaped link where clicking on any element + * will take you; use '' for no linking. + */ +Gantt.prototype.add_bar = function(label, start, duration, extra_duration, + inline_label, link_target) { + this.highest_duration = Math.max( + this.highest_duration, Math.max(start + duration, + start + extra_duration)); + this.bars.push({label: label, start: start, duration: duration, + extra_duration: extra_duration, inline_label: inline_label, + link_target: link_target}); +}; + + +goog.exportSymbol('Gantt', Gantt); +goog.exportProperty(Gantt.prototype, 'add_bar', Gantt.prototype.add_bar); +goog.exportProperty(Gantt.prototype, 'draw', Gantt.prototype.draw); diff --git a/google-appengine/google/appengine/ext/appstats/templates/details.html b/google-appengine/google/appengine/ext/appstats/templates/details.html index 9dd44a6..20710af 100644 --- a/google-appengine/google/appengine/ext/appstats/templates/details.html +++ b/google-appengine/google/appengine/ext/appstats/templates/details.html @@ -163,7 +163,6 @@ {% endblock %} {% block tailstuff %} -