author    Jason A. Donenfeld <Jason@zx2c4.com>  2009-10-19 20:20:09 -0400
committer Jason A. Donenfeld <Jason@zx2c4.com>  2009-10-19 20:20:09 -0400
commit    2d6dd2c5ade3f5fad3e2257dce52a6e188fe7535 (patch)
tree      da9c93d2f87df6d2b688a455a31e69859117ba1e /google_appengine/google/appengine/api
download  FramedPrototype-2d6dd2c5ade3f5fad3e2257dce52a6e188fe7535.tar.xz
          FramedPrototype-2d6dd2c5ade3f5fad3e2257dce52a6e188fe7535.zip
Initial import.
Diffstat (limited to 'google_appengine/google/appengine/api')
-rwxr-xr-x google_appengine/google/appengine/api/__init__.py 16
-rw-r--r-- google_appengine/google/appengine/api/__init__.pyc bin 0 -> 156 bytes
-rw-r--r-- google_appengine/google/appengine/api/api_base_pb.py 582
-rw-r--r-- google_appengine/google/appengine/api/api_base_pb.pyc bin 0 -> 26242 bytes
-rwxr-xr-x google_appengine/google/appengine/api/apiproxy_rpc.py 150
-rw-r--r-- google_appengine/google/appengine/api/apiproxy_rpc.pyc bin 0 -> 5367 bytes
-rwxr-xr-x google_appengine/google/appengine/api/apiproxy_stub.py 80
-rw-r--r-- google_appengine/google/appengine/api/apiproxy_stub.pyc bin 0 -> 2862 bytes
-rwxr-xr-x google_appengine/google/appengine/api/apiproxy_stub_map.py 470
-rw-r--r-- google_appengine/google/appengine/api/apiproxy_stub_map.pyc bin 0 -> 19038 bytes
-rwxr-xr-x google_appengine/google/appengine/api/app_logging.py 99
-rwxr-xr-x google_appengine/google/appengine/api/appinfo.py 430
-rw-r--r-- google_appengine/google/appengine/api/appinfo.pyc bin 0 -> 13746 bytes
-rwxr-xr-x google_appengine/google/appengine/api/appinfo_errors.py 46
-rw-r--r-- google_appengine/google/appengine/api/appinfo_errors.pyc bin 0 -> 2564 bytes
-rwxr-xr-x google_appengine/google/appengine/api/capabilities/__init__.py 172
-rw-r--r-- google_appengine/google/appengine/api/capabilities/__init__.pyc bin 0 -> 5952 bytes
-rw-r--r-- google_appengine/google/appengine/api/capabilities/capability_service_pb.py 366
-rw-r--r-- google_appengine/google/appengine/api/capabilities/capability_service_pb.pyc bin 0 -> 18033 bytes
-rwxr-xr-x google_appengine/google/appengine/api/capabilities/capability_stub.py 53
-rw-r--r-- google_appengine/google/appengine/api/capabilities/capability_stub.pyc bin 0 -> 1762 bytes
-rwxr-xr-x google_appengine/google/appengine/api/croninfo.py 132
-rw-r--r-- google_appengine/google/appengine/api/croninfo.pyc bin 0 -> 4778 bytes
-rwxr-xr-x google_appengine/google/appengine/api/datastore.py 2170
-rw-r--r-- google_appengine/google/appengine/api/datastore.pyc bin 0 -> 73429 bytes
-rwxr-xr-x google_appengine/google/appengine/api/datastore_admin.py 213
-rw-r--r-- google_appengine/google/appengine/api/datastore_admin.pyc bin 0 -> 7403 bytes
-rwxr-xr-x google_appengine/google/appengine/api/datastore_entities.py 343
-rwxr-xr-x google_appengine/google/appengine/api/datastore_errors.py 105
-rw-r--r-- google_appengine/google/appengine/api/datastore_errors.pyc bin 0 -> 6056 bytes
-rwxr-xr-x google_appengine/google/appengine/api/datastore_file_stub.py 1061
-rw-r--r-- google_appengine/google/appengine/api/datastore_file_stub.pyc bin 0 -> 36485 bytes
-rwxr-xr-x google_appengine/google/appengine/api/datastore_types.py 1788
-rw-r--r-- google_appengine/google/appengine/api/datastore_types.pyc bin 0 -> 64168 bytes
-rwxr-xr-x google_appengine/google/appengine/api/images/__init__.py 827
-rw-r--r-- google_appengine/google/appengine/api/images/__init__.pyc bin 0 -> 29034 bytes
-rwxr-xr-x google_appengine/google/appengine/api/images/images_not_implemented_stub.py 36
-rw-r--r-- google_appengine/google/appengine/api/images/images_not_implemented_stub.pyc bin 0 -> 1326 bytes
-rw-r--r-- google_appengine/google/appengine/api/images/images_service_pb.py 1988
-rw-r--r-- google_appengine/google/appengine/api/images/images_service_pb.pyc bin 0 -> 92677 bytes
-rwxr-xr-x google_appengine/google/appengine/api/images/images_stub.py 411
-rw-r--r-- google_appengine/google/appengine/api/images/images_stub.pyc bin 0 -> 13183 bytes
-rwxr-xr-x google_appengine/google/appengine/api/labs/__init__.py 16
-rw-r--r-- google_appengine/google/appengine/api/labs/__init__.pyc bin 0 -> 161 bytes
-rw-r--r-- google_appengine/google/appengine/api/labs/taskqueue/__init__.py 20
-rw-r--r-- google_appengine/google/appengine/api/labs/taskqueue/__init__.pyc bin 0 -> 258 bytes
-rwxr-xr-x google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py 633
-rw-r--r-- google_appengine/google/appengine/api/labs/taskqueue/taskqueue.pyc bin 0 -> 25279 bytes
-rw-r--r-- google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py 1645
-rw-r--r-- google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.pyc bin 0 -> 80247 bytes
-rwxr-xr-x google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py 327
-rw-r--r-- google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.pyc bin 0 -> 10766 bytes
-rwxr-xr-x google_appengine/google/appengine/api/mail.py 1127
-rw-r--r-- google_appengine/google/appengine/api/mail.pyc bin 0 -> 36719 bytes
-rwxr-xr-x google_appengine/google/appengine/api/mail_errors.py 55
-rw-r--r-- google_appengine/google/appengine/api/mail_errors.pyc bin 0 -> 3365 bytes
-rw-r--r-- google_appengine/google/appengine/api/mail_service_pb.py 584
-rw-r--r-- google_appengine/google/appengine/api/mail_service_pb.pyc bin 0 -> 26434 bytes
-rwxr-xr-x google_appengine/google/appengine/api/mail_stub.py 233
-rw-r--r-- google_appengine/google/appengine/api/mail_stub.pyc bin 0 -> 7980 bytes
-rwxr-xr-x google_appengine/google/appengine/api/memcache/__init__.py 931
-rw-r--r-- google_appengine/google/appengine/api/memcache/__init__.pyc bin 0 -> 35018 bytes
-rw-r--r-- google_appengine/google/appengine/api/memcache/memcache_service_pb.py 2002
-rw-r--r-- google_appengine/google/appengine/api/memcache/memcache_service_pb.pyc bin 0 -> 96235 bytes
-rwxr-xr-x google_appengine/google/appengine/api/memcache/memcache_stub.py 293
-rw-r--r-- google_appengine/google/appengine/api/memcache/memcache_stub.pyc bin 0 -> 10214 bytes
-rwxr-xr-x google_appengine/google/appengine/api/namespace_manager/__init__.py 75
-rw-r--r-- google_appengine/google/appengine/api/namespace_manager/__init__.pyc bin 0 -> 2413 bytes
-rwxr-xr-x google_appengine/google/appengine/api/queueinfo.py 143
-rw-r--r-- google_appengine/google/appengine/api/queueinfo.pyc bin 0 -> 4721 bytes
-rwxr-xr-x google_appengine/google/appengine/api/quota.py 71
-rwxr-xr-x google_appengine/google/appengine/api/urlfetch.py 361
-rw-r--r-- google_appengine/google/appengine/api/urlfetch.pyc bin 0 -> 12400 bytes
-rwxr-xr-x google_appengine/google/appengine/api/urlfetch_errors.py 60
-rw-r--r-- google_appengine/google/appengine/api/urlfetch_errors.pyc bin 0 -> 2546 bytes
-rw-r--r-- google_appengine/google/appengine/api/urlfetch_service_pb.py 823
-rw-r--r-- google_appengine/google/appengine/api/urlfetch_service_pb.pyc bin 0 -> 37893 bytes
-rwxr-xr-x google_appengine/google/appengine/api/urlfetch_stub.py 270
-rw-r--r-- google_appengine/google/appengine/api/urlfetch_stub.pyc bin 0 -> 8446 bytes
-rw-r--r-- google_appengine/google/appengine/api/user_service_pb.py 491
-rw-r--r-- google_appengine/google/appengine/api/user_service_pb.pyc bin 0 -> 22376 bytes
-rwxr-xr-x google_appengine/google/appengine/api/user_service_stub.py 106
-rw-r--r-- google_appengine/google/appengine/api/user_service_stub.pyc bin 0 -> 3831 bytes
-rwxr-xr-x google_appengine/google/appengine/api/users.py 230
-rw-r--r-- google_appengine/google/appengine/api/users.pyc bin 0 -> 8366 bytes
-rwxr-xr-x google_appengine/google/appengine/api/validation.py 928
-rw-r--r-- google_appengine/google/appengine/api/validation.pyc bin 0 -> 35134 bytes
-rwxr-xr-x google_appengine/google/appengine/api/xmpp/__init__.py 332
-rw-r--r-- google_appengine/google/appengine/api/xmpp/__init__.pyc bin 0 -> 11264 bytes
-rw-r--r-- google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py 826
-rw-r--r-- google_appengine/google/appengine/api/xmpp/xmpp_service_pb.pyc bin 0 -> 37790 bytes
-rwxr-xr-x google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py 154
-rw-r--r-- google_appengine/google/appengine/api/xmpp/xmpp_service_stub.pyc bin 0 -> 4894 bytes
-rwxr-xr-x google_appengine/google/appengine/api/yaml_builder.py 432
-rw-r--r-- google_appengine/google/appengine/api/yaml_builder.pyc bin 0 -> 15897 bytes
-rwxr-xr-x google_appengine/google/appengine/api/yaml_errors.py 96
-rw-r--r-- google_appengine/google/appengine/api/yaml_errors.pyc bin 0 -> 4937 bytes
-rwxr-xr-x google_appengine/google/appengine/api/yaml_listener.py 218
-rw-r--r-- google_appengine/google/appengine/api/yaml_listener.pyc bin 0 -> 9436 bytes
-rwxr-xr-x google_appengine/google/appengine/api/yaml_object.py 294
-rw-r--r-- google_appengine/google/appengine/api/yaml_object.pyc bin 0 -> 10844 bytes
101 files changed, 25314 insertions, 0 deletions
diff --git a/google_appengine/google/appengine/api/__init__.py b/google_appengine/google/appengine/api/__init__.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/api/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/api/__init__.pyc b/google_appengine/google/appengine/api/__init__.pyc
new file mode 100644
index 0000000..874041c
--- /dev/null
+++ b/google_appengine/google/appengine/api/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/api_base_pb.py b/google_appengine/google/appengine/api/api_base_pb.py
new file mode 100644
index 0000000..aa30190
--- /dev/null
+++ b/google_appengine/google/appengine/api/api_base_pb.py
@@ -0,0 +1,582 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class StringProto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.value_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class Integer32Proto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = 0
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.value_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_value(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt32(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class Integer64Proto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = 0
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.value_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt64(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_value(d.getVarInt64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt64(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class BoolProto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = 0
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 2
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putBoolean(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_value(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatBool(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class DoubleProto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = 0.0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = 0.0
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 9
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(9)
+ out.putDouble(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 9:
+ self.set_value(d.getDouble())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormat(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.DOUBLE,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class BytesProto(ProtocolBuffer.ProtocolMessage):
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.value_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kvalue = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class VoidProto(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['StringProto','Integer32Proto','Integer64Proto','BoolProto','DoubleProto','BytesProto','VoidProto']
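
The generated wrappers above all follow the same pattern; a minimal round-trip sketch is below. It assumes Encode() from the ProtocolBuffer.ProtocolMessage base class (not part of this diff), while the constructor's MergeFromString() path is visible in the code above.

    from google.appengine.api import api_base_pb

    req = api_base_pb.StringProto()
    req.set_value('hello')
    assert req.IsInitialized()

    # Encode() is assumed from the ProtocolMessage base class; the
    # constructor passes its argument to MergeFromString(), as shown above.
    wire = req.Encode()
    copy = api_base_pb.StringProto(wire)
    assert copy.Equals(req) and copy.value() == 'hello'
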
diff --git a/google_appengine/google/appengine/api/api_base_pb.pyc b/google_appengine/google/appengine/api/api_base_pb.pyc
new file mode 100644
index 0000000..fbbb0fc
--- /dev/null
+++ b/google_appengine/google/appengine/api/api_base_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/apiproxy_rpc.py b/google_appengine/google/appengine/api/apiproxy_rpc.py
new file mode 100755
index 0000000..2ac8923
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_rpc.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Base class for implementing RPC of API proxy stubs."""
+
+
+
+
+
+import sys
+
+
+class RPC(object):
+ """Base class for implementing RPC of API proxy stubs.
+
+  To implement an RPC that makes a real asynchronous API call:
+ - Extend this class.
+ - Override _MakeCallImpl and/or _WaitImpl to do a real asynchronous call.
+ """
+
+ IDLE = 0
+ RUNNING = 1
+ FINISHING = 2
+
+ def __init__(self, package=None, call=None, request=None, response=None,
+ callback=None, deadline=None, stub=None):
+ """Constructor for the RPC object.
+
+ All arguments are optional, and simply set members on the class.
+    These data members will be overridden by values passed to MakeCall.
+
+ Args:
+ package: string, the package for the call
+ call: string, the call within the package
+ request: ProtocolMessage instance, appropriate for the arguments
+ response: ProtocolMessage instance, appropriate for the response
+ callback: callable, called when call is complete
+ deadline: A double specifying the deadline for this call as the number of
+ seconds from the current time. Ignored if non-positive.
+ stub: APIProxyStub instance, used in default _WaitImpl to do real call
+ """
+ self.__exception = None
+ self.__state = RPC.IDLE
+ self.__traceback = None
+
+ self.package = package
+ self.call = call
+ self.request = request
+ self.response = response
+ self.callback = callback
+ self.deadline = deadline
+ self.stub = stub
+ self.cpu_usage_mcycles = 0
+
+ def MakeCall(self, package=None, call=None, request=None, response=None,
+ callback=None, deadline=None):
+ """Makes an asynchronous (i.e. non-blocking) API call within the
+ specified package for the specified call method.
+
+    It will call _MakeCallImpl to do the real work.
+
+ Args:
+ Same as constructor; see __init__.
+
+ Raises:
+ TypeError or AssertionError if an argument is of an invalid type.
+      AssertionError or RuntimeError if an RPC is already in use.
+ """
+ self.callback = callback or self.callback
+ self.package = package or self.package
+ self.call = call or self.call
+ self.request = request or self.request
+ self.response = response or self.response
+ self.deadline = deadline or self.deadline
+
+ assert self.__state is RPC.IDLE, ('RPC for %s.%s has already been started' %
+ (self.package, self.call))
+ assert self.callback is None or callable(self.callback)
+ self._MakeCallImpl()
+
+ def Wait(self):
+ """Waits on the API call associated with this RPC."""
+ rpc_completed = self._WaitImpl()
+
+    assert rpc_completed, ('RPC for %s.%s was not completed, and no other '
+                           'exception was raised' % (self.package, self.call))
+
+ def CheckSuccess(self):
+ """If there was an exception, raise it now.
+
+ Raises:
+ Exception of the API call or the callback, if any.
+ """
+ if self.exception and self.__traceback:
+ raise self.exception.__class__, self.exception, self.__traceback
+ elif self.exception:
+ raise self.exception
+
+ @property
+ def exception(self):
+ return self.__exception
+
+ @property
+ def state(self):
+ return self.__state
+
+ def _MakeCallImpl(self):
+ """Override this method to implement a real asynchronous call rpc."""
+ self.__state = RPC.RUNNING
+
+ def _WaitImpl(self):
+ """Override this method to implement a real asynchronous call rpc.
+
+ Returns:
+ True if the async call was completed successfully.
+ """
+ try:
+ try:
+ self.stub.MakeSyncCall(self.package, self.call,
+ self.request, self.response)
+ except Exception, e:
+ self.__exception = e
+ finally:
+ self.__state = RPC.FINISHING
+ self.__Callback()
+
+ return True
+
+ def __Callback(self):
+ if self.callback:
+ try:
+ self.callback()
+ except:
+ exc_class, self.__exception, self.__traceback = sys.exc_info()
+ self.__exception._appengine_apiproxy_rpc = self
+ raise
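
The default implementations above are effectively synchronous: _MakeCallImpl only flips the state to RUNNING, and _WaitImpl does the blocking MakeSyncCall through self.stub. A minimal sketch of the documented extension point, using a hypothetical tracing subclass (the print line is illustrative):

    from google.appengine.api import apiproxy_rpc

    class TracingRPC(apiproxy_rpc.RPC):
        """Hypothetical RPC subclass that traces calls before delegating."""

        def _MakeCallImpl(self):
            # A real asynchronous implementation would dispatch the request
            # here and return without blocking; this sketch just traces and
            # defers to the base class, which marks the RPC as RUNNING.
            print 'starting %s.%s' % (self.package, self.call)
            super(TracingRPC, self)._MakeCallImpl()

It is used exactly like the base class: construct with a stub, then MakeCall(), Wait(), CheckSuccess().
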
diff --git a/google_appengine/google/appengine/api/apiproxy_rpc.pyc b/google_appengine/google/appengine/api/apiproxy_rpc.pyc
new file mode 100644
index 0000000..da77a36
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_rpc.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/apiproxy_stub.py b/google_appengine/google/appengine/api/apiproxy_stub.py
new file mode 100755
index 0000000..5104ab2
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_stub.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Base class for implementing API proxy stubs."""
+
+
+
+
+
+from google.appengine.api import apiproxy_rpc
+from google.appengine.runtime import apiproxy_errors
+
+
+MAX_REQUEST_SIZE = 1 << 20
+
+
+class APIProxyStub(object):
+ """Base class for implementing API proxy stub classes.
+
+ To implement an API proxy stub:
+ - Extend this class.
+ - Override __init__ to pass in appropriate default service name.
+ - Implement service methods as _Dynamic_<method>(request, response).
+ """
+
+ def __init__(self, service_name, max_request_size=MAX_REQUEST_SIZE):
+ """Constructor.
+
+ Args:
+ service_name: Service name expected for all calls.
+      max_request_size: int, maximum allowable size of the incoming request. An
+ apiproxy_errors.RequestTooLargeError will be raised if the inbound
+ request exceeds this size. Default is 1 MB.
+ """
+ self.__service_name = service_name
+ self.__max_request_size = max_request_size
+
+ def CreateRPC(self):
+ """Creates RPC object instance.
+
+ Returns:
+      an instance of RPC.
+ """
+ return apiproxy_rpc.RPC(stub=self)
+
+ def MakeSyncCall(self, service, call, request, response):
+ """The main RPC entry point.
+
+ Args:
+      service: Must be the name provided as service_name to the constructor.
+      call: A string representing the rpc to make. Must be part of
+        the underlying service's methods and implemented by _Dynamic_<call>.
+ request: A protocol buffer of the type corresponding to 'call'.
+ response: A protocol buffer of the type corresponding to 'call'.
+ """
+ assert service == self.__service_name, ('Expected "%s" service name, '
+ 'was "%s"' % (self.__service_name,
+ service))
+ if request.ByteSize() > self.__max_request_size:
+ raise apiproxy_errors.RequestTooLargeError(
+ 'The request to API call %s.%s() was too large.' % (service, call))
+ messages = []
+ assert request.IsInitialized(messages), messages
+
+ method = getattr(self, '_Dynamic_' + call)
+ method(request, response)
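
To make the _Dynamic_<method> contract concrete, here is a sketch of a stub for an invented 'echo' service; EchoServiceStub and the service name are hypothetical, and the request/response types reuse api_base_pb.StringProto from earlier in this commit:

    from google.appengine.api import apiproxy_stub

    class EchoServiceStub(apiproxy_stub.APIProxyStub):
        """Hypothetical stub whose one method copies the request value."""

        def __init__(self, service_name='echo'):
            super(EchoServiceStub, self).__init__(service_name)

        def _Dynamic_Echo(self, request, response):
            # MakeSyncCall checks the service name and request size, then
            # dispatches here via getattr(self, '_Dynamic_' + call).
            response.set_value(request.value())
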
diff --git a/google_appengine/google/appengine/api/apiproxy_stub.pyc b/google_appengine/google/appengine/api/apiproxy_stub.pyc
new file mode 100644
index 0000000..41e7a0c
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/apiproxy_stub_map.py b/google_appengine/google/appengine/api/apiproxy_stub_map.py
new file mode 100755
index 0000000..716498f
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_stub_map.py
@@ -0,0 +1,470 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Container of APIProxy stubs for more convenient unittesting.
+
+Classes/variables/functions defined here:
+ APIProxyStubMap: container of APIProxy stubs.
+ apiproxy: global instance of an APIProxyStubMap.
+ MakeSyncCall: APIProxy entry point.
+ UserRPC: User-visible class wrapping asynchronous RPCs.
+"""
+
+
+
+
+
+import inspect
+import sys
+
+from google.appengine.api import apiproxy_rpc
+
+
+def CreateRPC(service):
+ """Creates a RPC instance for the given service.
+
+ The instance is suitable for talking to remote services.
+ Each RPC instance can be used only once, and should not be reused.
+
+ Args:
+ service: string representing which service to call.
+
+ Returns:
+ the rpc object.
+
+ Raises:
+ AssertionError or RuntimeError if the stub for service doesn't supply a
+ CreateRPC method.
+ """
+ stub = apiproxy.GetStub(service)
+ assert stub, 'No api proxy found for service "%s"' % service
+  assert hasattr(stub, 'CreateRPC'), ('The service "%s" doesn\'t have '
+                                      'a CreateRPC method.' % service)
+ return stub.CreateRPC()
+
+
+def MakeSyncCall(service, call, request, response):
+ """The APIProxy entry point for a synchronous API call.
+
+ Args:
+ service: string representing which service to call
+ call: string representing which function to call
+ request: protocol buffer for the request
+ response: protocol buffer for the response
+
+ Raises:
+ apiproxy_errors.Error or a subclass.
+ """
+ apiproxy.MakeSyncCall(service, call, request, response)
+
+
+class ListOfHooks(object):
+ """An ordered collection of hooks for a particular API call.
+
+ A hook is a function that has exactly the same signature as
+  a service stub. It will be called before or after an api call is
+  executed, depending on whether this list is for precall or postcall hooks.
+ Hooks can be used for debugging purposes (check certain
+ pre- or postconditions on api calls) or to apply patches to protocol
+ buffers before/after a call gets submitted.
+ """
+
+ def __init__(self):
+ """Constructor."""
+
+ self.__content = []
+
+ self.__unique_keys = set()
+
+ def __len__(self):
+ """Returns the amount of elements in the collection."""
+ return self.__content.__len__()
+
+ def __Insert(self, index, key, function, service=None):
+ """Appends a hook at a certain position in the list.
+
+ Args:
+ index: the index of where to insert the function
+ key: a unique key (within the module) for this particular function.
+ If something from the same module with the same key is already
+ registered, nothing will be added.
+ function: the hook to be added.
+ service: optional argument that restricts the hook to a particular api
+
+ Returns:
+ True if the collection was modified.
+ """
+ unique_key = (key, inspect.getmodule(function))
+ if unique_key in self.__unique_keys:
+ return False
+ num_args = len(inspect.getargspec(function)[0])
+ if (inspect.ismethod(function)):
+ num_args -= 1
+ self.__content.insert(index, (key, function, service, num_args))
+ self.__unique_keys.add(unique_key)
+ return True
+
+ def Append(self, key, function, service=None):
+ """Appends a hook at the end of the list.
+
+ Args:
+ key: a unique key (within the module) for this particular function.
+ If something from the same module with the same key is already
+ registered, nothing will be added.
+ function: the hook to be added.
+ service: optional argument that restricts the hook to a particular api
+
+ Returns:
+ True if the collection was modified.
+ """
+ return self.__Insert(len(self), key, function, service)
+
+ def Push(self, key, function, service=None):
+ """Inserts a hook at the beginning of the list.
+
+ Args:
+ key: a unique key (within the module) for this particular function.
+ If something from the same module with the same key is already
+ registered, nothing will be added.
+ function: the hook to be added.
+ service: optional argument that restricts the hook to a particular api
+
+ Returns:
+ True if the collection was modified.
+ """
+ return self.__Insert(0, key, function, service)
+
+ def Clear(self):
+ """Removes all hooks from the list (useful for unit tests)."""
+ self.__content = []
+ self.__unique_keys = set()
+
+ def Call(self, service, call, request, response, rpc=None):
+ """Invokes all hooks in this collection.
+
+ Args:
+ service: string representing which service to call
+ call: string representing which function to call
+ request: protocol buffer for the request
+ response: protocol buffer for the response
+ rpc: optional RPC used to make this call
+ """
+ for key, function, srv, num_args in self.__content:
+ if srv is None or srv == service:
+ if num_args == 5:
+ function(service, call, request, response, rpc)
+ else:
+ function(service, call, request, response)
+
+
+class APIProxyStubMap(object):
+ """Container of APIProxy stubs for more convenient unittesting.
+
+ Stubs may be either trivial implementations of APIProxy services (e.g.
+ DatastoreFileStub, UserServiceStub) or "real" implementations.
+
+ For unittests, we may want to mix and match real and trivial implementations
+ of services in order to better focus testing on individual service
+ implementations. To achieve this, we allow the client to attach stubs to
+ service names, as well as define a default stub to be used if no specific
+ matching stub is identified.
+ """
+
+
+ def __init__(self, default_stub=None):
+ """Constructor.
+
+ Args:
+ default_stub: optional stub
+
+ 'default_stub' will be used whenever no specific matching stub is found.
+ """
+ self.__stub_map = {}
+ self.__default_stub = default_stub
+ self.__precall_hooks = ListOfHooks()
+ self.__postcall_hooks = ListOfHooks()
+
+ def GetPreCallHooks(self):
+ """Gets a collection for all precall hooks."""
+ return self.__precall_hooks
+
+ def GetPostCallHooks(self):
+ """Gets a collection for all precall hooks."""
+ return self.__postcall_hooks
+
+ def RegisterStub(self, service, stub):
+ """Register the provided stub for the specified service.
+
+ Args:
+ service: string
+ stub: stub
+ """
+ assert not self.__stub_map.has_key(service), repr(service)
+ self.__stub_map[service] = stub
+
+ if service == 'datastore':
+ self.RegisterStub('datastore_v3', stub)
+
+ def GetStub(self, service):
+ """Retrieve the stub registered for the specified service.
+
+ Args:
+ service: string
+
+ Returns:
+ stub
+
+ Returns the stub registered for 'service', and returns the default stub
+ if no such stub is found.
+ """
+ return self.__stub_map.get(service, self.__default_stub)
+
+ def MakeSyncCall(self, service, call, request, response):
+ """The APIProxy entry point.
+
+ Args:
+ service: string representing which service to call
+ call: string representing which function to call
+ request: protocol buffer for the request
+ response: protocol buffer for the response
+
+ Raises:
+ apiproxy_errors.Error or a subclass.
+ """
+ stub = self.GetStub(service)
+ assert stub, 'No api proxy found for service "%s"' % service
+ if hasattr(stub, 'CreateRPC'):
+ rpc = stub.CreateRPC()
+ self.__precall_hooks.Call(service, call, request, response, rpc)
+ rpc.MakeCall(service, call, request, response)
+ rpc.Wait()
+ rpc.CheckSuccess()
+ self.__postcall_hooks.Call(service, call, request, response, rpc)
+ else:
+ self.__precall_hooks.Call(service, call, request, response)
+ stub.MakeSyncCall(service, call, request, response)
+ self.__postcall_hooks.Call(service, call, request, response)
+
+
+class UserRPC(object):
+ """Wrapper class for asynchronous RPC.
+
+ Simplest low-level usage pattern:
+
+ rpc = UserRPC('service', [deadline], [callback])
+ rpc.make_call('method', request, response)
+ .
+ .
+ .
+ rpc.wait()
+ rpc.check_success()
+
+ However, a service module normally provides a wrapper so that the
+ typical usage pattern becomes more like this:
+
+ from google.appengine.api import service
+ rpc = service.create_rpc([deadline], [callback])
+ service.make_method_call(rpc, [service-specific-args])
+ .
+ .
+ .
+ rpc.wait()
+ result = rpc.get_result()
+
+ The service.make_method_call() function sets a service- and method-
+ specific hook function that is called by rpc.get_result() with the
+ rpc object as its first argument, and service-specific value as its
+ second argument. The hook function should call rpc.check_success()
+ and then extract the user-level result from the rpc.result
+ protobuffer. Additional arguments may be passed from
+ make_method_call() to the get_result hook via the second argument.
+ """
+
+ __method = None
+ __get_result_hook = None
+ __user_data = None
+ __postcall_hooks_called = False
+
+ def __init__(self, service, deadline=None, callback=None):
+ """Constructor.
+
+ Args:
+ service: The service name.
+ deadline: Optional deadline. Default depends on the implementation.
+ callback: Optional argument-less callback function.
+ """
+ self.__service = service
+ self.__rpc = CreateRPC(service)
+ self.__rpc.deadline = deadline
+ self.__rpc.callback = callback
+
+ @property
+ def service(self):
+ """Return the service name."""
+ return self.__service
+
+ @property
+ def method(self):
+ """Return the method name."""
+ return self.__method
+
+ @property
+ def deadline(self):
+ """Return the deadline, if set explicitly (otherwise None)."""
+ return self.__rpc.deadline
+
+ def __get_callback(self):
+ """Return the callback attribute, a function without arguments.
+
+ This attribute can also be assigned to. For example, the
+ following code calls some_other_function(rpc) when the RPC is
+ complete:
+
+ rpc = service.create_rpc()
+ rpc.callback = lambda: some_other_function(rpc)
+ service.make_method_call(rpc)
+ rpc.wait()
+ """
+ return self.__rpc.callback
+ def __set_callback(self, callback):
+ """Set the callback function."""
+ self.__rpc.callback = callback
+ callback = property(__get_callback, __set_callback)
+
+ @property
+ def request(self):
+ """Return the request protocol buffer object."""
+ return self.__rpc.request
+
+ @property
+ def response(self):
+ """Return the response protocol buffer object."""
+ return self.__rpc.response
+
+ @property
+ def state(self):
+ """Return the RPC state.
+
+ Possible values are attributes of apiproxy_rpc.RPC: IDLE, RUNNING,
+ FINISHING.
+ """
+ return self.__rpc.state
+
+ @property
+ def get_result_hook(self):
+ """Return the get-result hook function."""
+ return self.__get_result_hook
+
+ @property
+ def user_data(self):
+ """Return the user data for the hook function."""
+ return self.__user_data
+
+ def make_call(self, method, request, response,
+ get_result_hook=None, user_data=None):
+ """Initiate a call.
+
+ Args:
+ method: The method name.
+ request: The request protocol buffer.
+ response: The response protocol buffer.
+ get_result_hook: Optional get-result hook function. If not None,
+ this must be a function with exactly one argument, the RPC
+ object (self). Its return value is returned from get_result().
+ user_data: Optional additional arbitrary data for the get-result
+ hook function. This can be accessed as rpc.user_data. The
+ type of this value is up to the service module.
+
+ This function may only be called once per RPC object. It sends
+ the request to the remote server, but does not wait for a
+ response. This allows concurrent execution of the remote call and
+ further local processing (e.g., making additional remote calls).
+
+ Before the call is initiated, the precall hooks are called.
+ """
+ assert self.__rpc.state == apiproxy_rpc.RPC.IDLE, repr(self.state)
+ self.__method = method
+ self.__get_result_hook = get_result_hook
+ self.__user_data = user_data
+ apiproxy.GetPreCallHooks().Call(
+ self.__service, method, request, response, self.__rpc)
+ self.__rpc.MakeCall(self.__service, method, request, response)
+
+ def wait(self):
+ """Wait for the call to complete, and call callbacks.
+
+ This is the only time callback functions may be called. (However,
+ note that check_success() and get_result() call wait().) Waiting
+ for one RPC may cause callbacks for other RPCs to be called.
+ Callback functions may call check_success() and get_result().
+
+ Callbacks are called without arguments; if a callback needs access
+    to the RPC object, a Python nested function (a.k.a. closure) or a
+    bound method may be used. To facilitate this, the callback may be
+ assigned after the RPC object is created (but before make_call()
+ is called).
+
+ Note: don't confuse callbacks with get-result hooks or precall
+ and postcall hooks.
+ """
+ assert self.__rpc.state != apiproxy_rpc.RPC.IDLE, repr(self.state)
+ if self.__rpc.state == apiproxy_rpc.RPC.RUNNING:
+ self.__rpc.Wait()
+ assert self.__rpc.state == apiproxy_rpc.RPC.FINISHING, repr(self.state)
+
+ def check_success(self):
+ """Check for success of the RPC, possibly raising an exception.
+
+ This function should be called at least once per RPC. If wait()
+ hasn't been called yet, it is called first. If the RPC caused
+ an exceptional condition, an exception will be raised here.
+ The first time check_success() is called, the postcall hooks
+ are called.
+ """
+ self.wait()
+ self.__rpc.CheckSuccess()
+ if not self.__postcall_hooks_called:
+ self.__postcall_hooks_called = True
+ apiproxy.GetPostCallHooks().Call(self.__service, self.__method,
+ self.request, self.response, self.__rpc)
+
+ def get_result(self):
+ """Get the result of the RPC, or possibly raise an exception.
+
+ This implies a call to check_success(). If a get-result hook was
+ passed to make_call(), that hook is responsible for calling
+ check_success(), and the return value of the hook is returned.
+ Otherwise, check_success() is called directly and None is
+ returned.
+ """
+ if self.__get_result_hook is None:
+ self.check_success()
+ return None
+ else:
+ return self.__get_result_hook(self)
+
+
+def GetDefaultAPIProxy():
+ try:
+ runtime = __import__('google.appengine.runtime', globals(), locals(),
+ ['apiproxy'])
+ return APIProxyStubMap(runtime.apiproxy)
+ except (AttributeError, ImportError):
+ return APIProxyStubMap()
+
+
+apiproxy = GetDefaultAPIProxy()
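
Putting the pieces together, a sketch of registering a stub with the global map, attaching a precall hook, and issuing a synchronous call; it continues the hypothetical EchoServiceStub example from apiproxy_stub.py above:

    from google.appengine.api import api_base_pb
    from google.appengine.api import apiproxy_stub_map

    apiproxy_stub_map.apiproxy.RegisterStub('echo', EchoServiceStub())

    def trace(service, call, request, response):
        # Four-argument hooks are supported; a five-argument hook would
        # also receive the RPC object, per ListOfHooks.Call above.
        print 'calling %s.%s' % (service, call)

    apiproxy_stub_map.apiproxy.GetPreCallHooks().Append('trace', trace)

    request = api_base_pb.StringProto()
    request.set_value('ping')
    response = api_base_pb.StringProto()
    apiproxy_stub_map.apiproxy.MakeSyncCall('echo', 'Echo', request, response)
    assert response.value() == 'ping'
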
diff --git a/google_appengine/google/appengine/api/apiproxy_stub_map.pyc b/google_appengine/google/appengine/api/apiproxy_stub_map.pyc
new file mode 100644
index 0000000..5d889f2
--- /dev/null
+++ b/google_appengine/google/appengine/api/apiproxy_stub_map.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/app_logging.py b/google_appengine/google/appengine/api/app_logging.py
new file mode 100755
index 0000000..e576d37
--- /dev/null
+++ b/google_appengine/google/appengine/api/app_logging.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Logging utilities for use by applications.
+
+Classes defined here:
+ AppLogsHandler: StreamHandler subclass
+"""
+
+
+
+
+
+import logging
+import sys
+import types
+
+
+NEWLINE_REPLACEMENT = "\0"
+
+
+class AppLogsHandler(logging.StreamHandler):
+ """Logging handler that will direct output to a persistent store of
+ application logs.
+
+ This handler will output log statements to stderr. This handler is
+ automatically initialized and attached to the Python common logging library.
+ """
+
+
+
+
+ def __init__(self, stream=None):
+ """Constructor.
+
+ Args:
+ # stream is optional. it defaults to sys.stderr.
+ stream: destination for output
+ """
+ logging.StreamHandler.__init__(self, stream)
+
+ def close(self):
+ """Closes the stream.
+
+    This implementation is based on the implementation of FileHandler.close()."""
+ self.flush()
+ self.stream.close()
+ logging.StreamHandler.close(self)
+
+ def emit(self, record):
+ """Emit a record.
+
+ This implementation is based on the implementation of
+ StreamHandler.emit()."""
+ try:
+ message = self._AppLogsMessage(record)
+ if isinstance(message, unicode):
+ message = message.encode("UTF-8")
+ self.stream.write(message)
+ self.flush()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ self.handleError(record)
+
+ def _AppLogsMessage(self, record):
+ """Converts the log record into a log line."""
+
+ message = self.format(record).replace("\n", NEWLINE_REPLACEMENT)
+ return "LOG %d %d %s\n" % (self._AppLogsLevel(record.levelno),
+ long(record.created * 1000 * 1000),
+ message)
+
+ def _AppLogsLevel(self, level):
+ """Converts the logging level used in Python to the API logging level"""
+ if level >= logging.CRITICAL:
+ return 4
+ elif level >= logging.ERROR:
+ return 3
+ elif level >= logging.WARNING:
+ return 2
+ elif level >= logging.INFO:
+ return 1
+ else:
+ return 0
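
A brief usage sketch: attaching the handler to the root logger makes each record come out in the 'LOG <level> <timestamp> <message>' form built by _AppLogsMessage (the message and timestamp below are illustrative):

    import logging
    from google.appengine.api import app_logging

    logging.getLogger().addHandler(app_logging.AppLogsHandler())
    logging.warning('disk quota low')
    # WARNING maps to API level 2 via _AppLogsLevel, so this writes
    # something like: LOG 2 1255998009000000 disk quota low
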
diff --git a/google_appengine/google/appengine/api/appinfo.py b/google_appengine/google/appengine/api/appinfo.py
new file mode 100755
index 0000000..6ab406c
--- /dev/null
+++ b/google_appengine/google/appengine/api/appinfo.py
@@ -0,0 +1,430 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""AppInfo tools.
+
+Library for working with AppInfo records in memory, store and load from
+configuration files.
+"""
+
+
+
+
+
+import re
+
+from google.appengine.api import appinfo_errors
+from google.appengine.api import validation
+from google.appengine.api import yaml_builder
+from google.appengine.api import yaml_listener
+from google.appengine.api import yaml_object
+
+
+_URL_REGEX = r'(?!\^)/|\.|(\(.).*(?!\$).'
+_FILES_REGEX = r'(?!\^).*(?!\$).'
+
+_DELTA_REGEX = r'([1-9][0-9]*)([DdHhMm]|[sS]?)'
+_EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)
+
+_SERVICE_RE_STRING = r'(mail|xmpp_message)'
+
+_EXPIRATION_CONVERSIONS = {
+ 'd': 60 * 60 * 24,
+ 'h': 60 * 60,
+ 'm': 60,
+ 's': 1,
+}
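
The delta regex and conversion table combine in the obvious way; a hypothetical helper (ParseExpiration is not part of this module) that turns an expiration string such as '1d 6h' into seconds might look like:

    import re

    def ParseExpiration(expiration):
        # Each _DELTA_REGEX match is (amount, unit); a missing unit
        # means seconds, so default the multiplier to 1.
        seconds = 0
        for amount, unit in re.findall(_DELTA_REGEX, expiration):
            seconds += int(amount) * _EXPIRATION_CONVERSIONS.get(unit.lower(), 1)
        return seconds

    # ParseExpiration('1d 6h') == 86400 + 21600 == 108000
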
+
+APP_ID_MAX_LEN = 100
+MAJOR_VERSION_ID_MAX_LEN = 100
+MAX_URL_MAPS = 100
+
+APPLICATION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % APP_ID_MAX_LEN
+VERSION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % MAJOR_VERSION_ID_MAX_LEN
+
+RUNTIME_RE_STRING = r'[a-z]{1,30}'
+
+API_VERSION_RE_STRING = r'[\w.]{1,32}'
+
+HANDLER_STATIC_FILES = 'static_files'
+HANDLER_STATIC_DIR = 'static_dir'
+HANDLER_SCRIPT = 'script'
+
+LOGIN_OPTIONAL = 'optional'
+LOGIN_REQUIRED = 'required'
+LOGIN_ADMIN = 'admin'
+
+SECURE_HTTP = 'never'
+SECURE_HTTPS = 'always'
+SECURE_HTTP_OR_HTTPS = 'optional'
+
+REQUIRE_MATCHING_FILE = 'require_matching_file'
+
+DEFAULT_SKIP_FILES = (r'^(.*/)?('
+ r'(app\.yaml)|'
+ r'(app\.yml)|'
+ r'(index\.yaml)|'
+ r'(index\.yml)|'
+ r'(#.*#)|'
+ r'(.*~)|'
+ r'(.*\.py[co])|'
+ r'(.*/RCS/.*)|'
+ r'(\..*)|'
+ r')$')
+
+LOGIN = 'login'
+SECURE = 'secure'
+URL = 'url'
+STATIC_FILES = 'static_files'
+UPLOAD = 'upload'
+STATIC_DIR = 'static_dir'
+MIME_TYPE = 'mime_type'
+SCRIPT = 'script'
+EXPIRATION = 'expiration'
+
+APPLICATION = 'application'
+VERSION = 'version'
+RUNTIME = 'runtime'
+API_VERSION = 'api_version'
+HANDLERS = 'handlers'
+DEFAULT_EXPIRATION = 'default_expiration'
+SKIP_FILES = 'skip_files'
+SERVICES = 'inbound_services'
+
+
+class URLMap(validation.Validated):
+ """Mapping from URLs to handlers.
+
+ This class acts like something of a union type. Its purpose is to
+ describe a mapping between a set of URLs and their handlers. What
+ handler type a given instance has is determined by which handler-id
+ attribute is used.
+
+ Each mapping can have one and only one handler type. Attempting to
+ use more than one handler-id attribute will cause an UnknownHandlerType
+ to be raised during validation. Failure to provide any handler-id
+ attributes will cause MissingHandlerType to be raised during validation.
+
+ The regular expression used by the url field will be used to match against
+ the entire URL path and query string of the request. This means that
+ partial maps will not be matched. Specifying a url, say /admin, is the
+ same as matching against the regular expression '^/admin$'. Don't begin
+  your matching url with ^ or end it with $. These regular expressions
+ won't be accepted and will raise ValueError.
+
+ Attributes:
+ login: Whether or not login is required to access URL. Defaults to
+ 'optional'.
+ secure: Restriction on the protocol which can be used to serve
+ this URL/handler (HTTP, HTTPS or either).
+ url: Regular expression used to fully match against the request URLs path.
+ See Special Cases for using static_dir.
+ static_files: Handler id attribute that maps URL to the appropriate
+      file. Can use regex backreferences to the string matched by url.
+ upload: Regular expression used by the application configuration
+ program to know which files are uploaded as blobs. It's very
+ difficult to determine this using just the url and static_files
+ so this attribute must be included. Required when defining a
+ static_files mapping.
+ A matching file name must fully match against the upload regex, similar
+ to how url is matched against the request path. Do not begin upload
+ with ^ or end it with $.
+ static_dir: Handler id that maps the provided url to a sub-directory
+ within the application directory. See Special Cases.
+ mime_type: When used with static_files and static_dir the mime-type
+ of files served from those directories are overridden with this
+ value.
+    script: Handler id that maps URLs to a script handler within the application
+ directory that will run using CGI.
+ expiration: When used with static files and directories, the time delta to
+ use for cache expiration. Has the form '4d 5h 30m 15s', where each letter
+ signifies days, hours, minutes, and seconds, respectively. The 's' for
+ seconds may be omitted. Only one amount must be specified, combining
+ multiple amounts is optional. Example good values: '10', '1d 6h',
+ '1h 30m', '7d 7d 7d', '5m 30'.
+
+ Special cases:
+ When defining a static_dir handler, do not use a regular expression
+ in the url attribute. Both the url and static_dir attributes are
+ automatically mapped to these equivalents:
+
+ <url>/(.*)
+ <static_dir>/\1
+
+ For example:
+
+ url: /images
+ static_dir: images_folder
+
+ Is the same as this static_files declaration:
+
+ url: /images/(.*)
+ static_files: images/\1
+ upload: images/(.*)
+ """
+
+ ATTRIBUTES = {
+
+ URL: validation.Optional(_URL_REGEX),
+ LOGIN: validation.Options(LOGIN_OPTIONAL,
+ LOGIN_REQUIRED,
+ LOGIN_ADMIN,
+ default=LOGIN_OPTIONAL),
+
+ SECURE: validation.Options(SECURE_HTTP,
+ SECURE_HTTPS,
+ SECURE_HTTP_OR_HTTPS,
+ default=SECURE_HTTP),
+
+
+
+ HANDLER_STATIC_FILES: validation.Optional(_FILES_REGEX),
+ UPLOAD: validation.Optional(_FILES_REGEX),
+
+
+ HANDLER_STATIC_DIR: validation.Optional(_FILES_REGEX),
+
+
+ MIME_TYPE: validation.Optional(str),
+ EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
+
+
+ HANDLER_SCRIPT: validation.Optional(_FILES_REGEX),
+
+ REQUIRE_MATCHING_FILE: validation.Optional(bool),
+ }
+
+ COMMON_FIELDS = set([URL, LOGIN, SECURE])
+
+ ALLOWED_FIELDS = {
+ HANDLER_STATIC_FILES: (MIME_TYPE, UPLOAD, EXPIRATION,
+ REQUIRE_MATCHING_FILE),
+ HANDLER_STATIC_DIR: (MIME_TYPE, EXPIRATION, REQUIRE_MATCHING_FILE),
+ HANDLER_SCRIPT: (),
+ }
+
+ def GetHandler(self):
+ """Get handler for mapping.
+
+ Returns:
+ Value of the handler (determined by handler id attribute).
+ """
+ return getattr(self, self.GetHandlerType())
+
+ def GetHandlerType(self):
+ """Get handler type of mapping.
+
+ Returns:
+ Handler type determined by which handler id attribute is set.
+
+ Raises:
+      UnknownHandlerType when none of the handler id attributes
+ are set.
+
+ UnexpectedHandlerAttribute when an unexpected attribute
+ is set for the discovered handler type.
+
+ HandlerTypeMissingAttribute when the handler is missing a
+ required attribute for its handler type.
+ """
+ for id_field in URLMap.ALLOWED_FIELDS.iterkeys():
+ if getattr(self, id_field) is not None:
+ mapping_type = id_field
+ break
+ else:
+ raise appinfo_errors.UnknownHandlerType(
+ 'Unknown url handler type.\n%s' % str(self))
+
+ allowed_fields = URLMap.ALLOWED_FIELDS[mapping_type]
+
+ for attribute in self.ATTRIBUTES.iterkeys():
+ if (getattr(self, attribute) is not None and
+ not (attribute in allowed_fields or
+ attribute in URLMap.COMMON_FIELDS or
+ attribute == mapping_type)):
+ raise appinfo_errors.UnexpectedHandlerAttribute(
+ 'Unexpected attribute "%s" for mapping type %s.' %
+ (attribute, mapping_type))
+
+ if mapping_type == HANDLER_STATIC_FILES and not self.upload:
+ raise appinfo_errors.MissingHandlerAttribute(
+ 'Missing "%s" attribute for URL "%s".' % (UPLOAD, self.url))
+
+ return mapping_type
+
+ def CheckInitialized(self):
+ """Adds additional checking to make sure handler has correct fields.
+
+    In addition to the normal Validated check, this calls GetHandlerType,
+    which validates that all of the handler fields are configured
+    properly.
+
+ Raises:
+      UnknownHandlerType when none of the handler id attributes
+        are set.
+
+ UnexpectedHandlerAttribute when an unexpected attribute
+ is set for the discovered handler type.
+
+ HandlerTypeMissingAttribute when the handler is missing a
+ required attribute for its handler type.
+ """
+ super(URLMap, self).CheckInitialized()
+ self.GetHandlerType()
+
+
+class AppInfoExternal(validation.Validated):
+ """Class representing users application info.
+
+ This class is passed to a yaml_object builder to provide the validation
+ for the application information file format parser.
+
+ Attributes:
+ application: Unique identifier for application.
+ version: Application's major version number.
+ runtime: Runtime used by application.
+ api_version: Which version of APIs to use.
+ handlers: List of URL handlers.
+ default_expiration: Default time delta to use for cache expiration for
+ all static files, unless they have their own specific 'expiration' set.
+ See the URLMap.expiration field's documentation for more information.
+ skip_files: An re object. Files that match this regular expression will
+ not be uploaded by appcfg.py. For example:
+ skip_files: |
+ .svn.*|
+ #.*#
+ """
+
+ ATTRIBUTES = {
+
+
+ APPLICATION: APPLICATION_RE_STRING,
+ VERSION: VERSION_RE_STRING,
+ RUNTIME: RUNTIME_RE_STRING,
+
+
+ API_VERSION: API_VERSION_RE_STRING,
+ HANDLERS: validation.Optional(validation.Repeated(URLMap)),
+
+ SERVICES: validation.Optional(validation.Repeated(
+ validation.Regex(_SERVICE_RE_STRING))),
+ DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
+ SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES)
+ }
+
+ def CheckInitialized(self):
+ """Ensures that at least one url mapping is provided.
+
+ Raises:
+ MissingURLMapping when no URLMap objects are present in object.
+ TooManyURLMappings when there are too many URLMap entries.
+ """
+ super(AppInfoExternal, self).CheckInitialized()
+ if not self.handlers:
+ raise appinfo_errors.MissingURLMapping(
+ 'No URLMap entries found in application configuration')
+ if len(self.handlers) > MAX_URL_MAPS:
+ raise appinfo_errors.TooManyURLMappings(
+ 'Found more than %d URLMap entries in application configuration' %
+ MAX_URL_MAPS)
+
+
+def LoadSingleAppInfo(app_info):
+ """Load a single AppInfo object where one and only one is expected.
+
+ Args:
+ app_info: A file-like object or string. If it is a string, parse it as
+ a configuration file. If it is a file-like object, read in data and
+ parse.
+
+ Returns:
+ An instance of AppInfoExternal as loaded from a YAML file.
+
+ Raises:
+ ValueError: if a specified service is not valid.
+ EmptyConfigurationFile: when there are no documents in YAML file.
+ MultipleConfigurationFile: when there is more than one document in YAML
+ file.
+ """
+ builder = yaml_object.ObjectBuilder(AppInfoExternal)
+ handler = yaml_builder.BuilderHandler(builder)
+ listener = yaml_listener.EventListener(handler)
+ listener.Parse(app_info)
+
+ app_infos = handler.GetResults()
+ if len(app_infos) < 1:
+ raise appinfo_errors.EmptyConfigurationFile()
+ if len(app_infos) > 1:
+ raise appinfo_errors.MultipleConfigurationFile()
+ return app_infos[0]
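
A minimal caller sketch; the app.yaml content below is invented for
illustration and carries only the fields validated above:

    from google.appengine.api import appinfo
    from google.appengine.api import appinfo_errors

    APP_YAML = ('application: guestbook\n'
                'version: 1\n'
                'runtime: python\n'
                'api_version: 1\n'
                'handlers:\n'
                '- url: /.*\n'
                '  script: main.py\n')

    try:
      info = appinfo.LoadSingleAppInfo(APP_YAML)
      print info.application, len(info.handlers)
    except appinfo_errors.EmptyConfigurationFile:
      print 'no document in the configuration file'
    except appinfo_errors.MultipleConfigurationFile:
      print 'more than one document in the configuration file'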
+
+
+def ParseExpiration(expiration):
+ """Parses an expiration delta string.
+
+ Args:
+ expiration: String that matches _DELTA_REGEX.
+
+ Returns:
+ Time delta in seconds.
+ """
+ delta = 0
+ for match in re.finditer(_DELTA_REGEX, expiration):
+ amount = int(match.group(1))
+ units = _EXPIRATION_CONVERSIONS.get(match.group(2).lower(), 1)
+ delta += amount * units
+ return delta
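
For example, assuming _EXPIRATION_CONVERSIONS (defined earlier in this file)
maps 'd', 'h', 'm', and 's' to 86400, 3600, 60, and 1 seconds respectively,
repeated amounts simply accumulate:

    from google.appengine.api import appinfo

    print appinfo.ParseExpiration('1d 6h')     # 86400 + 21600 = 108000
    print appinfo.ParseExpiration('5m 30')     # 300 + 30 = 330
    print appinfo.ParseExpiration('7d 7d 7d')  # 3 * 7 * 86400 = 1814400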
+
+
+
+_file_path_positive_re = re.compile(r'^[ 0-9a-zA-Z\._\+/\$-]{1,256}$')
+
+_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-|^_ah/')
+
+_file_path_negative_2_re = re.compile(r'//|/$')
+
+_file_path_negative_3_re = re.compile(r'^ | $|/ | /')
+
+
+def ValidFilename(filename):
+ """Determines if filename is valid.
+
+ filename must be a valid pathname.
+ - It must contain only letters, numbers, _, +, /, $, ., and -.
+ - It must be less than 256 chars.
+ - It must not contain "/./", "/../", or "//".
+ - It must not end in "/".
+ - All spaces must be in the middle of a directory or file name.
+
+ Args:
+ filename: The filename to validate.
+
+ Returns:
+ An error string if the filename is invalid. Returns '' if the filename
+ is valid.
+ """
+ if _file_path_positive_re.match(filename) is None:
+ return 'Invalid character in filename: %s' % filename
+ if _file_path_negative_1_re.search(filename) is not None:
+ return ('Filename cannot contain "." or ".." '
+ 'or start with "-" or "_ah/": %s' %
+ filename)
+ if _file_path_negative_2_re.search(filename) is not None:
+ return 'Filename cannot have trailing / or contain //: %s' % filename
+ if _file_path_negative_3_re.search(filename) is not None:
+ return 'Any spaces must be in the middle of a filename: %s' % filename
+ return ''
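
A quick illustration of the checks above, with return values in comments:

    from google.appengine.api import appinfo

    print appinfo.ValidFilename('static/logo.png')   # '' (valid)
    print appinfo.ValidFilename('static//logo.png')  # rejected: contains //
    print appinfo.ValidFilename('../secrets.txt')    # rejected: contains ..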
diff --git a/google_appengine/google/appengine/api/appinfo.pyc b/google_appengine/google/appengine/api/appinfo.pyc
new file mode 100644
index 0000000..af39ab3
--- /dev/null
+++ b/google_appengine/google/appengine/api/appinfo.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/appinfo_errors.py b/google_appengine/google/appengine/api/appinfo_errors.py
new file mode 100755
index 0000000..a79c623
--- /dev/null
+++ b/google_appengine/google/appengine/api/appinfo_errors.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Errors used in the Python appinfo API, used by app developers."""
+
+
+
+
+
+class Error(Exception):
+ """Base datastore AppInfo type."""
+
+class EmptyConfigurationFile(Error):
+ """Tried to load empty configuration file"""
+
+class MultipleConfigurationFile(Error):
+ """Tried to load configuration file with multiple AppInfo objects"""
+
+class UnknownHandlerType(Error):
+ """Raised when it is not possible to determine URL mapping type."""
+
+class UnexpectedHandlerAttribute(Error):
+ """Raised when a handler type has an attribute that it does not use."""
+
+class MissingHandlerAttribute(Error):
+ """Raised when a handler is missing an attribute required by its type."""
+
+class MissingURLMapping(Error):
+ """Raised when there are no URL mappings in external appinfo."""
+
+class TooManyURLMappings(Error):
+ """Raised when there are too many URL mappings in external appinfo."""
diff --git a/google_appengine/google/appengine/api/appinfo_errors.pyc b/google_appengine/google/appengine/api/appinfo_errors.pyc
new file mode 100644
index 0000000..3207355
--- /dev/null
+++ b/google_appengine/google/appengine/api/appinfo_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/capabilities/__init__.py b/google_appengine/google/appengine/api/capabilities/__init__.py
new file mode 100755
index 0000000..f672cbb
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/__init__.py
@@ -0,0 +1,172 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Allows applications to identify API outages and scheduled downtime.
+
+Some examples:
+ def StoreUploadedProfileImage(self):
+ uploaded_image = self.request.get('img')
+ # If the images API is unavailable, we'll just skip the resize.
+ if CapabilitySet('images').is_enabled():
+ uploaded_image = images.resize(uploaded_image, 64, 64)
+ store(uploaded_image)
+
+  def RenderHTMLForm(self):
+    datastore_readonly = CapabilitySet('datastore_v3', capabilities=['write'])
+    if not datastore_readonly.will_remain_enabled_for(60):
+      self.response.out('<p>Not accepting submissions right now: %s</p>' %
+                        datastore_readonly.admin_message())
+      # ...render form with form elements disabled...
+    else:
+      # ...render form normally...
+
+ Individual API wrapper modules should expose CapabilitySet objects
+ for users rather than relying on users to create them. They may
+ also create convenience methods (e.g. db.IsReadOnly()) that delegate
+ to the relevant CapabilitySet.
+
+Classes defined here:
+ CapabilitySet: encapsulates one or more capabilities, allows introspection.
+  UnknownCapabilityError: raised when an unknown capability is requested.
+"""
+
+
+
+
+
+from google.appengine.api.capabilities import capability_service_pb
+from google.appengine.base import capabilities_pb
+from google.appengine.api import apiproxy_stub_map
+
+
+IsEnabledRequest = capability_service_pb.IsEnabledRequest
+IsEnabledResponse = capability_service_pb.IsEnabledResponse
+CapabilityConfig = capabilities_pb.CapabilityConfig
+
+
+class UnknownCapabilityError(Exception):
+ """An unknown capability was requested."""
+
+
+class CapabilitySet(object):
+ """Encapsulates one or more capabilities.
+
+ Capabilities can either be named explicitly, or inferred from the
+ list of methods provided. If no capabilities or methods are
+ provided, this will check whether the entire package is enabled.
+ """
+ def __init__(self, package, capabilities=None, methods=None,
+ stub_map=apiproxy_stub_map):
+ """Constructor.
+
+    Args:
+      package: name of the API package to check, a string
+      capabilities: list of strings
+      methods: list of strings
+      stub_map: the module used to make the IsEnabled call; replaceable
+        for testing
+ """
+ if capabilities is None:
+ capabilities = []
+ if methods is None:
+ methods = []
+ self._package = package
+ self._capabilities = ['*'] + capabilities
+ self._methods = methods
+ self._stub_map = stub_map
+
+ def is_enabled(self):
+ """Tests whether the capabilities is currently enabled.
+
+ Returns:
+ True if API calls that require these capabillities will succeed.
+
+ Raises:
+ UnknownCapabilityError, if a specified capability was not recognized.
+ """
+ config = self._get_status()
+ return config.summary_status() in (IsEnabledResponse.ENABLED,
+ IsEnabledResponse.SCHEDULED_FUTURE,
+ IsEnabledResponse.SCHEDULED_NOW)
+
+ def will_remain_enabled_for(self, time=60):
+ """Returns true if it will remain enabled for the specified amount of time.
+
+ Args:
+ time: Number of seconds in the future to look when checking for scheduled
+ downtime.
+
+ Returns:
+ True if there is no scheduled downtime for the specified capability
+ within the amount of time specified.
+
+ Raises:
+ UnknownCapabilityError, if a specified capability was not recognized.
+ """
+ config = self._get_status()
+
+ status = config.summary_status()
+ if status == IsEnabledResponse.ENABLED:
+ return True
+ elif status == IsEnabledResponse.SCHEDULED_NOW:
+ return False
+ elif status == IsEnabledResponse.SCHEDULED_FUTURE:
+ if config.has_time_until_scheduled():
+ return config.time_until_scheduled() >= time
+ else:
+ return True
+ elif status == IsEnabledResponse.DISABLED:
+ return False
+ else:
+ return False
+
+ def admin_message(self):
+ """Get any administrator notice messages for these capabilities.
+
+ Returns:
+ A string containing one or more admin messages, or an empty string.
+
+ Raises:
+ UnknownCapabilityError, if a specified capability was not recognized.
+ """
+ message_list = []
+ for config in self._get_status().config_list():
+ message = config.admin_message()
+ if message and message not in message_list:
+ message_list.append(message)
+ return ' '.join(message_list)
+
+ def _get_status(self):
+ """Get an IsEnabledResponse for the capabilities listed.
+
+ Returns:
+ IsEnabledResponse for the specified capabilities.
+
+ Raises:
+ UnknownCapabilityError: If an unknown capability was requested.
+ """
+ req = IsEnabledRequest()
+ req.set_package(self._package)
+ for capability in self._capabilities:
+ req.add_capability(capability)
+ for method in self._methods:
+ req.add_call(method)
+
+ resp = capability_service_pb.IsEnabledResponse()
+ self._stub_map.MakeSyncCall('capability_service', 'IsEnabled', req, resp)
+
+ if resp.summary_status() == IsEnabledResponse.UNKNOWN:
+ raise UnknownCapabilityError()
+
+ return resp
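
A short usage sketch, following the package and capability names from the
module docstring above; it assumes a capability_service backend or stub is
registered:

    from google.appengine.api import capabilities

    images_api = capabilities.CapabilitySet('images')
    if images_api.is_enabled():
      pass  # images API calls should succeed

    datastore_write = capabilities.CapabilitySet('datastore_v3',
                                                 capabilities=['write'])
    if not datastore_write.will_remain_enabled_for(60):
      print datastore_write.admin_message()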
diff --git a/google_appengine/google/appengine/api/capabilities/__init__.pyc b/google_appengine/google/appengine/api/capabilities/__init__.pyc
new file mode 100644
index 0000000..c8ac026
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/capabilities/capability_service_pb.py b/google_appengine/google/appengine/api/capabilities/capability_service_pb.py
new file mode 100644
index 0000000..9f9ba29
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/capability_service_pb.py
@@ -0,0 +1,366 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.appengine.base.capabilities_pb import CapabilityConfig
+class IsEnabledRequest(ProtocolBuffer.ProtocolMessage):
+ has_package_ = 0
+ package_ = ""
+
+ def __init__(self, contents=None):
+ self.capability_ = []
+ self.call_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def package(self): return self.package_
+
+ def set_package(self, x):
+ self.has_package_ = 1
+ self.package_ = x
+
+ def clear_package(self):
+ if self.has_package_:
+ self.has_package_ = 0
+ self.package_ = ""
+
+ def has_package(self): return self.has_package_
+
+ def capability_size(self): return len(self.capability_)
+ def capability_list(self): return self.capability_
+
+ def capability(self, i):
+ return self.capability_[i]
+
+ def set_capability(self, i, x):
+ self.capability_[i] = x
+
+ def add_capability(self, x):
+ self.capability_.append(x)
+
+ def clear_capability(self):
+ self.capability_ = []
+
+ def call_size(self): return len(self.call_)
+ def call_list(self): return self.call_
+
+ def call(self, i):
+ return self.call_[i]
+
+ def set_call(self, i, x):
+ self.call_[i] = x
+
+ def add_call(self, x):
+ self.call_.append(x)
+
+ def clear_call(self):
+ self.call_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_package()): self.set_package(x.package())
+ for i in xrange(x.capability_size()): self.add_capability(x.capability(i))
+ for i in xrange(x.call_size()): self.add_call(x.call(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_package_ != x.has_package_: return 0
+ if self.has_package_ and self.package_ != x.package_: return 0
+ if len(self.capability_) != len(x.capability_): return 0
+ for e1, e2 in zip(self.capability_, x.capability_):
+ if e1 != e2: return 0
+ if len(self.call_) != len(x.call_): return 0
+ for e1, e2 in zip(self.call_, x.call_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_package_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: package not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.package_))
+ n += 1 * len(self.capability_)
+ for i in xrange(len(self.capability_)): n += self.lengthString(len(self.capability_[i]))
+ n += 1 * len(self.call_)
+ for i in xrange(len(self.call_)): n += self.lengthString(len(self.call_[i]))
+ return n + 1
+
+ def Clear(self):
+ self.clear_package()
+ self.clear_capability()
+ self.clear_call()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.package_)
+ for i in xrange(len(self.capability_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.capability_[i])
+ for i in xrange(len(self.call_)):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.call_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_package(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.add_capability(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.add_call(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_package_: res+=prefix+("package: %s\n" % self.DebugFormatString(self.package_))
+ cnt=0
+ for e in self.capability_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("capability%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ cnt=0
+ for e in self.call_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("call%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kpackage = 1
+ kcapability = 2
+ kcall = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "package",
+ 2: "capability",
+ 3: "call",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class IsEnabledResponse(ProtocolBuffer.ProtocolMessage):
+
+ ENABLED = 1
+ SCHEDULED_FUTURE = 2
+ SCHEDULED_NOW = 3
+ DISABLED = 4
+ UNKNOWN = 5
+
+ _SummaryStatus_NAMES = {
+ 1: "ENABLED",
+ 2: "SCHEDULED_FUTURE",
+ 3: "SCHEDULED_NOW",
+ 4: "DISABLED",
+ 5: "UNKNOWN",
+ }
+
+ def SummaryStatus_Name(cls, x): return cls._SummaryStatus_NAMES.get(x, "")
+ SummaryStatus_Name = classmethod(SummaryStatus_Name)
+
+ has_summary_status_ = 0
+ summary_status_ = 0
+ has_time_until_scheduled_ = 0
+ time_until_scheduled_ = 0
+
+ def __init__(self, contents=None):
+ self.config_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def summary_status(self): return self.summary_status_
+
+ def set_summary_status(self, x):
+ self.has_summary_status_ = 1
+ self.summary_status_ = x
+
+ def clear_summary_status(self):
+ if self.has_summary_status_:
+ self.has_summary_status_ = 0
+ self.summary_status_ = 0
+
+ def has_summary_status(self): return self.has_summary_status_
+
+ def time_until_scheduled(self): return self.time_until_scheduled_
+
+ def set_time_until_scheduled(self, x):
+ self.has_time_until_scheduled_ = 1
+ self.time_until_scheduled_ = x
+
+ def clear_time_until_scheduled(self):
+ if self.has_time_until_scheduled_:
+ self.has_time_until_scheduled_ = 0
+ self.time_until_scheduled_ = 0
+
+ def has_time_until_scheduled(self): return self.has_time_until_scheduled_
+
+ def config_size(self): return len(self.config_)
+ def config_list(self): return self.config_
+
+ def config(self, i):
+ return self.config_[i]
+
+ def mutable_config(self, i):
+ return self.config_[i]
+
+ def add_config(self):
+ x = CapabilityConfig()
+ self.config_.append(x)
+ return x
+
+ def clear_config(self):
+ self.config_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_summary_status()): self.set_summary_status(x.summary_status())
+ if (x.has_time_until_scheduled()): self.set_time_until_scheduled(x.time_until_scheduled())
+ for i in xrange(x.config_size()): self.add_config().CopyFrom(x.config(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_summary_status_ != x.has_summary_status_: return 0
+ if self.has_summary_status_ and self.summary_status_ != x.summary_status_: return 0
+ if self.has_time_until_scheduled_ != x.has_time_until_scheduled_: return 0
+ if self.has_time_until_scheduled_ and self.time_until_scheduled_ != x.time_until_scheduled_: return 0
+ if len(self.config_) != len(x.config_): return 0
+ for e1, e2 in zip(self.config_, x.config_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_summary_status_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: summary_status not set.')
+ for p in self.config_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.summary_status_)
+ if (self.has_time_until_scheduled_): n += 1 + self.lengthVarInt64(self.time_until_scheduled_)
+ n += 1 * len(self.config_)
+ for i in xrange(len(self.config_)): n += self.lengthString(self.config_[i].ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_summary_status()
+ self.clear_time_until_scheduled()
+ self.clear_config()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.summary_status_)
+ if (self.has_time_until_scheduled_):
+ out.putVarInt32(16)
+ out.putVarInt64(self.time_until_scheduled_)
+ for i in xrange(len(self.config_)):
+ out.putVarInt32(26)
+ out.putVarInt32(self.config_[i].ByteSize())
+ self.config_[i].OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_summary_status(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.set_time_until_scheduled(d.getVarInt64())
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_config().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_summary_status_: res+=prefix+("summary_status: %s\n" % self.DebugFormatInt32(self.summary_status_))
+ if self.has_time_until_scheduled_: res+=prefix+("time_until_scheduled: %s\n" % self.DebugFormatInt64(self.time_until_scheduled_))
+ cnt=0
+ for e in self.config_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("config%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ ksummary_status = 1
+ ktime_until_scheduled = 2
+ kconfig = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "summary_status",
+ 2: "time_until_scheduled",
+ 3: "config",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['IsEnabledRequest','IsEnabledResponse']
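
Like other generated messages, these classes can be built field by field and
round-tripped through their wire encoding. A sketch; Encode() is assumed from
the ProtocolBuffer.ProtocolMessage base class, while the constructor's
contents argument is the MergeFromString() path shown above:

    from google.appengine.api.capabilities import capability_service_pb

    req = capability_service_pb.IsEnabledRequest()
    req.set_package('datastore_v3')
    req.add_capability('*')
    req.add_capability('write')

    decoded = capability_service_pb.IsEnabledRequest(req.Encode())
    assert decoded.Equals(req)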
diff --git a/google_appengine/google/appengine/api/capabilities/capability_service_pb.pyc b/google_appengine/google/appengine/api/capabilities/capability_service_pb.pyc
new file mode 100644
index 0000000..d1a68c2
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/capability_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/capabilities/capability_stub.py b/google_appengine/google/appengine/api/capabilities/capability_stub.py
new file mode 100755
index 0000000..6d33d7e
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/capability_stub.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the capability service API, everything is always enabled."""
+
+
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import capabilities
+
+IsEnabledRequest = capabilities.IsEnabledRequest
+IsEnabledResponse = capabilities.IsEnabledResponse
+CapabilityConfig = capabilities.CapabilityConfig
+
+class CapabilityServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only capability service stub."""
+
+ def __init__(self, service_name='capability_service'):
+ """Constructor.
+
+ Args:
+ service_name: Service name expected for all calls.
+ """
+ super(CapabilityServiceStub, self).__init__(service_name)
+
+
+ def _Dynamic_IsEnabled(self, request, response):
+ """Implementation of CapabilityService::IsEnabled().
+
+ Args:
+ request: An IsEnabledRequest.
+ response: An IsEnabledResponse.
+ """
+ response.set_summary_status(IsEnabledResponse.ENABLED)
+
+ default_config = response.add_config()
+ default_config.set_package('')
+ default_config.set_capability('')
+ default_config.set_status(CapabilityConfig.ENABLED)
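
A sketch of installing the stub for local testing; the RegisterStub pattern
here mirrors how other API stubs are wired into apiproxy_stub_map:

    from google.appengine.api import apiproxy_stub_map
    from google.appengine.api import capabilities
    from google.appengine.api.capabilities import capability_stub

    apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
    apiproxy_stub_map.apiproxy.RegisterStub(
        'capability_service', capability_stub.CapabilityServiceStub())

    assert capabilities.CapabilitySet('images').is_enabled()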
diff --git a/google_appengine/google/appengine/api/capabilities/capability_stub.pyc b/google_appengine/google/appengine/api/capabilities/capability_stub.pyc
new file mode 100644
index 0000000..6336e60
--- /dev/null
+++ b/google_appengine/google/appengine/api/capabilities/capability_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/croninfo.py b/google_appengine/google/appengine/api/croninfo.py
new file mode 100755
index 0000000..0eab26e
--- /dev/null
+++ b/google_appengine/google/appengine/api/croninfo.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""CronInfo tools.
+
+A library for working with CronInfo records, describing cron entries for an
+application. Supports loading the records from yaml.
+"""
+
+
+
+import logging
+import sys
+import traceback
+
+try:
+ import pytz
+except ImportError:
+ pytz = None
+
+from google.appengine.cron import groc
+from google.appengine.api import validation
+from google.appengine.api import yaml_builder
+from google.appengine.api import yaml_listener
+from google.appengine.api import yaml_object
+
+_URL_REGEX = r'^/.*$'
+_TIMEZONE_REGEX = r'^.{0,100}$'
+_DESCRIPTION_REGEX = r'^.{0,499}$'
+
+
+class GrocValidator(validation.Validator):
+ """Checks that a schedule is in valid groc format."""
+
+ def Validate(self, value):
+ """Validates a schedule."""
+ if value is None:
+ raise validation.MissingAttribute('schedule must be specified')
+ if not isinstance(value, basestring):
+      raise TypeError('schedule must be a string, not \'%r\'' % type(value))
+ schedule = groc.CreateParser(value)
+ try:
+ schedule.timespec()
+ except groc.GrocException, e:
+      raise validation.ValidationError('schedule \'%s\' failed to parse: %s' % (
+ value, e.args[0]))
+ return value
+
+
+class TimezoneValidator(validation.Validator):
+ """Checks that a timezone can be correctly parsed and is known."""
+
+ def Validate(self, value):
+ """Validates a timezone."""
+ if value is None:
+ return
+ if not isinstance(value, basestring):
+ raise TypeError('timezone must be a string, not \'%r\'' % type(value))
+ if pytz is None:
+ return value
+ try:
+ pytz.timezone(value)
+ except pytz.UnknownTimeZoneError:
+ raise validation.ValidationError('timezone \'%s\' is unknown' % value)
+ except IOError:
+ return value
+ except:
+ unused_e, v, t = sys.exc_info()
+ logging.warning('pytz raised an unexpected error: %s.\n' % (v) +
+ 'Traceback:\n' + '\n'.join(traceback.format_tb(t)))
+ raise
+ return value
+
+
+CRON = 'cron'
+
+URL = 'url'
+SCHEDULE = 'schedule'
+TIMEZONE = 'timezone'
+DESCRIPTION = 'description'
+
+
+class MalformedCronfigurationFile(Exception):
+ """Configuration file for Cron is malformed."""
+ pass
+
+
+class CronEntry(validation.Validated):
+ """A cron entry describes a single cron job."""
+ ATTRIBUTES = {
+ URL: _URL_REGEX,
+ SCHEDULE: GrocValidator(),
+ TIMEZONE: TimezoneValidator(),
+ DESCRIPTION: validation.Optional(_DESCRIPTION_REGEX)
+ }
+
+
+class CronInfoExternal(validation.Validated):
+ """CronInfoExternal describes all cron entries for an application."""
+ ATTRIBUTES = {
+ CRON: validation.Optional(validation.Repeated(CronEntry))
+ }
+
+
+def LoadSingleCron(cron_info):
+ """Load a cron.yaml file or string and return a CronInfoExternal object."""
+ builder = yaml_object.ObjectBuilder(CronInfoExternal)
+ handler = yaml_builder.BuilderHandler(builder)
+ listener = yaml_listener.EventListener(handler)
+ listener.Parse(cron_info)
+
+ cron_info = handler.GetResults()
+ if len(cron_info) < 1:
+ raise MalformedCronfigurationFile('Empty cron configuration.')
+ if len(cron_info) > 1:
+ raise MalformedCronfigurationFile('Multiple cron sections '
+ 'in configuration.')
+ return cron_info[0]
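
A minimal parse, assuming the groc schedule grammar accepts the common
'every 24 hours' form:

    from google.appengine.api import croninfo

    CRON_YAML = ('cron:\n'
                 '- url: /tasks/summary\n'
                 '  schedule: every 24 hours\n'
                 '  description: daily summary\n')

    info = croninfo.LoadSingleCron(CRON_YAML)
    for entry in info.cron:
      print entry.url, entry.schedule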
diff --git a/google_appengine/google/appengine/api/croninfo.pyc b/google_appengine/google/appengine/api/croninfo.pyc
new file mode 100644
index 0000000..5540fb8
--- /dev/null
+++ b/google_appengine/google/appengine/api/croninfo.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore.py b/google_appengine/google/appengine/api/datastore.py
new file mode 100755
index 0000000..6931db8
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore.py
@@ -0,0 +1,2170 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""The Python datastore API used by app developers.
+
+Defines Entity, Query, and Iterator classes, as well as methods for all of the
+datastore's calls. Also defines conversions between the Python classes and
+their PB counterparts.
+
+The datastore errors are defined in the datastore_errors module. That module
+is separate only to avoid circular imports: datastore imports
+datastore_types, which needs BadValueError, so the errors can't be defined
+in datastore.
+"""
+
+
+
+
+
+
+import heapq
+import itertools
+import logging
+import re
+import string
+import sys
+import traceback
+from xml.sax import saxutils
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+from google.appengine.datastore import datastore_index
+from google.appengine.datastore import datastore_pb
+from google.appengine.runtime import apiproxy_errors
+from google.appengine.datastore import entity_pb
+
+try:
+ from google.appengine.api.labs.taskqueue import taskqueue_service_pb
+except ImportError:
+ from google.appengine.api.taskqueue import taskqueue_service_pb
+
+MAX_ALLOWABLE_QUERIES = 30
+
+DEFAULT_TRANSACTION_RETRIES = 3
+
+_MAX_INDEXED_PROPERTIES = 5000
+
+_MAX_ID_BATCH_SIZE = 1000 * 1000 * 1000
+
+Key = datastore_types.Key
+typename = datastore_types.typename
+
+_txes = {}
+
+
+def NormalizeAndTypeCheck(arg, types):
+ """Normalizes and type checks the given argument.
+
+ Args:
+ arg: an instance, tuple, list, iterator, or generator of the given type(s)
+ types: allowed type or tuple of types
+
+ Returns:
+ A (list, bool) tuple. The list is a normalized, shallow copy of the
+ argument. The boolean is True if the argument was a sequence, False
+ if it was a single object.
+
+ Raises:
+ AssertionError: types includes list or tuple.
+ BadArgumentError: arg is not an instance or sequence of one of the given
+ types.
+ """
+ if not isinstance(types, (list, tuple)):
+ types = (types,)
+
+ assert list not in types and tuple not in types
+
+ if isinstance(arg, types):
+ return ([arg], False)
+ else:
+ try:
+ for val in arg:
+ if not isinstance(val, types):
+ raise datastore_errors.BadArgumentError(
+ 'Expected one of %s; received %s (a %s).' %
+ (types, val, typename(val)))
+ except TypeError:
+ raise datastore_errors.BadArgumentError(
+ 'Expected an instance or sequence of %s; received %s (a %s).' %
+ (types, arg, typename(arg)))
+
+ return (list(arg), True)
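
For instance, a single value and a sequence of values normalize to the same
shape, distinguished only by the boolean:

    from google.appengine.api import datastore

    print datastore.NormalizeAndTypeCheck('x', basestring)
    # (['x'], False) - a single instance
    print datastore.NormalizeAndTypeCheck(('x', 'y'), basestring)
    # (['x', 'y'], True) - a sequence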
+
+
+def NormalizeAndTypeCheckKeys(keys):
+ """Normalizes and type checks that the given argument is a valid key or keys.
+
+ A wrapper around NormalizeAndTypeCheck() that accepts strings, Keys, and
+ Entities, and normalizes to Keys.
+
+ Args:
+ keys: a Key or sequence of Keys
+
+ Returns:
+ A (list of Keys, bool) tuple. See NormalizeAndTypeCheck.
+
+ Raises:
+ BadArgumentError: arg is not an instance or sequence of one of the given
+ types.
+ """
+ keys, multiple = NormalizeAndTypeCheck(keys, (basestring, Entity, Key))
+
+ keys = [_GetCompleteKeyOrError(key) for key in keys]
+
+ return (keys, multiple)
+
+
+def Put(entities):
+ """Store one or more entities in the datastore.
+
+ The entities may be new or previously existing. For new entities, Put() will
+ fill in the app id and key assigned by the datastore.
+
+ If the argument is a single Entity, a single Key will be returned. If the
+ argument is a list of Entity, a list of Keys will be returned.
+
+ Args:
+ entities: Entity or list of Entities
+
+ Returns:
+ Key or list of Keys
+
+ Raises:
+ TransactionFailedError, if the Put could not be committed.
+ """
+ entities, multiple = NormalizeAndTypeCheck(entities, Entity)
+
+ if multiple and not entities:
+ return []
+
+ for entity in entities:
+ if not entity.kind() or not entity.app_id_namespace():
+ raise datastore_errors.BadRequestError(
+ 'App and kind must not be empty, in entity: %s' % entity)
+
+ req = datastore_pb.PutRequest()
+ req.entity_list().extend([e._ToPb() for e in entities])
+
+ keys = [e.key() for e in entities]
+ tx = _MaybeSetupTransaction(req, keys)
+
+ resp = datastore_pb.PutResponse()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ keys = resp.key_list()
+ num_keys = len(keys)
+ num_entities = len(entities)
+ if num_keys != num_entities:
+ raise datastore_errors.InternalError(
+ 'Put accepted %d entities but returned %d keys.' %
+ (num_entities, num_keys))
+
+ for entity, key in zip(entities, keys):
+ entity._Entity__key._Key__reference.CopyFrom(key)
+
+ if tx:
+ tx.entity_group = entities[0].entity_group()
+
+ if multiple:
+ return [Key._FromPb(k) for k in keys]
+ else:
+ return Key._FromPb(resp.key(0))
+
+
+def Get(keys):
+ """Retrieves one or more entities from the datastore.
+
+ Retrieves the entity or entities with the given key(s) from the datastore
+ and returns them as fully populated Entity objects, as defined below. If
+ there is an error, raises a subclass of datastore_errors.Error.
+
+ If keys is a single key or string, an Entity will be returned, or
+ EntityNotFoundError will be raised if no existing entity matches the key.
+
+ However, if keys is a list or tuple, a list of entities will be returned
+ that corresponds to the sequence of keys. It will include entities for keys
+ that were found and None placeholders for keys that were not found.
+
+ Args:
+ # the primary key(s) of the entity(ies) to retrieve
+ keys: Key or string or list of Keys or strings
+
+ Returns:
+ Entity or list of Entity objects
+ """
+ keys, multiple = NormalizeAndTypeCheckKeys(keys)
+
+ if multiple and not keys:
+ return []
+ req = datastore_pb.GetRequest()
+ req.key_list().extend([key._Key__reference for key in keys])
+ _MaybeSetupTransaction(req, keys)
+
+ resp = datastore_pb.GetResponse()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Get', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ entities = []
+ for group in resp.entity_list():
+ if group.has_entity():
+ entities.append(Entity._FromPb(group.entity()))
+ else:
+ entities.append(None)
+
+ if multiple:
+ return entities
+ else:
+ if entities[0] is None:
+ raise datastore_errors.EntityNotFoundError()
+ return entities[0]
+
+
+def Delete(keys):
+ """Deletes one or more entities from the datastore. Use with care!
+
+ Deletes the given entity(ies) from the datastore. You can only delete
+ entities from your app. If there is an error, raises a subclass of
+ datastore_errors.Error.
+
+ Args:
+ # the primary key(s) of the entity(ies) to delete
+ keys: Key or string or list of Keys or strings
+
+ Raises:
+ TransactionFailedError, if the Delete could not be committed.
+ """
+ keys, multiple = NormalizeAndTypeCheckKeys(keys)
+
+ if multiple and not keys:
+ return
+
+ req = datastore_pb.DeleteRequest()
+ req.key_list().extend([key._Key__reference for key in keys])
+
+ tx = _MaybeSetupTransaction(req, keys)
+
+ resp = datastore_pb.DeleteResponse()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Delete', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
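
Taken together, Put(), Get(), and Delete() support a simple round trip. A
sketch, assuming a datastore service or stub is registered; Entity is defined
just below:

    from google.appengine.api import datastore

    entity = datastore.Entity('Person', name='ryan')
    entity['age'] = 21

    key = datastore.Put(entity)
    fetched = datastore.Get(key)
    assert fetched['age'] == 21
    datastore.Delete(key)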
+
+
+class Entity(dict):
+ """A datastore entity.
+
+ Includes read-only accessors for app id, kind, and primary key. Also
+ provides dictionary-style access to properties.
+ """
+ def __init__(self, kind, parent=None, _app=None, name=None, id=None,
+ unindexed_properties=[], _namespace=None):
+ """Constructor. Takes the kind and transaction root, which cannot be
+ changed after the entity is constructed, and an optional parent. Raises
+ BadArgumentError or BadKeyError if kind is invalid or parent is not an
+ existing Entity or Key in the datastore.
+
+ Args:
+ # this entity's kind
+ kind: string
+ # if provided, this entity's parent. Its key must be complete.
+ parent: Entity or Key
+ # if provided, this entity's name.
+ name: string
+ # if provided, this entity's id.
+ id: integer
+ # if provided, a sequence of property names that should not be indexed
+ # by the built-in single property indices.
+ unindexed_properties: list or tuple of strings
+ """
+ ref = entity_pb.Reference()
+ _app_namespace = datastore_types.ResolveAppIdNamespace(_app, _namespace)
+ ref.set_app(_app_namespace.to_encoded())
+
+ datastore_types.ValidateString(kind, 'kind',
+ datastore_errors.BadArgumentError)
+ if parent is not None:
+ parent = _GetCompleteKeyOrError(parent)
+ if _app_namespace != parent.app_id_namespace():
+ raise datastore_errors.BadArgumentError(
+ " %s doesn't match parent's app_namespace %s" %
+ (_app_namespace, parent.app_id_namespace()))
+ ref.CopyFrom(parent._Key__reference)
+
+ last_path = ref.mutable_path().add_element()
+ last_path.set_type(kind.encode('utf-8'))
+
+ if name is not None and id is not None:
+ raise datastore_errors.BadArgumentError(
+ "Cannot set both name and id on an Entity")
+
+ if name is not None:
+ datastore_types.ValidateString(name, 'name')
+ last_path.set_name(name.encode('utf-8'))
+
+ if id is not None:
+ datastore_types.ValidateInteger(id, 'id')
+ last_path.set_id(id)
+
+ unindexed_properties, multiple = NormalizeAndTypeCheck(unindexed_properties, basestring)
+ if not multiple:
+ raise datastore_errors.BadArgumentError(
+ 'unindexed_properties must be a sequence; received %s (a %s).' %
+ (unindexed_properties, typename(unindexed_properties)))
+ for prop in unindexed_properties:
+ datastore_types.ValidateProperty(prop, None)
+ self.__unindexed_properties = frozenset(unindexed_properties)
+
+ self.__key = Key._FromPb(ref)
+
+ def app(self):
+ """Returns the name of the application that created this entity, a
+ string or None if not set.
+ """
+ return self.__key.app()
+
+ def namespace(self):
+ """Returns the namespace of this entity, a string or None.
+ """
+ return self.__key.namespace()
+
+ def app_id_namespace(self):
+ """Returns the AppIdNamespace of this entity or None if not set.
+ """
+ return self.__key.app_id_namespace()
+
+ def kind(self):
+ """Returns this entity's kind, a string.
+ """
+ return self.__key.kind()
+
+ def is_saved(self):
+ """Returns if this entity has been saved to the datastore
+ """
+ last_path = self.__key._Key__reference.path().element_list()[-1]
+ return ((last_path.has_name() ^ last_path.has_id()) and
+ self.__key.has_id_or_name())
+
+ def key(self):
+ """Returns this entity's primary key, a Key instance.
+ """
+ return self.__key
+
+ def parent(self):
+ """Returns this entity's parent, as a Key. If this entity has no parent,
+ returns None.
+ """
+ return self.key().parent()
+
+ def entity_group(self):
+ """Returns this entity's entity group as a Key.
+
+    Note that the returned Key will be incomplete if this is a root entity
+    and its key is incomplete.
+ """
+ return self.key().entity_group()
+
+ def unindexed_properties(self):
+ """Returns this entity's unindexed properties, as a frozenset of strings."""
+ return getattr(self, '_Entity__unindexed_properties', [])
+
+ def __setitem__(self, name, value):
+ """Implements the [] operator. Used to set property value(s).
+
+ If the property name is the empty string or not a string, raises
+ BadPropertyError. If the value is not a supported type, raises
+ BadValueError.
+ """
+ datastore_types.ValidateProperty(name, value)
+ dict.__setitem__(self, name, value)
+
+ def setdefault(self, name, value):
+ """If the property exists, returns its value. Otherwise sets it to value.
+
+ If the property name is the empty string or not a string, raises
+ BadPropertyError. If the value is not a supported type, raises
+ BadValueError.
+ """
+ datastore_types.ValidateProperty(name, value)
+ return dict.setdefault(self, name, value)
+
+ def update(self, other):
+ """Updates this entity's properties from the values in other.
+
+ If any property name is the empty string or not a string, raises
+ BadPropertyError. If any value is not a supported type, raises
+ BadValueError.
+ """
+ for name, value in other.items():
+ self.__setitem__(name, value)
+
+ def copy(self):
+ """The copy method is not supported.
+ """
+ raise NotImplementedError('Entity does not support the copy() method.')
+
+ def ToXml(self):
+ """Returns an XML representation of this entity. Atom and gd:namespace
+ properties are converted to XML according to their respective schemas. For
+ more information, see:
+
+ http://www.atomenabled.org/developers/syndication/
+ http://code.google.com/apis/gdata/common-elements.html
+
+ This is *not* optimized. It shouldn't be used anywhere near code that's
+ performance-critical.
+ """
+ xml = u'<entity kind=%s' % saxutils.quoteattr(self.kind())
+ if self.__key.has_id_or_name():
+ xml += ' key=%s' % saxutils.quoteattr(str(self.__key))
+ xml += '>'
+ if self.__key.has_id_or_name():
+ xml += '\n <key>%s</key>' % self.__key.ToTagUri()
+
+
+ properties = self.keys()
+ if properties:
+ properties.sort()
+ xml += '\n ' + '\n '.join(self._PropertiesToXml(properties))
+
+ xml += '\n</entity>\n'
+ return xml
+
+ def _PropertiesToXml(self, properties):
+ """ Returns a list of the XML representations of each of the given
+ properties. Ignores properties that don't exist in this entity.
+
+ Arg:
+ properties: string or list of strings
+
+ Returns:
+ list of strings
+ """
+ xml_properties = []
+
+ for propname in properties:
+ if not self.has_key(propname):
+ continue
+
+ propname_xml = saxutils.quoteattr(propname)
+
+ values = self[propname]
+ if not isinstance(values, list):
+ values = [values]
+
+ proptype = datastore_types.PropertyTypeName(values[0])
+ proptype_xml = saxutils.quoteattr(proptype)
+
+ escaped_values = self._XmlEscapeValues(propname)
+ open_tag = u'<property name=%s type=%s>' % (propname_xml, proptype_xml)
+ close_tag = u'</property>'
+ xml_properties += [open_tag + val + close_tag for val in escaped_values]
+
+ return xml_properties
+
+ def _XmlEscapeValues(self, property):
+ """ Returns a list of the XML-escaped string values for the given property.
+ Raises an AssertionError if the property doesn't exist.
+
+ Arg:
+ property: string
+
+ Returns:
+ list of strings
+ """
+ assert self.has_key(property)
+ xml = []
+
+ values = self[property]
+ if not isinstance(values, list):
+ values = [values]
+
+ for val in values:
+ if hasattr(val, 'ToXml'):
+ xml.append(val.ToXml())
+ else:
+ if val is None:
+ xml.append('')
+ else:
+ xml.append(saxutils.escape(unicode(val)))
+
+ return xml
+
+ def ToPb(self):
+ """Converts this Entity to its protocol buffer representation.
+
+ Returns:
+ entity_pb.Entity
+ """
+ return self._ToPb(False)
+
+ def _ToPb(self, mark_key_as_saved=True):
+ """Converts this Entity to its protocol buffer representation. Not
+ intended to be used by application developers.
+
+ Returns:
+ entity_pb.Entity
+ """
+
+ pb = entity_pb.EntityProto()
+ pb.mutable_key().CopyFrom(self.key()._ToPb())
+ last_path = pb.key().path().element_list()[-1]
+ if mark_key_as_saved and last_path.has_name() and last_path.has_id():
+ last_path.clear_id()
+
+ group = pb.mutable_entity_group()
+ if self.__key.has_id_or_name():
+ root = pb.key().path().element(0)
+ group.add_element().CopyFrom(root)
+
+ properties = self.items()
+ properties.sort()
+    for (name, values) in properties:
+      prop_pbs = datastore_types.ToPropertyPb(name, values)
+      if not isinstance(prop_pbs, list):
+        prop_pbs = [prop_pbs]
+
+      sample = values
+      if isinstance(sample, list):
+        sample = values[0]
+
+      if (isinstance(sample, datastore_types._RAW_PROPERTY_TYPES) or
+          name in self.unindexed_properties()):
+        pb.raw_property_list().extend(prop_pbs)
+      else:
+        pb.property_list().extend(prop_pbs)
+
+ if pb.property_size() > _MAX_INDEXED_PROPERTIES:
+ raise datastore_errors.BadRequestError(
+ 'Too many indexed properties for entity %r.' % self.key())
+
+ return pb
+
+ @staticmethod
+ def FromPb(pb):
+ """Static factory method. Returns the Entity representation of the
+ given protocol buffer (datastore_pb.Entity).
+
+ Args:
+ pb: datastore_pb.Entity or str encoding of a datastore_pb.Entity
+
+ Returns:
+ Entity: the Entity representation of pb
+ """
+ if isinstance(pb, str):
+ real_pb = entity_pb.EntityProto()
+ real_pb.ParseFromString(pb)
+ pb = real_pb
+
+ return Entity._FromPb(pb, require_valid_key=False)
+
+ @staticmethod
+ def _FromPb(pb, require_valid_key=True):
+ """Static factory method. Returns the Entity representation of the
+ given protocol buffer (datastore_pb.Entity). Not intended to be used by
+ application developers.
+
+ The Entity PB's key must be complete. If it isn't, an AssertionError is
+ raised.
+
+ Args:
+ # a protocol buffer Entity
+ pb: datastore_pb.Entity
+
+ Returns:
+ # the Entity representation of the argument
+ Entity
+ """
+ assert pb.key().path().element_size() > 0
+
+ last_path = pb.key().path().element_list()[-1]
+ if require_valid_key:
+ assert last_path.has_id() ^ last_path.has_name()
+ if last_path.has_id():
+ assert last_path.id() != 0
+ else:
+ assert last_path.has_name()
+ assert last_path.name()
+
+ unindexed_properties = [p.name() for p in pb.raw_property_list()]
+
+ e = Entity(unicode(last_path.type().decode('utf-8')),
+ unindexed_properties=unindexed_properties)
+ ref = e.__key._Key__reference
+ ref.CopyFrom(pb.key())
+
+ temporary_values = {}
+
+ for prop_list in (pb.property_list(), pb.raw_property_list()):
+ for prop in prop_list:
+ try:
+ value = datastore_types.FromPropertyPb(prop)
+        except (AssertionError, AttributeError, TypeError, ValueError), err:
+          raise datastore_errors.Error(
+            'Property %s is corrupt in the datastore. %s: %s' %
+            (prop.name(), err.__class__, err))
+
+ multiple = prop.multiple()
+ if multiple:
+ value = [value]
+
+ name = prop.name()
+ cur_value = temporary_values.get(name)
+ if cur_value is None:
+ temporary_values[name] = value
+ elif not multiple:
+ raise datastore_errors.Error(
+ 'Property %s is corrupt in the datastore; it has multiple '
+ 'values, but is not marked as multiply valued.' % name)
+ else:
+ cur_value.extend(value)
+
+ for name, value in temporary_values.iteritems():
+ decoded_name = unicode(name.decode('utf-8'))
+
+ datastore_types.ValidateReadProperty(decoded_name, value)
+
+ dict.__setitem__(e, decoded_name, value)
+
+ return e
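
The ToPb()/FromPb() pair above gives a serialization round trip; as with the
other protocol messages, Encode() here is assumed from the EntityProto base
class:

    from google.appengine.api import datastore

    person = datastore.Entity('Person', name='ryan')
    person['age'] = 21

    clone = datastore.Entity.FromPb(person.ToPb().Encode())
    assert clone['age'] == 21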
+
+
+class Query(dict):
+ """A datastore query.
+
+ (Instead of this, consider using appengine.ext.gql.Query! It provides a
+ query language interface on top of the same functionality.)
+
+ Queries are used to retrieve entities that match certain criteria, including
+ app id, kind, and property filters. Results may also be sorted by properties.
+
+ App id and kind are required. Only entities from the given app, of the given
+ type, are returned. If an ancestor is set, with Ancestor(), only entities
+ with that ancestor are returned.
+
+ Property filters are used to provide criteria based on individual property
+ values. A filter compares a specific property in each entity to a given
+ value or list of possible values.
+
+ An entity is returned if its property values match *all* of the query's
+ filters. In other words, filters are combined with AND, not OR. If an
+ entity does not have a value for a property used in a filter, it is not
+ returned.
+
+ Property filters map filter strings of the form '<property name> <operator>'
+ to filter values. Use dictionary accessors to set property filters, like so:
+
+ > query = Query('Person')
+ > query['name ='] = 'Ryan'
+ > query['age >='] = 21
+
+  This query returns all Person entities where the name property is 'Ryan'
+  and the age property is at least 21.
+
+ Another way to build this query is:
+
+ > query = Query('Person')
+ > query.update({'name =': 'Ryan', 'age >=': 21})
+
+ The supported operators are =, >, <, >=, and <=. Only one inequality
+ filter may be used per query. Any number of equals filters may be used in
+ a single Query.
+
+ A filter value may be a list or tuple of values. This is interpreted as
+ multiple filters with the same filter string and different values, all ANDed
+ together. For example, this query returns everyone with the tags "google"
+ and "app engine":
+
+ > Query('Person', {'tag =': ('google', 'app engine')})
+
+ Result entities can be returned in different orders. Use the Order()
+ method to specify properties that results will be sorted by, and in which
+ direction.
+
+ Note that filters and orderings may be provided at any time before the query
+ is run. When the query is fully specified, Run() runs the query and returns
+ an iterator. The query results can be accessed through the iterator.
+
+ A query object may be reused after it's been run. Its filters and
+ orderings can be changed to create a modified query.
+
+ If you know how many result entities you need, use Get() to fetch them:
+
+ > query = Query('Person', {'age >': 21})
+ > for person in query.Get(4):
+ > print 'I have four pints left. Have one on me, %s!' % person['name']
+
+ If you don't know how many results you need, or if you need them all, you
+ can get an iterator over the results by calling Run():
+
+ > for person in Query('Person', {'age >': 21}).Run():
+ > print 'Have a pint on me, %s!' % person['name']
+
+ Get() is more efficient than Run(), so use Get() whenever possible.
+
+ Finally, the Count() method returns the number of result entities matched by
+ the query. The returned count is cached; successive Count() calls will not
+ re-scan the datastore unless the query is changed.
+ """
+ ASCENDING = datastore_pb.Query_Order.ASCENDING
+ DESCENDING = datastore_pb.Query_Order.DESCENDING
+
+ ORDER_FIRST = datastore_pb.Query.ORDER_FIRST
+ ANCESTOR_FIRST = datastore_pb.Query.ANCESTOR_FIRST
+ FILTER_FIRST = datastore_pb.Query.FILTER_FIRST
+
+ OPERATORS = {'<': datastore_pb.Query_Filter.LESS_THAN,
+ '<=': datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
+ '>': datastore_pb.Query_Filter.GREATER_THAN,
+ '>=': datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
+ '=': datastore_pb.Query_Filter.EQUAL,
+ '==': datastore_pb.Query_Filter.EQUAL,
+ }
+ INEQUALITY_OPERATORS = frozenset(['<', '<=', '>', '>='])
+ FILTER_REGEX = re.compile(
+ '^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(OPERATORS.keys()),
+ re.IGNORECASE | re.UNICODE)
+
+ __kind = None
+ __app = None
+ __orderings = None
+ __cached_count = None
+ __hint = None
+ __ancestor = None
+
+ __filter_order = None
+ __filter_counter = 0
+
+ __inequality_prop = None
+ __inequality_count = 0
+
+ def __init__(self, kind=None, filters={}, _app=None, keys_only=False,
+ _namespace=None):
+ """Constructor.
+
+ Raises BadArgumentError if kind is not a string. Raises BadValueError or
+ BadFilterError if filters is not a dictionary of valid filters.
+
+ Args:
+ # kind is required. filters is optional; if provided, it's used
+ # as an initial set of property filters. keys_only defaults to False.
+ kind: string
+ filters: dict
+ keys_only: boolean
+ """
+ if kind is not None:
+ datastore_types.ValidateString(kind, 'kind',
+ datastore_errors.BadArgumentError)
+
+ self.__kind = kind
+ self.__orderings = []
+ self.__filter_order = {}
+ self.update(filters)
+
+ self.__app = datastore_types.ResolveAppIdNamespace(_app,
+ _namespace).to_encoded()
+ self.__keys_only = keys_only
+
+ def Order(self, *orderings):
+ """Specify how the query results should be sorted.
+
+ Result entities will be sorted by the first property argument, then by the
+ second, and so on. For example, this:
+
+ > query = Query('Person')
+ > query.Order('bday', ('age', Query.DESCENDING))
+
+ sorts everyone in order of their birthday, starting with January 1.
+ People with the same birthday are sorted by age, oldest to youngest.
+
+ The direction for each sort property may be provided; if omitted, it
+ defaults to ascending.
+
+ Order() may be called multiple times. Each call resets the sort order
+ from scratch.
+
+    If an inequality filter exists in this Query it must be the first property
+    passed to Order. Any number of sort orders may be used after the
+    inequality filter property. Without inequality filters, sort orders on
+    any number of properties may be specified.
+
+ Entities with multiple values for an order property are sorted by their
+ lowest value.
+
+ Note that a sort order implies an existence filter! In other words,
+ Entities without the sort order property are filtered out, and *not*
+ included in the query results.
+
+ If the sort order property has different types in different entities - i.e.,
+ if bob['id'] is an int and fred['id'] is a string - the entities will be
+ grouped first by the property type, then sorted within type. No attempt is
+ made to compare property values across types.
+
+ Raises BadArgumentError if any argument is of the wrong format.
+
+ Args:
+ # the properties to sort by, in sort order. each argument may be either a
+ # string or (string, direction) 2-tuple.
+
+ Returns:
+ # this query
+ Query
+ """
+ orderings = list(orderings)
+
+ for i, order in enumerate(orderings):
+ if not (isinstance(order, basestring) or
+ (isinstance(order, tuple) and len(order) in [2, 3])):
+ raise datastore_errors.BadArgumentError(
+ 'Order() expects strings or 2- or 3-tuples; received %s (a %s). ' %
+ (order, typename(order)))
+
+ if isinstance(order, basestring):
+ order = (order,)
+
+ datastore_types.ValidateString(order[0], 'sort order property',
+ datastore_errors.BadArgumentError)
+ property = order[0]
+
+ direction = order[-1]
+ if direction not in (Query.ASCENDING, Query.DESCENDING):
+ if len(order) == 3:
+ raise datastore_errors.BadArgumentError(
+ 'Order() expects Query.ASCENDING or DESCENDING; received %s' %
+ str(direction))
+ direction = Query.ASCENDING
+
+ if (self.__kind is None and
+ (property != datastore_types._KEY_SPECIAL_PROPERTY or
+ direction != Query.ASCENDING)):
+ raise datastore_errors.BadArgumentError(
+ 'Only %s ascending orders are supported on kindless queries' %
+ datastore_types._KEY_SPECIAL_PROPERTY)
+
+ orderings[i] = (property, direction)
+
+ if (orderings and self.__inequality_prop and
+ orderings[0][0] != self.__inequality_prop):
+ raise datastore_errors.BadArgumentError(
+ 'First ordering property must be the same as inequality filter '
+ 'property, if specified for this query; received %s, expected %s' %
+ (orderings[0][0], self.__inequality_prop))
+
+ self.__orderings = orderings
+ return self
+
+ def Hint(self, hint):
+ """Sets a hint for how this query should run.
+
+ The query hint gives us information about how best to execute your query.
+ Currently, we can only do one index scan, so the query hint should be used
+ to indicate which index we should scan against.
+
+ Use FILTER_FIRST if your first filter will only match a few results. In
+ this case, it will be most efficient to scan against the index for this
+ property, load the results into memory, and apply the remaining filters
+ and sort orders there.
+
+ Similarly, use ANCESTOR_FIRST if the query's ancestor only has a few
+ descendants. In this case, it will be most efficient to scan all entities
+ below the ancestor and load them into memory first.
+
+ Use ORDER_FIRST if the query has a sort order and the result set is large
+ or you only plan to fetch the first few results. In that case, we
+ shouldn't try to load all of the results into memory; instead, we should
+ scan the index for this property, which is in sorted order.
+
+ Note that hints are currently ignored in the v3 datastore!
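+
+ For example, a sketch of hinting a sorted query (the kind and property
+ names here are illustrative only):
+
+ > query = Query('Person').Order('age')
+ > query.Hint(Query.ORDER_FIRST)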
+
+ Args:
+ hint: one of datastore.Query.[ORDER_FIRST, ANCESTOR_FIRST, FILTER_FIRST]
+
+ Returns:
+ # this query
+ Query
+ """
+ if hint not in [self.ORDER_FIRST, self.ANCESTOR_FIRST, self.FILTER_FIRST]:
+ raise datastore_errors.BadArgumentError(
+ 'Query hint must be ORDER_FIRST, ANCESTOR_FIRST, or FILTER_FIRST.')
+
+ self.__hint = hint
+ return self
+
+ def Ancestor(self, ancestor):
+ """Sets an ancestor for this query.
+
+ This restricts the query to only return result entities that are descended
+ from a given entity. In other words, all of the results will have the
+ ancestor as their parent, or parent's parent, and so on.
+
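+ For example (assuming parent_key is the complete Key of an existing
+ entity):
+
+ > query = Query('Person').Ancestor(parent_key)
+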
+ Raises BadArgumentError or BadKeyError if parent is not an existing Entity
+ or Key in the datastore.
+
+ Args:
+ # the key must be complete
+ ancestor: Entity or Key
+
+ Returns:
+ # this query
+ Query
+ """
+ self.__ancestor = _GetCompleteKeyOrError(ancestor)
+ return self
+
+ def IsKeysOnly(self):
+ """Returns True if this query is keys only, false otherwise."""
+ return self.__keys_only
+
+ def Run(self):
+ """Runs this query.
+
+ If a filter string is invalid, raises BadFilterError. If a filter value is
+ invalid, raises BadValueError. If an IN filter is provided, and a sort
+ order on another property is provided, raises BadQueryError.
+
+ If you know in advance how many results you want, use Get() instead. It's
+ more efficient.
+
+ Returns:
+ # an iterator that provides access to the query results
+ Iterator
+ """
+ return self._Run()
+
+ def _Run(self, limit=None, offset=None,
+ prefetch_count=None, next_count=None):
+ """Runs this query, with an optional result limit and an optional offset.
+
+ Identical to Run, with the extra optional limit, offset, prefetch_count,
+ next_count parameters. These parameters must be integers >= 0.
+
+ This is not intended to be used by application developers. Use Get()
+ instead!
+ """
+ pb = self._ToPb(limit, offset, prefetch_count)
+ result = datastore_pb.QueryResult()
+
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', pb, result)
+ except apiproxy_errors.ApplicationError, err:
+ try:
+ _ToDatastoreError(err)
+ except datastore_errors.NeedIndexError, exc:
+ yaml = datastore_index.IndexYamlForQuery(
+ *datastore_index.CompositeIndexForQuery(pb)[1:-1])
+ raise datastore_errors.NeedIndexError(
+ str(exc) + '\nThis query needs this index:\n' + yaml)
+
+ return Iterator(result, batch_size=next_count)
+
+ def Get(self, limit, offset=0):
+ """Fetches and returns a maximum number of results from the query.
+
+ This method fetches and returns a list of resulting entities that matched
+ the query. If the query specified a sort order, entities are returned in
+ that order. Otherwise, the order is undefined.
+
+ The limit argument specifies the maximum number of entities to return. If
+ it's greater than the number of remaining entities, all of the remaining
+ entities are returned. In that case, the length of the returned list will
+ be smaller than limit.
+
+ The offset argument specifies the number of entities that matched the
+ query criteria to skip before starting to return results. The limit is
+ applied after the offset, so if you provide a limit of 10 and an offset of 5
+ and your query matches 20 records, the records whose index is 0 through 4
+ will be skipped and the records whose index is 5 through 14 will be
+ returned.
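+
+ For example, assuming query matches 20 entities, this returns the
+ records whose index is 5 through 14:
+
+ > results = query.Get(10, offset=5)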
+
+ The results are always returned as a list. If there are no results left,
+ an empty list is returned.
+
+ If you know in advance how many results you want, this method is more
+ efficient than Run(), since it fetches all of the results at once. (The
+ datastore backend sets the limit on the underlying scan, which makes the
+ scan significantly faster.)
+
+ Args:
+ # the maximum number of entities to return
+ int or long
+ # the number of entities to skip
+ int or long
+
+ Returns:
+ # a list of entities
+ [Entity, ...]
+ """
+ if not isinstance(limit, (int, long)) or limit <= 0:
+ raise datastore_errors.BadArgumentError(
+ 'Argument to Get named \'limit\' must be an int greater than 0; '
+ 'received %s (a %s)' % (limit, typename(limit)))
+
+ if not isinstance(offset, (int, long)) or offset < 0:
+ raise datastore_errors.BadArgumentError(
+ 'Argument to Get named \'offset\' must be an int greater than or '
+ 'equal to 0; received %s (a %s)' % (offset, typename(offset)))
+
+ return self._Run(limit=limit, offset=offset,
+ prefetch_count=limit)._Get(limit)
+
+ def Count(self, limit=None):
+ """Returns the number of entities that this query matches. The returned
+ count is cached; successive Count() calls will not re-scan the datastore
+ unless the query is changed.
+
+ Args:
+ limit, a number. If there are more results than this, stop short and
+ just return this number. Providing this argument makes the count
+ operation more efficient.
+ Returns:
+ The number of results.
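+
+ For example (the kind and property are illustrative):
+
+ > query = Query('Person', {'age >': 21})
+ > if query.Count(limit=100) == 100:
+ > print 'at least 100 people are over 21'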
+ """
+ if self.__cached_count is not None:
+ return self.__cached_count
+
+ resp = api_base_pb.Integer64Proto()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Count',
+ self._ToPb(limit=limit), resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+ else:
+ self.__cached_count = resp.value()
+
+ return self.__cached_count
+
+ def __iter__(self):
+ raise NotImplementedError(
+ 'Query objects should not be used as iterators. Call Run() first.')
+
+ def __setitem__(self, filter, value):
+ """Implements the [] operator. Used to set filters.
+
+ If the filter string is empty or not a string, raises BadFilterError. If
+ the value is not a supported type, raises BadValueError.
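+
+ For example, these two filters are equivalent to passing them to the
+ constructor:
+
+ > query = Query('Person')
+ > query['age >'] = 21
+ > query['name ='] = 'Bob'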
+ """
+ if isinstance(value, tuple):
+ value = list(value)
+
+ datastore_types.ValidateProperty(' ', value, read_only=True)
+ match = self._CheckFilter(filter, value)
+ property = match.group(1)
+ operator = match.group(3)
+
+ dict.__setitem__(self, filter, value)
+
+ if operator in self.INEQUALITY_OPERATORS:
+ if self.__inequality_prop is None:
+ self.__inequality_prop = property
+ else:
+ assert self.__inequality_prop == property
+ self.__inequality_count += 1
+
+ if filter not in self.__filter_order:
+ self.__filter_order[filter] = self.__filter_counter
+ self.__filter_counter += 1
+
+ self.__cached_count = None
+
+ def setdefault(self, filter, value):
+ """If the filter exists, returns its value. Otherwise sets it to value.
+
+ If the property name is the empty string or not a string, raises
+ BadPropertyError. If the value is not a supported type, raises
+ BadValueError.
+ """
+ datastore_types.ValidateProperty(' ', value)
+ self._CheckFilter(filter, value)
+ self.__cached_count = None
+ return dict.setdefault(self, filter, value)
+
+ def __delitem__(self, filter):
+ """Implements the del [] operator. Used to remove filters.
+ """
+ dict.__delitem__(self, filter)
+ del self.__filter_order[filter]
+ self.__cached_count = None
+
+ match = Query.FILTER_REGEX.match(filter)
+ property = match.group(1)
+ operator = match.group(3)
+
+ if operator in self.INEQUALITY_OPERATORS:
+ assert self.__inequality_count >= 1
+ assert property == self.__inequality_prop
+ self.__inequality_count -= 1
+ if self.__inequality_count == 0:
+ self.__inequality_prop = None
+
+ def update(self, other):
+ """Updates this query's filters from the ones in other.
+
+ If any filter string is invalid, raises BadFilterError. If any value is
+ not a supported type, raises BadValueError.
+ """
+ for filter, value in other.items():
+ self.__setitem__(filter, value)
+
+ def copy(self):
+ """The copy method is not supported.
+ """
+ raise NotImplementedError('Query does not support the copy() method.')
+
+ def _CheckFilter(self, filter, values):
+ """Type check a filter string and list of values.
+
+ Raises BadFilterError if the filter string is empty, not a string, or
+ invalid. Raises BadValueError if the value type is not supported.
+
+ Args:
+ filter: String containing the filter text.
+ values: List of associated filter values.
+
+ Returns:
+ re.MatchObject (never None) that matches the 'filter'. Group 1 is the
+ property name, group 3 is the operator. (Group 2 is unused.)
+ """
+ try:
+ match = Query.FILTER_REGEX.match(filter)
+ if not match:
+ raise datastore_errors.BadFilterError(
+ 'Could not parse filter string: %s' % str(filter))
+ except TypeError:
+ raise datastore_errors.BadFilterError(
+ 'Could not parse filter string: %s' % str(filter))
+
+ property = match.group(1)
+ operator = match.group(3)
+ if operator is None:
+ operator = '='
+
+ if isinstance(values, tuple):
+ values = list(values)
+ elif not isinstance(values, list):
+ values = [values]
+ if isinstance(values[0], datastore_types._RAW_PROPERTY_TYPES):
+ raise datastore_errors.BadValueError(
+ 'Filtering on %s properties is not supported.' % typename(values[0]))
+
+ if operator in self.INEQUALITY_OPERATORS:
+ if self.__inequality_prop and property != self.__inequality_prop:
+ raise datastore_errors.BadFilterError(
+ 'Only one property per query may have inequality filters (%s).' %
+ ', '.join(self.INEQUALITY_OPERATORS))
+ elif len(self.__orderings) >= 1 and self.__orderings[0][0] != property:
+ raise datastore_errors.BadFilterError(
+ 'Inequality operators (%s) must be on the same property as the '
+ 'first sort order, if any sort orders are supplied' %
+ ', '.join(self.INEQUALITY_OPERATORS))
+
+ if (self.__kind is None and
+ property != datastore_types._KEY_SPECIAL_PROPERTY):
+ raise datastore_errors.BadFilterError(
+ 'Only %s filters are allowed on kindless queries.' %
+ datastore_types._KEY_SPECIAL_PROPERTY)
+
+ if property in datastore_types._SPECIAL_PROPERTIES:
+ if property == datastore_types._KEY_SPECIAL_PROPERTY:
+ for value in values:
+ if not isinstance(value, Key):
+ raise datastore_errors.BadFilterError(
+ '%s filter value must be a Key; received %s (a %s)' %
+ (datastore_types._KEY_SPECIAL_PROPERTY, value, typename(value)))
+
+ return match
+
+ def _ToPb(self, limit=None, offset=None, count=None):
+ """Converts this Query to its protocol buffer representation. Not
+ intended to be used by application developers. Enforced by hiding the
+ datastore_pb classes.
+
+ Args:
+ # an upper bound on the number of results returned by the query.
+ limit: int
+ # number of results that match the query to skip. limit is applied
+ # after the offset is fulfilled
+ offset: int
+ # the requested initial batch size
+ count: int
+
+ Returns:
+ # the PB representation of this Query
+ datastore_pb.Query
+
+ Raises:
+ BadRequestError if called inside a transaction and the query does not
+ include an ancestor.
+ """
+
+ if not self.__ancestor and _CurrentTransactionKey():
+ raise datastore_errors.BadRequestError(
+ 'Only ancestor queries are allowed inside transactions.')
+
+ pb = datastore_pb.Query()
+ _MaybeSetupTransaction(pb, [self.__ancestor])
+
+ if self.__kind is not None:
+ pb.set_kind(self.__kind.encode('utf-8'))
+ pb.set_keys_only(bool(self.__keys_only))
+ if self.__app:
+ pb.set_app(self.__app.encode('utf-8'))
+ if limit is not None:
+ pb.set_limit(limit)
+ if offset is not None:
+ pb.set_offset(offset)
+ if count is not None:
+ pb.set_count(count)
+ if self.__ancestor:
+ pb.mutable_ancestor().CopyFrom(self.__ancestor._Key__reference)
+
+ if ((self.__hint == self.ORDER_FIRST and self.__orderings) or
+ (self.__hint == self.ANCESTOR_FIRST and self.__ancestor) or
+ (self.__hint == self.FILTER_FIRST and len(self) > 0)):
+ pb.set_hint(self.__hint)
+
+ ordered_filters = [(i, f) for f, i in self.__filter_order.iteritems()]
+ ordered_filters.sort()
+
+ for i, filter_str in ordered_filters:
+ if filter_str not in self:
+ continue
+
+ values = self[filter_str]
+ match = self._CheckFilter(filter_str, values)
+ name = match.group(1)
+
+ props = datastore_types.ToPropertyPb(name, values)
+ if not isinstance(props, list):
+ props = [props]
+
+ op = match.group(3)
+ if op is None:
+ op = '='
+
+ for prop in props:
+ filter = pb.add_filter()
+ filter.set_op(self.OPERATORS[op])
+ filter.add_property().CopyFrom(prop)
+
+ for property, direction in self.__orderings:
+ order = pb.add_order()
+ order.set_property(property.encode('utf-8'))
+ order.set_direction(direction)
+
+ return pb
+
+
+def AllocateIds(model_key, size):
+ """Allocates a range of IDs of size for the key defined by model_key
+
+ Allocates a range of IDs in the datastore such that those IDs will not
+ be automatically assigned to new entities. You can only allocate IDs
+ for model keys from your app. If there is an error, raises a subclass of
+ datastore_errors.Error.
+
+ Args:
+ model_key: Key or string to serve as a model specifying the ID sequence
+ in which to allocate IDs
+ size: integer; the number of IDs to allocate
+
+ Returns:
+ (start, end) of the allocated range, inclusive.
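+
+ For example, a sketch (assumes key is a complete Key whose kind's ID
+ sequence you want to draw from):
+
+ > start, end = AllocateIds(key, 10)
+ > print 'reserved IDs %d through %d' % (start, end)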
+ """
+ keys, multiple = NormalizeAndTypeCheckKeys(model_key)
+
+ if len(keys) > 1:
+ raise datastore_errors.BadArgumentError(
+ 'Cannot allocate IDs for more than one model key at a time')
+
+ if size > _MAX_ID_BATCH_SIZE:
+ raise datastore_errors.BadArgumentError(
+ 'Cannot allocate more than %s ids at a time' % _MAX_ID_BATCH_SIZE)
+
+ req = datastore_pb.AllocateIdsRequest()
+ req.mutable_model_key().CopyFrom(keys[0]._Key__reference)
+ req.set_size(size)
+
+ resp = datastore_pb.AllocateIdsResponse()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'AllocateIds', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ return resp.start(), resp.end()
+
+
+class MultiQuery(Query):
+ """Class representing a query which requires multiple datastore queries.
+
+ This class is actually a subclass of datastore.Query as it is intended to act
+ like a normal Query object (supporting the same interface).
+
+ Does not support keys only queries, since it needs whole entities in order
+ to merge sort them. (That isn't strictly necessary when there are no sort
+ orders, or when the sort order is on __key__, but allowing keys-only
+ queries in those cases and not in others would be confusing.)
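+
+ For example, a sketch of the merge GQL performs for an IN filter (the
+ kind, property names, and values are illustrative):
+
+ > q1 = Query('Person', {'city =': 'London'}).Order('name')
+ > q2 = Query('Person', {'city =': 'Paris'}).Order('name')
+ > merged = MultiQuery([q1, q2], [('name', Query.ASCENDING)])
+ > for person in merged.Run():
+ > print person['name']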
+ """
+
+ def __init__(self, bound_queries, orderings):
+ if len(bound_queries) > MAX_ALLOWABLE_QUERIES:
+ raise datastore_errors.BadArgumentError(
+ 'Cannot satisfy query -- too many subqueries (max: %d, got %d).'
+ ' Probable cause: too many IN/!= filters in query.' %
+ (MAX_ALLOWABLE_QUERIES, len(bound_queries)))
+
+ for query in bound_queries:
+ if query.IsKeysOnly():
+ raise datastore_errors.BadQueryError(
+ 'MultiQuery does not support keys_only.')
+
+ self.__bound_queries = bound_queries
+ self.__orderings = orderings
+
+ def __str__(self):
+ res = 'MultiQuery: '
+ for query in self.__bound_queries:
+ res = '%s %s' % (res, str(query))
+ return res
+
+ def Get(self, limit, offset=0):
+ """Get results of the query with a limit on the number of results.
+
+ Args:
+ limit: maximum number of values to return.
+ offset: offset requested -- if nonzero, this will override the offset in
+ the original query
+
+ Returns:
+ A list of entities with at most "limit" entries (fewer if the query
+ completes before reading limit values).
+ """
+ count = 1
+ result = []
+
+ iterator = self.Run()
+
+ try:
+ for i in xrange(offset):
+ val = iterator.next()
+ except StopIteration:
+ pass
+
+ try:
+ while count <= limit:
+ val = iterator.next()
+ result.append(val)
+ count += 1
+ except StopIteration:
+ pass
+ return result
+
+ class SortOrderEntity(object):
+ """Allow entity comparisons using provided orderings.
+
+ The iterator passed to the constructor is eventually consumed via
+ calls to GetNext(), which generate new SortOrderEntity objects with the
+ same orderings.
+ """
+
+ def __init__(self, entity_iterator, orderings):
+ """Ctor.
+
+ Args:
+ entity_iterator: an iterator of entities which will be wrapped.
+ orderings: an iterable of (identifier, order) pairs. order
+ should be either Query.ASCENDING or Query.DESCENDING.
+ """
+ self.__entity_iterator = entity_iterator
+ self.__entity = None
+ self.__min_max_value_cache = {}
+ try:
+ self.__entity = entity_iterator.next()
+ except StopIteration:
+ pass
+ else:
+ self.__orderings = orderings
+
+ def __str__(self):
+ return str(self.__entity)
+
+ def GetEntity(self):
+ """Gets the wrapped entity."""
+ return self.__entity
+
+ def GetNext(self):
+ """Wrap and return the next entity.
+
+ The entity is retrieved from the iterator given at construction time.
+ """
+ return MultiQuery.SortOrderEntity(self.__entity_iterator,
+ self.__orderings)
+
+ def CmpProperties(self, that):
+ """Compare two entities and return their relative order.
+
+ Compares self to that based on the current sort orderings and the
+ key orders between them. Returns negative, 0, or positive depending on
+ whether self is less, equal to, or greater than that. This
+ comparison returns as if all values were to be placed in ascending order
+ (highest value last). Only uses the sort orderings to compare (ignores
+ keys).
+
+ Args:
+ that: SortOrderEntity
+
+ Returns:
+ Negative if self < that
+ Zero if self == that
+ Positive if self > that
+ """
+ if not self.__entity:
+ return cmp(self.__entity, that.__entity)
+
+ for (identifier, order) in self.__orderings:
+ value1 = self.__GetValueForId(self, identifier, order)
+ value2 = self.__GetValueForId(that, identifier, order)
+
+ result = cmp(value1, value2)
+ if order == Query.DESCENDING:
+ result = -result
+ if result:
+ return result
+ return 0
+
+ def __GetValueForId(self, sort_order_entity, identifier, sort_order):
+ value = _GetPropertyValue(sort_order_entity.__entity, identifier)
+ entity_key = sort_order_entity.__entity.key()
+ if (entity_key, identifier) in self.__min_max_value_cache:
+ value = self.__min_max_value_cache[(entity_key, identifier)]
+ elif isinstance(value, list):
+ if sort_order == Query.DESCENDING:
+ value = min(value)
+ else:
+ value = max(value)
+ self.__min_max_value_cache[(entity_key, identifier)] = value
+
+ return value
+
+ def __cmp__(self, that):
+ """Compare self to that w.r.t. values defined in the sort order.
+
+ Compare an entity with another, using sort-order first, then the key
+ order to break ties. This can be used in a heap to have faster min-value
+ lookup.
+
+ Args:
+ that: other entity to compare to
+ Returns:
+ negative: if self is less than that in sort order
+ zero: if self is equal to that in sort order
+ positive: if self is greater than that in sort order
+ """
+ property_compare = self.CmpProperties(that)
+ if property_compare:
+ return property_compare
+ else:
+ return cmp(self.__entity.key(), that.__entity.key())
+
+ def Run(self):
+ """Return an iterable output with all results in order."""
+ results = []
+ count = 1
+ log_level = logging.DEBUG - 1
+ for bound_query in self.__bound_queries:
+ logging.log(log_level, 'Running query #%i' % count)
+ results.append(bound_query.Run())
+ count += 1
+
+ def IterateResults(results):
+ """Iterator function to return all results in sorted order.
+
+ Iterate over the array of results, yielding the next element, in
+ sorted order. This function is destructive (results will be empty
+ when the operation is complete).
+
+ Args:
+ results: list of result iterators to merge and iterate through
+
+ Yields:
+ The next result in sorted order.
+ """
+ result_heap = []
+ for result in results:
+ heap_value = MultiQuery.SortOrderEntity(result, self.__orderings)
+ if heap_value.GetEntity():
+ heapq.heappush(result_heap, heap_value)
+
+ used_keys = set()
+
+ while result_heap:
+ top_result = heapq.heappop(result_heap)
+
+ if top_result.GetEntity().key() not in used_keys:
+ yield top_result.GetEntity()
+
+ used_keys.add(top_result.GetEntity().key())
+
+ results_to_push = []
+ while result_heap:
+ next = heapq.heappop(result_heap)
+ if cmp(top_result, next):
+ results_to_push.append(next)
+ break
+ else:
+ results_to_push.append(next.GetNext())
+ results_to_push.append(top_result.GetNext())
+
+ for popped_result in results_to_push:
+ if popped_result.GetEntity():
+ heapq.heappush(result_heap, popped_result)
+
+ return IterateResults(results)
+
+ def Count(self, limit=None):
+ """Return the number of matched entities for this query.
+
+ Will return the de-duplicated count of results. Will call the more
+ efficient Get() function if a limit is given.
+
+ Args:
+ limit: maximum number of entries to count (for any result > limit, return
+ limit).
+ Returns:
+ count of the number of entries returned.
+ """
+ if limit is None:
+ count = 0
+ for i in self.Run():
+ count += 1
+ return count
+ else:
+ return len(self.Get(limit))
+
+ def __setitem__(self, query_filter, value):
+ """Add a new filter by setting it on all subqueries.
+
+ If any of the setting operations raise an exception, the ones
+ that succeeded are undone and the exception is propagated
+ upward.
+
+ Args:
+ query_filter: a string of the form "property operand".
+ value: the value that the given property is compared against.
+ """
+ saved_items = []
+ for index, query in enumerate(self.__bound_queries):
+ saved_items.append(query.get(query_filter, None))
+ try:
+ query[query_filter] = value
+ except:
+ for q, old_value in itertools.izip(self.__bound_queries[:index],
+ saved_items):
+ if old_value is not None:
+ q[query_filter] = old_value
+ else:
+ del q[query_filter]
+ raise
+
+ def __delitem__(self, query_filter):
+ """Delete a filter by deleting it from all subqueries.
+
+ If a KeyError is raised during the attempt, it is ignored, unless
+ every subquery raised a KeyError. If any other exception is
+ raised, any deletes will be rolled back.
+
+ Args:
+ query_filter: the filter to delete.
+
+ Raises:
+ KeyError: No subquery had an entry containing query_filter.
+ """
+ subquery_count = len(self.__bound_queries)
+ keyerror_count = 0
+ saved_items = []
+ for index, query in enumerate(self.__bound_queries):
+ try:
+ saved_items.append(query.get(query_filter, None))
+ del query[query_filter]
+ except KeyError:
+ keyerror_count += 1
+ except:
+ for q, old_value in itertools.izip(self.__bound_queries[:index],
+ saved_items):
+ if old_value is not None:
+ q[query_filter] = old_value
+ raise
+
+ if keyerror_count == subquery_count:
+ raise KeyError(query_filter)
+
+ def __iter__(self):
+ return iter(self.__bound_queries)
+
+
+class Iterator(object):
+ """An iterator over the results of a datastore query.
+
+ Iterators are used to access the results of a Query. An iterator is
+ obtained by building a Query, then calling Run() on it.
+
+ Iterator implements Python's iterator protocol, so results can be accessed
+ with the for and in statements:
+
+ > it = Query('Person').Run()
+ > for person in it:
+ > print 'Hi, %s!' % person['name']
+ """
+ def __init__(self, query_result_pb, batch_size=None):
+ self.__cursor = query_result_pb.cursor()
+ self.__keys_only = query_result_pb.keys_only()
+ self.__batch_size = batch_size
+ self.__buffer = self._ProcessQueryResult(query_result_pb)
+
+ def _Get(self, count):
+ """Gets the next count result(s) of the query.
+
+ Not intended to be used by application developers. Use the python
+ iterator protocol instead.
+
+ This method uses _Next to return the next entities or keys from the list of
+ matching results. If the query specified a sort order, results are returned
+ in that order. Otherwise, the order is undefined.
+
+ The argument, count, specifies the number of results to return. However, the
+ length of the returned list may be smaller than count. This is the case only
+ if count is greater than the number of remaining results.
+
+ The results are always returned as a list. If there are no results left,
+ an empty list is returned.
+
+ Args:
+ # the number of results to return; must be >= 1
+ count: int or long
+
+ Returns:
+ # a list of entities or keys
+ [Entity or Key, ...]
+ """
+ entity_list = self._Next(count)
+ while len(entity_list) < count and self.__more_results:
+ next_results = self._Next(count - len(entity_list), self.__batch_size)
+ if not next_results:
+ break
+ entity_list += next_results
+ return entity_list
+
+ def _Next(self, count=None):
+ """Returns the next batch of results.
+
+ Not intended to be used by application developers. Use the python
+ iterator protocol instead.
+
+ This method returns the next entities or keys from the list of matching
+ results. If the query specified a sort order, results are returned in that
+ order. Otherwise, the order is undefined.
+
+ The optional argument, count, specifies the number of results to return.
+ However, the length of the returned list may be smaller than count. This is
+ the case if count is greater than the number of remaining results or the
+ size of the remaining results exceeds the RPC buffer limit. Use _Get to
+ ensure all possible entities are retrieved.
+
+ If the count is omitted, the datastore backend decides how many entities to
+ send.
+
+ There is an internal buffer for use with the next() method. If this buffer
+ is not empty, up to 'count' values are removed from this buffer and
+ returned. It's best not to mix _Next() and next().
+
+ The results are always returned as a list. If there are no results left,
+ an empty list is returned.
+
+ Args:
+ # the number of results to return; must be >= 1
+ count: int or long or None
+
+ Returns:
+ # a list of entities or keys
+ [Entity or Key, ...]
+ """
+ if count is not None and (not isinstance(count, (int, long)) or count <= 0):
+ raise datastore_errors.BadArgumentError(
+ 'Argument to _Next must be an int greater than 0; received %s (a %s)' %
+ (count, typename(count)))
+
+ if self.__buffer:
+ if count is None:
+ entity_list = self.__buffer
+ self.__buffer = []
+ return entity_list
+ elif count <= len(self.__buffer):
+ entity_list = self.__buffer[:count]
+ del self.__buffer[:count]
+ return entity_list
+ else:
+ entity_list = self.__buffer
+ self.__buffer = []
+ count -= len(entity_list)
+ else:
+ entity_list = []
+
+ if not self.__more_results:
+ return entity_list
+
+ req = datastore_pb.NextRequest()
+ if count is not None:
+ req.set_count(count)
+ req.mutable_cursor().CopyFrom(self.__cursor)
+ result = datastore_pb.QueryResult()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next', req, result)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ return entity_list + self._ProcessQueryResult(result)
+
+ def _ProcessQueryResult(self, result):
+ """Returns all results from datastore_pb.QueryResult and updates
+ self.__more_results
+
+ Not intended to be used by application developers. Use the python
+ iterator protocol instead.
+
+ The results are always returned as a list. If there are no results left,
+ an empty list is returned.
+
+ Args:
+ # the instance of datastore_pb.QueryResult to be stored
+ result: datastore_pb.QueryResult
+
+ Returns:
+ # a list of entities or keys
+ [Entity or Key, ...]
+ """
+ self.__more_results = result.more_results()
+
+ if self.__keys_only:
+ return [Key._FromPb(e.key()) for e in result.result_list()]
+ else:
+ return [Entity._FromPb(e) for e in result.result_list()]
+
+ def next(self):
+ if not self.__buffer:
+ self.__buffer = self._Next(self.__batch_size)
+ try:
+ return self.__buffer.pop(0)
+ except IndexError:
+ raise StopIteration
+
+ def __iter__(self):
+ return self
+
+
+class _Transaction(object):
+ """Encapsulates a transaction currently in progress.
+
+ If we know the entity group for this transaction, it's stored in the
+ entity_group attribute, which is set by RunInTransaction().
+
+ modified_keys is a set containing the Keys of all entities modified (i.e. put
+ or deleted) in this transaction. If an entity is modified more than once, a
+ BadRequestError is raised.
+ """
+ def __init__(self, handle):
+ """Initializes the transaction.
+
+ Args:
+ handle: a datastore_pb.Transaction returned by a BeginTransaction call
+ """
+ assert isinstance(handle, datastore_pb.Transaction)
+ explanation = []
+ assert handle.IsInitialized(explanation), explanation
+
+ self.handle = handle
+ self.entity_group = None
+ self.modified_keys = set()
+
+
+def RunInTransaction(function, *args, **kwargs):
+ """Runs a function inside a datastore transaction.
+
+ Runs the user-provided function inside a transaction, retrying up to the
+ default number of times.
+
+ Args:
+ # a function to be run inside the transaction
+ function: callable
+ # positional arguments to pass to the function
+ args: variable number of any type
+
+ Returns:
+ the function's return value, if any
+
+ Raises:
+ TransactionFailedError, if the transaction could not be committed.
+ """
+ return RunInTransactionCustomRetries(
+ DEFAULT_TRANSACTION_RETRIES, function, *args, **kwargs)
+
+
+def RunInTransactionCustomRetries(retries, function, *args, **kwargs):
+ """Runs a function inside a datastore transaction.
+
+ Runs the user-provided function inside a full-featured, ACID datastore
+ transaction. Every Put, Get, and Delete call in the function is made within
+ the transaction. All entities involved in these calls must belong to the
+ same entity group. Queries are not supported.
+
+ The trailing arguments are passed to the function as positional arguments.
+ If the function returns a value, that value will be returned by
+ RunInTransaction. Otherwise, it will return None.
+
+ The function may raise any exception to roll back the transaction instead of
+ committing it. If this happens, the transaction will be rolled back and the
+ exception will be re-raised up to RunInTransaction's caller.
+
+ If you want to roll back intentionally, but don't have an appropriate
+ exception to raise, you can raise an instance of datastore_errors.Rollback.
+ It will cause a rollback, but will *not* be re-raised up to the caller.
+
+ The function may be run more than once, so it should be idempotent. It
+ should avoid side effects, and it shouldn't have *any* side effects that
+ aren't safe to occur multiple times. This includes modifying the arguments,
+ since they persist across invocations of the function. However, this doesn't
+ include Put, Get, and Delete calls, of course.
+
+ Example usage:
+
+ > def decrement(key, amount=1):
+ > counter = datastore.Get(key)
+ > counter['count'] -= amount
+ > if counter['count'] < 0: # don't let the counter go negative
+ > raise datastore_errors.Rollback()
+ > datastore.Put(counter)
+ >
+ > counter = datastore.Query('Counter', {'name': 'foo'}).Get(1)[0]
+ > datastore.RunInTransaction(decrement, counter.key(), amount=5)
+
+ Transactions satisfy the traditional ACID properties. They are:
+
+ - Atomic. All of a transaction's operations are executed or none of them are.
+
+ - Consistent. The datastore's state is consistent before and after a
+ transaction, whether it committed or rolled back. Invariants such as
+ "every entity has a primary key" are preserved.
+
+ - Isolated. Transactions operate on a snapshot of the datastore. Other
+ datastore operations do not see intermediate effects of the transaction;
+ they only see its effects after it has committed.
+
+ - Durable. On commit, all writes are persisted to the datastore.
+
+ Nested transactions are not supported.
+
+ Args:
+ # number of retries
+ retries: integer
+ # a function to be run inside the transaction
+ function: callable
+ # positional arguments to pass to the function
+ args: variable number of any type
+
+ Returns:
+ the function's return value, if any
+
+ Raises:
+ TransactionFailedError, if the transaction could not be committed.
+ """
+
+ if _CurrentTransactionKey():
+ raise datastore_errors.BadRequestError(
+ 'Nested transactions are not supported.')
+
+ if retries < 0:
+ raise datastore_errors.BadRequestError(
+ 'Number of retries should be a non-negative number.')
+
+ tx_key = None
+
+ try:
+ tx_key = _NewTransactionKey()
+
+ for i in range(0, retries + 1):
+ handle = datastore_pb.Transaction()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
+ api_base_pb.VoidProto(), handle)
+ except apiproxy_errors.ApplicationError, err:
+ raise _ToDatastoreError(err)
+
+ tx = _Transaction(handle)
+ _txes[tx_key] = tx
+
+ try:
+ result = function(*args, **kwargs)
+ except:
+ original_exception = sys.exc_info()
+
+ try:
+ resp = api_base_pb.VoidProto()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback',
+ tx.handle, resp)
+ except:
+ exc_info = sys.exc_info()
+ logging.info('Exception sending Rollback:\n' +
+ ''.join(traceback.format_exception(*exc_info)))
+
+ type, value, trace = original_exception
+ if type is datastore_errors.Rollback:
+ return
+ else:
+ raise type, value, trace
+
+ try:
+ resp = datastore_pb.CommitResponse()
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit',
+ tx.handle, resp)
+ except apiproxy_errors.ApplicationError, err:
+ if (err.application_error ==
+ datastore_pb.Error.CONCURRENT_TRANSACTION):
+ logging.warning('Transaction collision for entity group with '
+ 'key %r. Retrying...', tx.entity_group)
+ tx.handle = None
+ tx.entity_group = None
+ continue
+ else:
+ raise _ToDatastoreError(err)
+
+ return result
+
+ raise datastore_errors.TransactionFailedError(
+ 'The transaction could not be committed. Please try again.')
+
+ finally:
+ if tx_key in _txes:
+ del _txes[tx_key]
+ del tx_key
+
+
+def _MaybeSetupTransaction(request, keys):
+ """If we're in a transaction, validates and populates it in the request.
+
+ If we're currently inside a transaction, this records the entity group,
+ checks that the keys are all in that entity group, and populates the
+ transaction handle in the request.
+
+ Raises BadRequestError if the entity has a different entity group than the
+ current transaction.
+
+ Args:
+ request: GetRequest, PutRequest, DeleteRequest, or Query
+ keys: sequence of Keys
+
+ Returns:
+ _Transaction if we're inside a transaction, otherwise None
+ """
+ assert isinstance(request, (datastore_pb.GetRequest, datastore_pb.PutRequest,
+ datastore_pb.DeleteRequest, datastore_pb.Query,
+ taskqueue_service_pb.TaskQueueAddRequest,
+ )), request.__class__
+ tx_key = None
+
+ try:
+ tx_key = _CurrentTransactionKey()
+ if tx_key:
+ tx = _txes[tx_key]
+
+ groups = [k.entity_group() for k in keys]
+ if tx.entity_group:
+ expected_group = tx.entity_group
+ elif groups:
+ expected_group = groups[0]
+ else:
+ expected_group = None
+
+ for group in groups:
+ if (group != expected_group or
+ (not group.has_id_or_name() and group is not expected_group)):
+ raise _DifferentEntityGroupError(expected_group, group)
+
+ if not tx.entity_group and group.has_id_or_name():
+ tx.entity_group = group
+
+ assert tx.handle.IsInitialized()
+ request.mutable_transaction().CopyFrom(tx.handle)
+
+ return tx
+
+ finally:
+ del tx_key
+
+
+def _DifferentEntityGroupError(a, b):
+ """Raises a BadRequestError that says the given entity groups are different.
+
+ Includes the two entity groups in the message, formatted more clearly and
+ concisely than repr(Key).
+
+ Args:
+ a, b are both Keys that represent entity groups.
+ """
+ def id_or_name(key):
+ if key.name():
+ return 'name=%r' % key.name()
+ else:
+ return 'id=%r' % key.id()
+
+ raise datastore_errors.BadRequestError(
+ 'Cannot operate on different entity groups in a transaction: '
+ '(kind=%r, %s) and (kind=%r, %s).' % (a.kind(), id_or_name(a),
+ b.kind(), id_or_name(b)))
+
+
+def _FindTransactionFrameInStack():
+ """Walks the stack to find a RunInTransaction() call.
+
+ Returns:
+ # this is the RunInTransactionCustomRetries() frame record, if found
+ frame record or None
+ """
+ frame = sys._getframe()
+ filename = frame.f_code.co_filename
+
+ frame = frame.f_back.f_back
+ while frame:
+ if (frame.f_code.co_filename == filename and
+ frame.f_code.co_name == 'RunInTransactionCustomRetries'):
+ return frame
+ frame = frame.f_back
+
+ return None
+
+_CurrentTransactionKey = _FindTransactionFrameInStack
+
+_NewTransactionKey = sys._getframe
+
+
+def _GetCompleteKeyOrError(arg):
+ """Expects an Entity or a Key, and returns the corresponding Key. Raises
+ BadArgumentError or BadKeyError if arg is a different type or is incomplete.
+
+ Args:
+ arg: Entity or Key
+
+ Returns:
+ Key
+ """
+ if isinstance(arg, Key):
+ key = arg
+ elif isinstance(arg, basestring):
+ key = Key(arg)
+ elif isinstance(arg, Entity):
+ key = arg.key()
+ else:
+ raise datastore_errors.BadArgumentError(
+ 'Expects argument to be an Entity or Key; received %s (a %s).' %
+ (arg, typename(arg)))
+ assert isinstance(key, Key)
+
+ if not key.has_id_or_name():
+ raise datastore_errors.BadKeyError('Key %r is not complete.' % key)
+
+ return key
+
+
+def _GetPropertyValue(entity, property):
+ """Returns an entity's value for a given property name.
+
+ Handles special properties like __key__ as well as normal properties.
+
+ Args:
+ entity: datastore.Entity
+ property: str; the property name
+
+ Returns:
+ property value. For __key__, a datastore_types.Key.
+
+ Raises:
+ KeyError, if the entity does not have the given property.
+ """
+ if property in datastore_types._SPECIAL_PROPERTIES:
+ assert property == datastore_types._KEY_SPECIAL_PROPERTY
+ return entity.key()
+ else:
+ return entity[property]
+
+
+def _AddOrAppend(dictionary, key, value):
+ """Adds the value to the existing values in the dictionary, if any.
+
+ If dictionary[key] doesn't exist, sets dictionary[key] to value.
+
+ If dictionary[key] is not a list, sets dictionary[key] to [old_value, value].
+
+ If dictionary[key] is a list, appends value to that list.
+
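+ For example:
+
+ > d = {}
+ > _AddOrAppend(d, 'a', 1) # d == {'a': 1}
+ > _AddOrAppend(d, 'a', 2) # d == {'a': [1, 2]}
+ > _AddOrAppend(d, 'a', 3) # d == {'a': [1, 2, 3]}
+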
+ Args:
+ dictionary: a dict
+ key, value: anything
+ """
+ if key in dictionary:
+ existing_value = dictionary[key]
+ if isinstance(existing_value, list):
+ existing_value.append(value)
+ else:
+ dictionary[key] = [existing_value, value]
+ else:
+ dictionary[key] = value
+
+
+def _ToDatastoreError(err):
+ """Converts an apiproxy.ApplicationError to an error in datastore_errors.
+
+ Args:
+ err: apiproxy.ApplicationError
+
+ Returns:
+ a subclass of datastore_errors.Error
+ """
+ errors = {
+ datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError,
+ datastore_pb.Error.CONCURRENT_TRANSACTION:
+ datastore_errors.TransactionFailedError,
+ datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError,
+ datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError,
+ datastore_pb.Error.TIMEOUT: datastore_errors.Timeout,
+ }
+
+ if err.application_error in errors:
+ raise errors[err.application_error](err.error_detail)
+ else:
+ raise datastore_errors.Error(err.error_detail)
diff --git a/google_appengine/google/appengine/api/datastore.pyc b/google_appengine/google/appengine/api/datastore.pyc
new file mode 100644
index 0000000..9056a21
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore_admin.py b/google_appengine/google/appengine/api/datastore_admin.py
new file mode 100755
index 0000000..da2b6c7
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_admin.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""The Python datastore admin API for managing indices and schemas.
+"""
+
+
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import datastore
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+from google.appengine.datastore import datastore_index
+from google.appengine.datastore import datastore_pb
+from google.appengine.runtime import apiproxy_errors
+from google.appengine.datastore import entity_pb
+
+
+_DIRECTION_MAP = {
+ 'asc': entity_pb.Index_Property.ASCENDING,
+ 'ascending': entity_pb.Index_Property.ASCENDING,
+ 'desc': entity_pb.Index_Property.DESCENDING,
+ 'descending': entity_pb.Index_Property.DESCENDING,
+ }
+
+
+def GetSchema(_app=None, properties=True, start_kind=None, end_kind=None):
+ """Infers an app's schema from the entities in the datastore.
+
+ Note that the PropertyValue PBs in the returned EntityProtos are empty
+ placeholders, so they may cause problems if you try to convert them to
+ python values with e.g. datastore_types. In particular, user values will
+ throw UserNotFoundError because their email and auth domain fields will be
+ empty.
+
+ Args:
+ properties: boolean, whether to include property names and types
+ start_kind, end_kind: optional range endpoints for the kinds to return,
+ compared lexicographically
+
+ Returns:
+ list of entity_pb.EntityProto, with kind and property names and types
+ """
+ req = datastore_pb.GetSchemaRequest()
+ req.set_app(datastore_types.ResolveAppId(_app))
+ req.set_properties(properties)
+ if start_kind is not None:
+ req.set_start_kind(start_kind)
+ if end_kind is not None:
+ req.set_end_kind(end_kind)
+ resp = datastore_pb.Schema()
+
+ _Call('GetSchema', req, resp)
+ return resp.kind_list()
+
+
+def GetIndices(_app=None):
+ """Fetches all composite indices in the datastore for this app.
+
+ Returns:
+ list of entity_pb.CompositeIndex
+ """
+ req = api_base_pb.StringProto()
+ req.set_value(datastore_types.ResolveAppId(_app))
+ resp = datastore_pb.CompositeIndices()
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', 'GetIndices', req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise datastore._ToDatastoreError(err)
+
+ return resp.index_list()
+
+
+def CreateIndex(index):
+ """Creates a new composite index in the datastore for this app.
+
+ Args:
+ index: entity_pb.CompositeIndex
+
+ Returns:
+ int, the id allocated to the index
+ """
+ resp = api_base_pb.Integer64Proto()
+ _Call('CreateIndex', index, resp)
+ return resp.value()
+
+
+def UpdateIndex(index):
+ """Updates an index's status. The entire index definition must be present.
+
+ Args:
+ index: entity_pb.CompositeIndex
+ """
+ _Call('UpdateIndex', index, api_base_pb.VoidProto())
+
+
+def DeleteIndex(index):
+ """Deletes an index. The entire index definition must be present.
+
+ Args:
+ index: entity_pb.CompositeIndex
+ """
+ _Call('DeleteIndex', index, api_base_pb.VoidProto())
+
+
+def _Call(call, req, resp):
+ """Generic method for making a datastore API call.
+
+ Args:
+ call: string, the name of the RPC call
+ req: the request PB. If the app_id field is not set, it defaults to the
+ local app.
+ resp: the response PB
+ """
+ if hasattr(req, 'app_id'):
+ req.set_app_id(datastore_types.ResolveAppId(req.app_id(), 'req.app_id()'))
+
+ try:
+ apiproxy_stub_map.MakeSyncCall('datastore_v3', call, req, resp)
+ except apiproxy_errors.ApplicationError, err:
+ raise datastore._ToDatastoreError(err)
+
+
+def IndexDefinitionToProto(app_id, index_definition):
+ """Transform individual Index definition to protocol buffer.
+
+ Args:
+ app_id: Application id for new protocol buffer CompositeIndex.
+ index_definition: datastore_index.Index object to transform.
+
+ Returns:
+ New entity_pb.CompositeIndex with default values set and index
+ information filled in.
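+
+ For example, a sketch (the app id, kind, and property name are
+ placeholders; the constructors mirror those used in ProtoToIndexDefinition
+ below, and Index is assumed to default ancestor to False):
+
+ > prop = datastore_index.Property(name='age')
+ > index = datastore_index.Index(kind='Person', properties=[prop])
+ > proto = IndexDefinitionToProto('my-app', index)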
+ """
+ proto = entity_pb.CompositeIndex()
+
+ proto.set_app_id(app_id)
+ proto.set_id(0)
+ proto.set_state(entity_pb.CompositeIndex.WRITE_ONLY)
+
+ definition_proto = proto.mutable_definition()
+ definition_proto.set_entity_type(index_definition.kind)
+ definition_proto.set_ancestor(index_definition.ancestor)
+
+ if index_definition.properties is not None:
+ for prop in index_definition.properties:
+ prop_proto = definition_proto.add_property()
+ prop_proto.set_name(prop.name)
+ prop_proto.set_direction(_DIRECTION_MAP[prop.direction])
+
+ return proto
+
+
+def IndexDefinitionsToProtos(app_id, index_definitions):
+ """Transform multiple index definitions to composite index records
+
+ Args:
+ app_id: Application id for new protocol buffer CompositeIndex.
+ index_definitions: a list of datastore_index.Index objects to transform.
+
+ Returns:
+ A list of transformed entity_pb.CompositeIndex entities with default values
+ set and index information filled in.
+ """
+ return [IndexDefinitionToProto(app_id, index)
+ for index in index_definitions]
+
+
+def ProtoToIndexDefinition(proto):
+ """Transform individual index protocol buffer to index definition.
+
+ Args:
+ proto: An instance of entity_pb.CompositeIndex to transform.
+
+ Returns:
+ A new instance of datastore_index.Index.
+ """
+ properties = []
+ proto_index = proto.definition()
+ for prop_proto in proto_index.property_list():
+ prop_definition = datastore_index.Property(name=prop_proto.name())
+ if prop_proto.direction() == entity_pb.Index_Property.DESCENDING:
+ prop_definition.direction = 'descending'
+ properties.append(prop_definition)
+
+ index = datastore_index.Index(kind=proto_index.entity_type(),
+ properties=properties)
+ if proto_index.ancestor():
+ index.ancestor = True
+ return index
+
+
+
+def ProtosToIndexDefinitions(protos):
+ """Transforms multiple index protocol buffers into index definitions.
+
+ Args:
+ protos: a list of entity_pb.CompositeIndex records.
+
+ Returns:
+ A list of datastore_index.Index objects.
+ """
+ return [ProtoToIndexDefinition(definition) for definition in protos]
diff --git a/google_appengine/google/appengine/api/datastore_admin.pyc b/google_appengine/google/appengine/api/datastore_admin.pyc
new file mode 100644
index 0000000..302bf52
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_admin.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore_entities.py b/google_appengine/google/appengine/api/datastore_entities.py
new file mode 100755
index 0000000..93ffdb5
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_entities.py
@@ -0,0 +1,343 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Classes for common kinds, including Contact, Message, and Event.
+
+Most of these kinds are based on the gd namespace "kinds" from GData:
+
+ http://code.google.com/apis/gdata/common-elements.html
+"""
+
+
+
+
+
+import types
+import urlparse
+from xml.sax import saxutils
+from google.appengine.datastore import datastore_pb
+from google.appengine.api import datastore
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+
+class GdKind(datastore.Entity):
+ """ A base class for gd namespace kinds.
+
+ This class contains common logic for all gd namespace kinds. For example,
+ this class translates datastore (app id, kind, key) tuples to tag:
+ URIs appropriate for use in <key> tags.
+ """
+
+ HEADER = u"""<entry xmlns:gd='http://schemas.google.com/g/2005'>
+ <category scheme='http://schemas.google.com/g/2005#kind'
+ term='http://schemas.google.com/g/2005#%s' />"""
+ FOOTER = u"""
+</entry>"""
+
+ _kind_properties = set()
+ _contact_properties = set()
+
+ def __init__(self, kind, title, kind_properties, contact_properties=[]):
+ """ Ctor.
+
+ title is the name of this particular entity, e.g. Bob Jones or Mom's
+ Birthday Party.
+
+ kind_properties is a list of property names that should be included in
+ this entity's XML encoding as first-class XML elements, instead of
+ <property> elements. 'title' and 'content' are added to kind_properties
+ automatically, and may not appear in contact_properties.
+
+ contact_properties is a list of property names that are Keys that point to
+ Contact entities, and should be included in this entity's XML encoding as
+ <gd:who> elements. If a property name is included in both kind_properties
+ and contact_properties, it is treated as a Contact property.
+
+ Args:
+ kind: string
+ title: string
+ kind_properties: list of strings
+ contact_properties: list of strings
+ """
+ datastore.Entity.__init__(self, kind)
+
+ if not isinstance(title, types.StringTypes):
+ raise datastore_errors.BadValueError(
+ 'Expected a string for title; received %s (a %s).' %
+ (title, datastore_types.typename(title)))
+ self['title'] = title
+ self['content'] = ''
+
+ self._contact_properties = set(contact_properties)
+ assert not self._contact_properties.intersection(self.keys())
+
+ self._kind_properties = set(kind_properties) - self._contact_properties
+ self._kind_properties.add('title')
+ self._kind_properties.add('content')
+
+ def _KindPropertiesToXml(self):
+ """ Convert the properties that are part of this gd kind to XML. For
+ testability, the XML elements in the output are sorted alphabetically
+ by property name.
+
+ Returns:
+ string # the XML representation of the gd kind properties
+ """
+ properties = self._kind_properties.intersection(set(self.keys()))
+
+ xml = u''
+ for prop in sorted(properties):
+ prop_xml = saxutils.quoteattr(prop)[1:-1]
+
+ value = self[prop]
+ has_toxml = (hasattr(value, 'ToXml') or
+ isinstance(value, list) and hasattr(value[0], 'ToXml'))
+
+ for val in self._XmlEscapeValues(prop):
+ if has_toxml:
+ xml += '\n %s' % val
+ else:
+ xml += '\n <%s>%s</%s>' % (prop_xml, val, prop_xml)
+
+ return xml
+
+ def _ContactPropertiesToXml(self):
+ """ Convert this kind's Contact properties kind to XML. For testability,
+ the XML elements in the output are sorted alphabetically by property name.
+
+ Returns:
+ string # the XML representation of the Contact properties
+ """
+ properties = self._contact_properties.intersection(set(self.keys()))
+
+ xml = u''
+ for prop in sorted(properties):
+ values = self[prop]
+ if not isinstance(values, list):
+ values = [values]
+
+ for value in values:
+ assert isinstance(value, datastore_types.Key)
+ xml += """
+ <gd:who rel="http://schemas.google.com/g/2005#%s.%s">
+ <gd:entryLink href="%s" />
+ </gd:who>""" % (self.kind().lower(), prop, value.ToTagUri())
+
+ return xml
+
+ def _LeftoverPropertiesToXml(self):
+ """ Convert all of this entity's properties that *aren't* part of this gd
+ kind to XML.
+
+ Returns:
+ string # the XML representation of the leftover properties
+ """
+ leftovers = set(self.keys())
+ leftovers -= self._kind_properties
+ leftovers -= self._contact_properties
+ if leftovers:
+ return u'\n ' + '\n '.join(self._PropertiesToXml(leftovers))
+ else:
+ return u''
+
+ def ToXml(self):
+ """ Returns an XML representation of this entity, as a string.
+ """
+ xml = GdKind.HEADER % self.kind().lower()
+ xml += self._KindPropertiesToXml()
+ xml += self._ContactPropertiesToXml()
+ xml += self._LeftoverPropertiesToXml()
+ xml += GdKind.FOOTER
+ return xml
+
+
+class Message(GdKind):
+ """A message, such as an email, a discussion group posting, or a comment.
+
+ Includes the message title, contents, participants, and other properties.
+
+ This is the gd Message kind. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdMessageKind
+
+ These properties are meaningful. They are all optional.
+
+ property name property type meaning
+ -------------------------------------
+ title string message subject
+ content string message body
+ from Contact* sender
+ to Contact* primary recipient
+ cc Contact* CC recipient
+ bcc Contact* BCC recipient
+ reply-to Contact* intended recipient of replies
+ link Link* attachment
+ category Category* tag or label associated with this message
+ geoPt GeoPt* geographic location the message was posted from
+ rating Rating* message rating, as defined by the application
+
+ * means this property may be repeated.
+
+ The Contact properties should be Keys of Contact entities. They are
+ represented in the XML encoding as linked <gd:who> elements.
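+
+ For example, a sketch (assumes bob_key is the Key of a Contact entity):
+
+ > msg = Message('Hello!')
+ > msg['content'] = 'Are you coming to the party?'
+ > msg['to'] = bob_key
+ > xml = msg.ToXml()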
+ """
+ KIND_PROPERTIES = ['title', 'content', 'link', 'category', 'geoPt', 'rating']
+ CONTACT_PROPERTIES = ['from', 'to', 'cc', 'bcc', 'reply-to']
+
+ def __init__(self, title, kind='Message'):
+ GdKind.__init__(self, kind, title, Message.KIND_PROPERTIES,
+ Message.CONTACT_PROPERTIES)
+
+
+class Event(GdKind):
+ """A calendar event.
+
+ Includes the event title, description, location, organizer, start and end
+ time, and other details.
+
+ This is the gd Event kind. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdEventKind
+
+ These properties are meaningful. They are all optional.
+
+ property name property type meaning
+ -------------------------------------
+ title string event name
+ content string event description
+ author string the organizer's name
+ where string* human-readable location (not a GeoPt)
+ startTime timestamp start time
+ endTime timestamp end time
+ eventStatus string one of the Event.Status values
+ link Link* page with more information
+ category Category* tag or label associated with this event
+ attendee Contact* attendees and other related people
+
+ * means this property may be repeated.
+
+ The Contact properties should be Keys of Contact entities. They are
+ represented in the XML encoding as linked <gd:who> elements.
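+
+ For example, a sketch (the title and times are illustrative; assumes the
+ datetime module is imported):
+
+ > party = Event("Mom's Birthday Party")
+ > party['startTime'] = datetime.datetime(2009, 10, 19, 18, 0)
+ > party['where'] = 'our house'
+ > xml = party.ToXml()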
+ """
+ KIND_PROPERTIES = ['title', 'content', 'author', 'where', 'startTime',
+ 'endTime', 'eventStatus', 'link', 'category']
+ CONTACT_PROPERTIES = ['attendee']
+
+ class Status:
+ CONFIRMED = 'confirmed'
+ TENTATIVE = 'tentative'
+ CANCELED = 'canceled'
+
+ def __init__(self, title, kind='Event'):
+ GdKind.__init__(self, kind, title, Event.KIND_PROPERTIES,
+ Event.CONTACT_PROPERTIES)
+
+ def ToXml(self):
+ """ Override GdKind.ToXml() to special-case author, gd:where, gd:when, and
+ gd:eventStatus.
+ """
+ xml = GdKind.HEADER % self.kind().lower()
+
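+    # Reuses Contact.KIND_PROPERTIES (title, content, link, category) as the
+    # set of plain properties; the Event-specific ones are special-cased below.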
+ self._kind_properties = set(Contact.KIND_PROPERTIES)
+ xml += self._KindPropertiesToXml()
+
+ if 'author' in self:
+ xml += """
+ <author><name>%s</name></author>""" % self['author']
+
+ if 'eventStatus' in self:
+ xml += """
+ <gd:eventStatus value="http://schemas.google.com/g/2005#event.%s" />""" % (
+ self['eventStatus'])
+
+ if 'where' in self:
+ lines = ['<gd:where valueString="%s" />' % val
+ for val in self._XmlEscapeValues('where')]
+ xml += '\n ' + '\n '.join(lines)
+
+ xml += '\n <gd:when'
+ for key in ['startTime', 'endTime']:
+ if key in self:
+ xml += ' %s="%s"' % (key, self[key].isoformat())
+ xml += ' />'
+
+ self._kind_properties.update(['author', 'where', 'startTime', 'endTime',
+ 'eventStatus'])
+ xml += self._ContactPropertiesToXml()
+ xml += self._LeftoverPropertiesToXml()
+ xml += GdKind.FOOTER
+ return xml
+
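+# Example usage (an illustrative sketch; startTime and endTime must be
+# datetime.datetime values, since ToXml() serializes them with isoformat()):
+#
+#   event = Event('Standup')
+#   event['where'] = 'Room 2'
+#   event['startTime'] = datetime.datetime(2009, 10, 19, 9, 30)
+#   xml = event.ToXml()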
+
+class Contact(GdKind):
+ """A contact: a person, a venue such as a club or a restaurant, or an
+ organization.
+
+ This is the gd Contact kind. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdContactKind
+
+ Most of the information about the contact is in the <gd:contactSection>
+ element; see the reference section for that element for details.
+
+ These properties are meaningful. They are all optional.
+
+ property name property type meaning
+ -------------------------------------
+ title string contact's name
+ content string notes
+ email Email* email address
+ geoPt GeoPt* geographic location
+ im IM* IM address
+ phoneNumber Phonenumber* phone number
+ postalAddress PostalAddress* mailing address
+ link Link* link to more information
+ category Category* tag or label associated with this contact
+
+ * means this property may be repeated.
+ """
+ CONTACT_SECTION_HEADER = """
+ <gd:contactSection>"""
+ CONTACT_SECTION_FOOTER = """
+ </gd:contactSection>"""
+
+ KIND_PROPERTIES = ['title', 'content', 'link', 'category']
+
+ CONTACT_SECTION_PROPERTIES = ['email', 'geoPt', 'im', 'phoneNumber',
+ 'postalAddress']
+
+ def __init__(self, title, kind='Contact'):
+ GdKind.__init__(self, kind, title, Contact.KIND_PROPERTIES)
+
+ def ToXml(self):
+ """ Override GdKind.ToXml() to put some properties inside a
+ gd:contactSection.
+ """
+ xml = GdKind.HEADER % self.kind().lower()
+
+ self._kind_properties = set(Contact.KIND_PROPERTIES)
+ xml += self._KindPropertiesToXml()
+
+ xml += Contact.CONTACT_SECTION_HEADER
+ self._kind_properties = set(Contact.CONTACT_SECTION_PROPERTIES)
+ xml += self._KindPropertiesToXml()
+ xml += Contact.CONTACT_SECTION_FOOTER
+
+ self._kind_properties.update(Contact.KIND_PROPERTIES)
+ xml += self._LeftoverPropertiesToXml()
+ xml += GdKind.FOOTER
+ return xml
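+
+# Example usage (an illustrative sketch; assumes datastore_types.Email is
+# available in this scope):
+#
+#   contact = Contact('Jane Doe')
+#   contact['email'] = datastore_types.Email('jane@example.com')
+#   xml = contact.ToXml()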
diff --git a/google_appengine/google/appengine/api/datastore_errors.py b/google_appengine/google/appengine/api/datastore_errors.py
new file mode 100755
index 0000000..ff53ba2
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_errors.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Errors used in the Python datastore API."""
+
+
+
+
+
+
+class Error(Exception):
+ """Base datastore error type.
+ """
+
+class BadValueError(Error):
+ """Raised by Entity.__setitem__(), Query.__setitem__(), Get(), and others
+ when a property value or filter value is invalid.
+ """
+
+class BadPropertyError(Error):
+ """Raised by Entity.__setitem__() when a property name isn't a string.
+ """
+
+class BadRequestError(Error):
+ """Raised by datastore calls when the parameter(s) are invalid.
+ """
+
+class EntityNotFoundError(Error):
+ """DEPRECATED: Raised by Get() when the requested entity is not found.
+ """
+
+class BadArgumentError(Error):
+ """Raised by Query.Order(), Iterator.Next(), and others when they're
+ passed an invalid argument.
+ """
+
+class QueryNotFoundError(Error):
+ """DEPRECATED: Raised by Iterator methods when the Iterator is invalid. This
+ should not happen during normal usage; it protects against malicious users
+ and system errors.
+ """
+
+class TransactionNotFoundError(Error):
+ """DEPRECATED: Raised by RunInTransaction. This is an internal error; you
+ should not see this.
+ """
+
+class Rollback(Error):
+ """May be raised by transaction functions when they want to roll back
+ instead of committing. Note that *any* exception raised by a transaction
+ function will cause a rollback. This is purely for convenience. See
+ datastore.RunInTransaction for details.
+ """
+
+class TransactionFailedError(Error):
+ """Raised by RunInTransaction methods when the transaction could not be
+ committed, even after retrying. This is usually due to high contention.
+ """
+
+class BadFilterError(Error):
+ """Raised by Query.__setitem__() and Query.Run() when a filter string is
+ invalid.
+ """
+ def __init__(self, filter):
+ self.filter = filter
+
+ def __str__(self):
+ return (u'BadFilterError: invalid filter: %s.' % self.filter)
+
+class BadQueryError(Error):
+ """Raised by Query when a query or query string is invalid.
+ """
+
+class BadKeyError(Error):
+ """Raised by Key.__str__ when the key is invalid.
+ """
+
+class InternalError(Error):
+ """An internal datastore error. Please report this to Google.
+ """
+
+class NeedIndexError(Error):
+ """No matching index was found for a query that requires an index. Check
+ the Indexes page in the Admin Console and your index.yaml file.
+ """
+
+class Timeout(Error):
+ """The datastore operation timed out. This can happen when you attempt to
+ put, get, or delete too many entities or an entity with too many properties,
+ or if the datastore is overloaded or having trouble.
+ """
diff --git a/google_appengine/google/appengine/api/datastore_errors.pyc b/google_appengine/google/appengine/api/datastore_errors.pyc
new file mode 100644
index 0000000..4f947c1
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore_file_stub.py b/google_appengine/google/appengine/api/datastore_file_stub.py
new file mode 100755
index 0000000..ebd47fe
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_file_stub.py
@@ -0,0 +1,1061 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""
+In-memory persistent stub for the Python datastore API. Gets, queries,
+and searches are implemented as in-memory scans over all entities.
+
+Stores entities across sessions as pickled proto bufs in a single file. On
+startup, all entities are read from the file and loaded into memory. On
+every Put(), the file is wiped and all entities are written from scratch.
+Clients can also manually Read() and Write() the file themselves.
+
+Transactions are serialized through __tx_lock. Each transaction acquires it
+when it begins and releases it when it commits or rolls back. This is
+important, since there are other member variables like __tx_snapshot that are
+per-transaction, so they should only be used by one tx at a time.
+"""
+
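+# Example wiring (an illustrative sketch of how a test might install this
+# stub; assumes apiproxy_stub_map from google.appengine.api):
+#
+#   stub = DatastoreFileStub('my-app', '/tmp/datastore.bin', '/tmp/history.bin')
+#   apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', stub)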
+
+
+
+
+
+import datetime
+import logging
+import md5
+import os
+import struct
+import sys
+import tempfile
+import threading
+import warnings
+
+import cPickle as pickle
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import datastore
+from google.appengine.api import datastore_admin
+from google.appengine.api import datastore_errors
+from google.appengine.api import datastore_types
+from google.appengine.api import users
+from google.appengine.datastore import datastore_pb
+from google.appengine.datastore import datastore_index
+from google.appengine.runtime import apiproxy_errors
+from google.net.proto import ProtocolBuffer
+from google.appengine.datastore import entity_pb
+
+warnings.filterwarnings('ignore', 'tempnam is a potential security risk')
+
+
+entity_pb.Reference.__hash__ = lambda self: hash(self.Encode())
+datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())
+
+
+_MAXIMUM_RESULTS = 1000
+
+
+_MAX_QUERY_OFFSET = 1000
+
+
+_MAX_QUERY_COMPONENTS = 100
+
+_BATCH_SIZE = 20
+
+class _StoredEntity(object):
+ """Simple wrapper around an entity stored by the stub.
+
+ Public properties:
+ protobuf: Native protobuf Python object, entity_pb.EntityProto.
+ encoded_protobuf: Encoded binary representation of above protobuf.
+ native: datastore.Entity instance.
+ """
+
+ def __init__(self, entity):
+ """Create a _StoredEntity object and store an entity.
+
+ Args:
+ entity: entity_pb.EntityProto to store.
+ """
+ self.protobuf = entity
+
+ self.encoded_protobuf = entity.Encode()
+
+ self.native = datastore.Entity._FromPb(entity)
+
+
+class _Cursor(object):
+ """A query cursor.
+
+ Public properties:
+ cursor: the integer cursor
+ count: the original total number of results
+ keys_only: whether the query is keys_only
+
+ Class attributes:
+ _next_cursor: the next cursor to allocate
+ _next_cursor_lock: protects _next_cursor
+ """
+ _next_cursor = 1
+ _next_cursor_lock = threading.Lock()
+
+ def __init__(self, results, keys_only):
+ """Constructor.
+
+ Args:
+ # the query results, in order, such that pop(0) is the next result
+ results: list of entity_pb.EntityProto
+ keys_only: integer
+ """
+ self.__results = results
+ self.count = len(results)
+ self.keys_only = keys_only
+
+ self._next_cursor_lock.acquire()
+ try:
+ self.cursor = _Cursor._next_cursor
+ _Cursor._next_cursor += 1
+ finally:
+ self._next_cursor_lock.release()
+
+ def PopulateQueryResult(self, result, count):
+ """Populates a QueryResult with this cursor and the given number of results.
+
+ Args:
+ result: datastore_pb.QueryResult
+ count: integer
+ """
+ result.mutable_cursor().set_cursor(self.cursor)
+ result.set_keys_only(self.keys_only)
+
+ results_pbs = [r._ToPb() for r in self.__results[:count]]
+ result.result_list().extend(results_pbs)
+ del self.__results[:count]
+
+ result.set_more_results(len(self.__results) > 0)
+
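+# Example (an illustrative sketch): a cursor over 25 results serves them in
+# batches and reports more_results until it is drained.
+#
+#   cursor = _Cursor(results, keys_only=False)  # cursor.count == 25
+#   cursor.PopulateQueryResult(result_pb, 20)   # fills 20; more_results True
+#   cursor.PopulateQueryResult(result_pb, 20)   # fills 5; more_results False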
+
+class DatastoreFileStub(apiproxy_stub.APIProxyStub):
+ """ Persistent stub for the Python datastore API.
+
+ Stores all entities in memory, and persists them to a file as pickled
+ protocol buffers. A DatastoreFileStub instance handles a single app's data
+ and is backed by files on disk.
+ """
+
+ _PROPERTY_TYPE_TAGS = {
+ datastore_types.Blob: entity_pb.PropertyValue.kstringValue,
+ bool: entity_pb.PropertyValue.kbooleanValue,
+ datastore_types.Category: entity_pb.PropertyValue.kstringValue,
+ datetime.datetime: entity_pb.PropertyValue.kint64Value,
+ datastore_types.Email: entity_pb.PropertyValue.kstringValue,
+ float: entity_pb.PropertyValue.kdoubleValue,
+ datastore_types.GeoPt: entity_pb.PropertyValue.kPointValueGroup,
+ datastore_types.IM: entity_pb.PropertyValue.kstringValue,
+ int: entity_pb.PropertyValue.kint64Value,
+ datastore_types.Key: entity_pb.PropertyValue.kReferenceValueGroup,
+ datastore_types.Link: entity_pb.PropertyValue.kstringValue,
+ long: entity_pb.PropertyValue.kint64Value,
+ datastore_types.PhoneNumber: entity_pb.PropertyValue.kstringValue,
+ datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,
+ datastore_types.Rating: entity_pb.PropertyValue.kint64Value,
+ str: entity_pb.PropertyValue.kstringValue,
+ datastore_types.Text: entity_pb.PropertyValue.kstringValue,
+ type(None): 0,
+ unicode: entity_pb.PropertyValue.kstringValue,
+ users.User: entity_pb.PropertyValue.kUserValueGroup,
+ }
+
+ WRITE_ONLY = entity_pb.CompositeIndex.WRITE_ONLY
+ READ_WRITE = entity_pb.CompositeIndex.READ_WRITE
+ DELETED = entity_pb.CompositeIndex.DELETED
+ ERROR = entity_pb.CompositeIndex.ERROR
+
+ _INDEX_STATE_TRANSITIONS = {
+ WRITE_ONLY: frozenset((READ_WRITE, DELETED, ERROR)),
+ READ_WRITE: frozenset((DELETED,)),
+ ERROR: frozenset((DELETED,)),
+ DELETED: frozenset((ERROR,)),
+ }
+
+ def __init__(self,
+ app_id,
+ datastore_file,
+ history_file,
+ require_indexes=False,
+ service_name='datastore_v3',
+ trusted=False):
+ """Constructor.
+
+ Initializes and loads the datastore from the backing files, if they exist.
+
+ Args:
+ app_id: string
+ datastore_file: string, stores all entities across sessions. Use None
+ not to use a file.
+ history_file: string, stores query history. Use None as with
+ datastore_file.
+ require_indexes: bool, default False. If True, composite indexes must
+ exist in index.yaml for queries that need them.
+ service_name: Service name expected for all calls.
+ trusted: bool, default False. If True, this stub allows an app to
+ access the data of another app.
+ """
+ super(DatastoreFileStub, self).__init__(service_name)
+
+
+ assert isinstance(app_id, basestring) and app_id != ''
+ self.__app_id = app_id
+ self.__datastore_file = datastore_file
+ self.__history_file = history_file
+ self.SetTrusted(trusted)
+
+ self.__entities = {}
+
+ self.__schema_cache = {}
+
+ self.__tx_snapshot = {}
+
+ self.__queries = {}
+
+ self.__transactions = {}
+
+ self.__indexes = {}
+ self.__require_indexes = require_indexes
+
+ self.__query_history = {}
+
+ self.__next_id = 1
+ self.__next_tx_handle = 1
+ self.__next_index_id = 1
+ self.__id_lock = threading.Lock()
+ self.__tx_handle_lock = threading.Lock()
+ self.__index_id_lock = threading.Lock()
+ self.__tx_lock = threading.Lock()
+ self.__entities_lock = threading.Lock()
+ self.__file_lock = threading.Lock()
+ self.__indexes_lock = threading.Lock()
+
+ self.Read()
+
+ def Clear(self):
+ """ Clears the datastore by deleting all currently stored entities and
+ queries. """
+ self.__entities = {}
+ self.__queries = {}
+ self.__transactions = {}
+ self.__query_history = {}
+ self.__schema_cache = {}
+
+ def SetTrusted(self, trusted):
+ """Set/clear the trusted bit in the stub.
+
+ This bit indicates that the app calling the stub is trusted. A
+ trusted app can write to datastores of other apps.
+
+ Args:
+ trusted: boolean.
+ """
+ self.__trusted = trusted
+
+ def __ValidateAppId(self, app_id):
+ """Verify that this is the stub for app_id.
+
+ Args:
+ app_id: An application ID.
+
+ Raises:
+ datastore_errors.BadRequestError: if this is not the stub for app_id.
+ """
+ if not self.__trusted and app_id != self.__app_id:
+ raise datastore_errors.BadRequestError(
+ 'app %s cannot access app %s\'s data' % (self.__app_id, app_id))
+
+ def __ValidateKey(self, key):
+ """Validate this key.
+
+ Args:
+ key: entity_pb.Reference
+
+ Raises:
+ datastore_errors.BadRequestError: if the key is invalid
+ """
+ assert isinstance(key, entity_pb.Reference)
+
+ self.__ValidateAppId(key.app())
+
+ for elem in key.path().element_list():
+ if elem.has_id() == elem.has_name():
+ raise datastore_errors.BadRequestError(
+ 'each key path element should have id or name but not both: %r' % key)
+
+ def _AppIdNamespaceKindForKey(self, key):
+ """ Get (app, kind) tuple from given key.
+
+ The (app, kind) tuple is used as an index into several internal
+ dictionaries, e.g. __entities.
+
+ Args:
+ key: entity_pb.Reference
+
+ Returns:
+ Tuple (app, kind), both are unicode strings.
+ """
+ last_path = key.path().element_list()[-1]
+ return key.app(), last_path.type()
+
+ def _StoreEntity(self, entity):
+ """ Store the given entity.
+
+ Args:
+ entity: entity_pb.EntityProto
+ """
+ key = entity.key()
+ app_kind = self._AppIdNamespaceKindForKey(key)
+ if app_kind not in self.__entities:
+ self.__entities[app_kind] = {}
+ self.__entities[app_kind][key] = _StoredEntity(entity)
+
+ if app_kind in self.__schema_cache:
+ del self.__schema_cache[app_kind]
+
+ READ_PB_EXCEPTIONS = (ProtocolBuffer.ProtocolBufferDecodeError, LookupError,
+ TypeError, ValueError)
+ READ_ERROR_MSG = ('Data in %s is corrupt or a different version. '
+ 'Try running with the --clear_datastore flag.\n%r')
+ READ_PY250_MSG = ('Are you using FloatProperty and/or GeoPtProperty? '
+ 'Unfortunately loading float values from the datastore '
+ 'file does not work with Python 2.5.0. '
+ 'Please upgrade to a newer Python 2.5 release or use '
+ 'the --clear_datastore flag.\n')
+
+ def Read(self):
+ """ Reads the datastore and history files into memory.
+
+ The in-memory query history is cleared, but the datastore is *not*
+ cleared; the entities in the files are merged into the entities in memory.
+ If you want them to overwrite the in-memory datastore, call Clear() before
+ calling Read().
+
+ If the datastore file contains an entity with the same app name, kind, and
+ key as an entity already in the datastore, the entity from the file
+ overwrites the entity in the datastore.
+
+ Also sets __next_id to one greater than the highest id allocated so far.
+ """
+ if self.__datastore_file and self.__datastore_file != '/dev/null':
+ for encoded_entity in self.__ReadPickled(self.__datastore_file):
+ try:
+ entity = entity_pb.EntityProto(encoded_entity)
+ except self.READ_PB_EXCEPTIONS, e:
+ raise datastore_errors.InternalError(self.READ_ERROR_MSG %
+ (self.__datastore_file, e))
+ except struct.error, e:
+ if (sys.version_info[0:3] == (2, 5, 0)
+ and e.message.startswith('unpack requires a string argument')):
+ raise datastore_errors.InternalError(self.READ_PY250_MSG +
+ self.READ_ERROR_MSG %
+ (self.__datastore_file, e))
+ else:
+ raise
+
+ self._StoreEntity(entity)
+
+ last_path = entity.key().path().element_list()[-1]
+ if last_path.has_id() and last_path.id() >= self.__next_id:
+ self.__next_id = last_path.id() + 1
+
+ self.__query_history = {}
+ for encoded_query, count in self.__ReadPickled(self.__history_file):
+ try:
+ query_pb = datastore_pb.Query(encoded_query)
+ except self.READ_PB_EXCEPTIONS, e:
+ raise datastore_errors.InternalError(self.READ_ERROR_MSG %
+ (self.__history_file, e))
+
+ if query_pb in self.__query_history:
+ self.__query_history[query_pb] += count
+ else:
+ self.__query_history[query_pb] = count
+
+ def Write(self):
+ """ Writes out the datastore and history files. Be careful! If the files
+ already exist, this method overwrites them!
+ """
+ self.__WriteDatastore()
+ self.__WriteHistory()
+
+ def __WriteDatastore(self):
+ """ Writes out the datastore file. Be careful! If the file already exist,
+ this method overwrites it!
+ """
+ if self.__datastore_file and self.__datastore_file != '/dev/null':
+ encoded = []
+ for kind_dict in self.__entities.values():
+ for entity in kind_dict.values():
+ encoded.append(entity.encoded_protobuf)
+
+ self.__WritePickled(encoded, self.__datastore_file)
+
+ def __WriteHistory(self):
+ """ Writes out the history file. Be careful! If the file already exist,
+ this method overwrites it!
+ """
+ if self.__history_file and self.__history_file != '/dev/null':
+ encoded = [(query.Encode(), count)
+ for query, count in self.__query_history.items()]
+
+ self.__WritePickled(encoded, self.__history_file)
+
+ def __ReadPickled(self, filename):
+ """Reads a pickled object from the given file and returns it.
+ """
+ self.__file_lock.acquire()
+
+ try:
+ try:
+ if filename and filename != '/dev/null' and os.path.isfile(filename):
+ return pickle.load(open(filename, 'rb'))
+ else:
+ logging.warning('Could not read datastore data from %s', filename)
+ except (AttributeError, LookupError, ImportError, NameError, TypeError,
+ ValueError, struct.error, pickle.PickleError), e:
+ raise datastore_errors.InternalError(
+ 'Could not read data from %s. Try running with the '
+ '--clear_datastore flag. Cause:\n%r' % (filename, e))
+ finally:
+ self.__file_lock.release()
+
+ return []
+
+ def __WritePickled(self, obj, filename, openfile=file):
+ """Pickles the object and writes it to the given file.
+ """
+ if not filename or filename == '/dev/null' or not obj:
+ return
+
+ tmpfile = openfile(os.tempnam(os.path.dirname(filename)), 'wb')
+
+ pickler = pickle.Pickler(tmpfile, protocol=1)
+ pickler.fast = True
+ pickler.dump(obj)
+
+ tmpfile.close()
+
+ self.__file_lock.acquire()
+ try:
+ try:
+ os.rename(tmpfile.name, filename)
+ except OSError:
+ try:
+ os.remove(filename)
+ except:
+ pass
+ os.rename(tmpfile.name, filename)
+ finally:
+ self.__file_lock.release()
+
+ def MakeSyncCall(self, service, call, request, response):
+ """ The main RPC entry point. service must be 'datastore_v3'.
+ """
+ self.assertPbIsInitialized(request)
+ super(DatastoreFileStub, self).MakeSyncCall(service,
+ call,
+ request,
+ response)
+ self.assertPbIsInitialized(response)
+
+ def assertPbIsInitialized(self, pb):
+ """Raises an exception if the given PB is not initialized and valid."""
+ explanation = []
+ assert pb.IsInitialized(explanation), explanation
+ pb.Encode()
+
+ def QueryHistory(self):
+ """Returns a dict that maps Query PBs to times they've been run.
+ """
+ return dict((pb, times) for pb, times in self.__query_history.items()
+ if pb.app() == self.__app_id)
+
+ def _Dynamic_Put(self, put_request, put_response):
+ clones = []
+ for entity in put_request.entity_list():
+ self.__ValidateKey(entity.key())
+
+ clone = entity_pb.EntityProto()
+ clone.CopyFrom(entity)
+
+ for property in clone.property_list():
+ if property.value().has_uservalue():
+ uid = md5.new(property.value().uservalue().email().lower()).digest()
+ uid = '1' + ''.join(['%02d' % ord(x) for x in uid])[:20]
+ property.mutable_value().mutable_uservalue().set_obfuscated_gaiaid(
+ uid)
+
+ clones.append(clone)
+
+ assert clone.has_key()
+ assert clone.key().path().element_size() > 0
+
+ last_path = clone.key().path().element_list()[-1]
+ if last_path.id() == 0 and not last_path.has_name():
+ self.__id_lock.acquire()
+ last_path.set_id(self.__next_id)
+ self.__next_id += 1
+ self.__id_lock.release()
+
+ assert clone.entity_group().element_size() == 0
+ group = clone.mutable_entity_group()
+ root = clone.key().path().element(0)
+ group.add_element().CopyFrom(root)
+
+ else:
+ assert (clone.has_entity_group() and
+ clone.entity_group().element_size() > 0)
+
+ self.__entities_lock.acquire()
+
+ try:
+ for clone in clones:
+ self._StoreEntity(clone)
+ finally:
+ self.__entities_lock.release()
+
+ if not put_request.has_transaction():
+ self.__WriteDatastore()
+
+ put_response.key_list().extend([c.key() for c in clones])
+
+
+ def _Dynamic_Get(self, get_request, get_response):
+ if get_request.has_transaction():
+ entities = self.__tx_snapshot
+ else:
+ entities = self.__entities
+
+ for key in get_request.key_list():
+ self.__ValidateAppId(key.app())
+ app_kind = self._AppIdNamespaceKindForKey(key)
+
+ group = get_response.add_entity()
+ try:
+ entity = entities[app_kind][key].protobuf
+ except KeyError:
+ entity = None
+
+ if entity:
+ group.mutable_entity().CopyFrom(entity)
+
+
+ def _Dynamic_Delete(self, delete_request, delete_response):
+ self.__entities_lock.acquire()
+ try:
+ for key in delete_request.key_list():
+ self.__ValidateAppId(key.app())
+ app_kind = self._AppIdNamespaceKindForKey(key)
+ try:
+ del self.__entities[app_kind][key]
+ if not self.__entities[app_kind]:
+ del self.__entities[app_kind]
+
+ del self.__schema_cache[app_kind]
+ except KeyError:
+ pass
+
+ if not delete_request.has_transaction():
+ self.__WriteDatastore()
+ finally:
+ self.__entities_lock.release()
+
+
+ def _Dynamic_RunQuery(self, query, query_result):
+ if not self.__tx_lock.acquire(False):
+ if not query.has_ancestor():
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Only ancestor queries are allowed inside transactions.')
+ entities = self.__tx_snapshot
+ else:
+ entities = self.__entities
+ self.__tx_lock.release()
+
+ app_id_namespace = datastore_types.parse_app_id_namespace(query.app())
+ app_id = app_id_namespace.app_id()
+ self.__ValidateAppId(app_id)
+
+ if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
+ raise apiproxy_errors.ApplicationError(
+          datastore_pb.Error.BAD_REQUEST, 'Query offset is too large.')
+
+ num_components = len(query.filter_list()) + len(query.order_list())
+ if query.has_ancestor():
+ num_components += 1
+ if num_components > _MAX_QUERY_COMPONENTS:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+          ('query is too large. may not have more than %s filters,'
+           ' sort orders, and ancestor checks combined' % _MAX_QUERY_COMPONENTS))
+
+ (filters, orders) = datastore_index.Normalize(query.filter_list(),
+ query.order_list())
+
+ if self.__require_indexes:
+      required, kind, ancestor, props, num_eq_filters = (
+          datastore_index.CompositeIndexForQuery(query))
+ if required:
+ required_key = kind, ancestor, props
+ indexes = self.__indexes.get(app_id)
+ if not indexes:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.NEED_INDEX,
+ "This query requires a composite index, but none are defined. "
+ "You must create an index.yaml file in your application root.")
+ eq_filters_set = set(props[:num_eq_filters])
+ remaining_filters = props[num_eq_filters:]
+ for index in indexes:
+ definition = datastore_admin.ProtoToIndexDefinition(index)
+ index_key = datastore_index.IndexToKey(definition)
+ if required_key == index_key:
+ break
+ if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
+ this_props = index_key[2]
+ this_eq_filters_set = set(this_props[:num_eq_filters])
+ this_remaining_filters = this_props[num_eq_filters:]
+ if (eq_filters_set == this_eq_filters_set and
+ remaining_filters == this_remaining_filters):
+ break
+ else:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.NEED_INDEX,
+ "This query requires a composite index that is not defined. "
+ "You must update the index.yaml file in your application root.")
+
+ try:
+ query.set_app(app_id_namespace.to_encoded())
+ if query.has_kind():
+ results = entities[app_id_namespace.to_encoded(), query.kind()].values()
+ results = [entity.native for entity in results]
+ else:
+ results = []
+ for key in entities:
+ if key[0] == app_id_namespace.to_encoded():
+ results += [entity.native for entity in entities[key].values()]
+ except KeyError:
+ results = []
+
+ if query.has_ancestor():
+ ancestor_path = query.ancestor().path().element_list()
+ def is_descendant(entity):
+ path = entity.key()._Key__reference.path().element_list()
+ return path[:len(ancestor_path)] == ancestor_path
+ results = filter(is_descendant, results)
+
+ operators = {datastore_pb.Query_Filter.LESS_THAN: '<',
+ datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
+ datastore_pb.Query_Filter.GREATER_THAN: '>',
+ datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
+ datastore_pb.Query_Filter.EQUAL: '==',
+ }
+
+ def has_prop_indexed(entity, prop):
+ """Returns True if prop is in the entity and is indexed."""
+ if prop in datastore_types._SPECIAL_PROPERTIES:
+ return True
+ elif prop in entity.unindexed_properties():
+ return False
+
+ values = entity.get(prop, [])
+ if not isinstance(values, (tuple, list)):
+ values = [values]
+
+ for value in values:
+ if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
+ return True
+ return False
+
+ for filt in filters:
+ assert filt.op() != datastore_pb.Query_Filter.IN
+
+ prop = filt.property(0).name().decode('utf-8')
+ op = operators[filt.op()]
+
+ filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
+ for filter_prop in filt.property_list()]
+
+ def passes_filter(entity):
+ """Returns True if the entity passes the filter, False otherwise.
+
+ The filter being evaluated is filt, the current filter that we're on
+ in the list of filters in the query.
+ """
+ if not has_prop_indexed(entity, prop):
+ return False
+
+ try:
+ entity_vals = datastore._GetPropertyValue(entity, prop)
+ except KeyError:
+ entity_vals = []
+
+ if not isinstance(entity_vals, list):
+ entity_vals = [entity_vals]
+
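+        # Comparison works by building a Python expression from the repr()s of
+        # the two values and eval()ing it. When the two sides have different
+        # datastore types, non-equality filters instead compare the types'
+        # PropertyValue tag numbers, mirroring the real datastore's type order.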
+ for fixed_entity_val in entity_vals:
+ for filter_val in filter_val_list:
+ fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
+ fixed_entity_val.__class__)
+ filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
+ if fixed_entity_type == filter_type:
+ comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
+ elif op != '==':
+ comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
+ else:
+ continue
+
+ logging.log(logging.DEBUG - 1,
+ 'Evaling filter expression "%s"', comp)
+
+ try:
+ ret = eval(comp)
+ if ret and ret != NotImplementedError:
+ return True
+ except TypeError:
+ pass
+
+ return False
+
+ results = filter(passes_filter, results)
+
+ for order in orders:
+ prop = order.property().decode('utf-8')
+ results = [entity for entity in results if has_prop_indexed(entity, prop)]
+
+ def order_compare_entities(a, b):
+ """ Return a negative, zero or positive number depending on whether
+ entity a is considered smaller than, equal to, or larger than b,
+ according to the query's orderings. """
+ cmped = 0
+ for o in orders:
+ prop = o.property().decode('utf-8')
+
+ reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
+
+ a_val = datastore._GetPropertyValue(a, prop)
+ if isinstance(a_val, list):
+ a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
+
+ b_val = datastore._GetPropertyValue(b, prop)
+ if isinstance(b_val, list):
+ b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
+
+ cmped = order_compare_properties(a_val, b_val)
+
+ if o.direction() is datastore_pb.Query_Order.DESCENDING:
+ cmped = -cmped
+
+ if cmped != 0:
+ return cmped
+
+ if cmped == 0:
+ return cmp(a.key(), b.key())
+
+ def order_compare_properties(x, y):
+ """Return a negative, zero or positive number depending on whether
+ property value x is considered smaller than, equal to, or larger than
+ property value y. If x and y are different types, they're compared based
+ on the type ordering used in the real datastore, which is based on the
+ tag numbers in the PropertyValue PB.
+ """
+ if isinstance(x, datetime.datetime):
+ x = datastore_types.DatetimeToTimestamp(x)
+ if isinstance(y, datetime.datetime):
+ y = datastore_types.DatetimeToTimestamp(y)
+
+ x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
+ y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)
+
+ if x_type == y_type:
+ try:
+ return cmp(x, y)
+ except TypeError:
+ return 0
+ else:
+ return cmp(x_type, y_type)
+
+ results.sort(order_compare_entities)
+
+ offset = 0
+ limit = len(results)
+ if query.has_offset():
+ offset = query.offset()
+ if query.has_limit():
+ limit = query.limit()
+ if limit > _MAXIMUM_RESULTS:
+ limit = _MAXIMUM_RESULTS
+ results = results[offset:limit + offset]
+
+ clone = datastore_pb.Query()
+ clone.CopyFrom(query)
+ clone.clear_hint()
+ if clone in self.__query_history:
+ self.__query_history[clone] += 1
+ else:
+ self.__query_history[clone] = 1
+ self.__WriteHistory()
+
+ cursor = _Cursor(results, query.keys_only())
+ self.__queries[cursor.cursor] = cursor
+
+ if query.has_count():
+ count = query.count()
+ elif query.has_limit():
+ count = query.limit()
+ else:
+ count = _BATCH_SIZE
+
+ cursor.PopulateQueryResult(query_result, count)
+
+ def _Dynamic_Next(self, next_request, query_result):
+ cursor_handle = next_request.cursor().cursor()
+
+ try:
+ cursor = self.__queries[cursor_handle]
+ except KeyError:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST, 'Cursor %d not found' % cursor_handle)
+
+ count = _BATCH_SIZE
+ if next_request.has_count():
+ count = next_request.count()
+ cursor.PopulateQueryResult(query_result, count)
+
+ def _Dynamic_Count(self, query, integer64proto):
+ self.__ValidateAppId(query.app())
+ query_result = datastore_pb.QueryResult()
+ self._Dynamic_RunQuery(query, query_result)
+ cursor = query_result.cursor().cursor()
+ integer64proto.set_value(self.__queries[cursor].count)
+ del self.__queries[cursor]
+
+ def _Dynamic_BeginTransaction(self, request, transaction):
+ self.__tx_handle_lock.acquire()
+ handle = self.__next_tx_handle
+ self.__next_tx_handle += 1
+ self.__tx_handle_lock.release()
+
+ self.__transactions[handle] = None
+ transaction.set_handle(handle)
+
+ self.__tx_lock.acquire()
+ snapshot = [(app_kind, dict(entities))
+ for app_kind, entities in self.__entities.items()]
+ self.__tx_snapshot = dict(snapshot)
+
+ def _Dynamic_Commit(self, transaction, transaction_response):
+    if transaction.handle() not in self.__transactions:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Transaction handle %d not found' % transaction.handle())
+
+ self.__tx_snapshot = {}
+ try:
+ self.__WriteDatastore()
+ finally:
+ self.__tx_lock.release()
+
+ def _Dynamic_Rollback(self, transaction, transaction_response):
+    if transaction.handle() not in self.__transactions:
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ 'Transaction handle %d not found' % transaction.handle())
+
+ self.__entities = self.__tx_snapshot
+ self.__tx_snapshot = {}
+ self.__tx_lock.release()
+
+ def _Dynamic_GetSchema(self, req, schema):
+ app_str = req.app()
+ self.__ValidateAppId(app_str)
+
+ kinds = []
+
+ for app, kind in self.__entities:
+ if (app != app_str or
+ (req.has_start_kind() and kind < req.start_kind()) or
+ (req.has_end_kind() and kind > req.end_kind())):
+ continue
+
+ app_kind = (app, kind)
+ if app_kind in self.__schema_cache:
+ kinds.append(self.__schema_cache[app_kind])
+ continue
+
+ kind_pb = entity_pb.EntityProto()
+ kind_pb.mutable_key().set_app('')
+ kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
+ kind_pb.mutable_entity_group()
+
+ props = {}
+
+ for entity in self.__entities[app_kind].values():
+ for prop in entity.protobuf.property_list():
+ if prop.name() not in props:
+ props[prop.name()] = entity_pb.PropertyValue()
+ props[prop.name()].MergeFrom(prop.value())
+
+ for value_pb in props.values():
+ if value_pb.has_int64value():
+ value_pb.set_int64value(0)
+ if value_pb.has_booleanvalue():
+ value_pb.set_booleanvalue(False)
+ if value_pb.has_stringvalue():
+ value_pb.set_stringvalue('none')
+ if value_pb.has_doublevalue():
+ value_pb.set_doublevalue(0.0)
+ if value_pb.has_pointvalue():
+ value_pb.mutable_pointvalue().set_x(0.0)
+ value_pb.mutable_pointvalue().set_y(0.0)
+ if value_pb.has_uservalue():
+ value_pb.mutable_uservalue().set_gaiaid(0)
+ value_pb.mutable_uservalue().set_email('none')
+ value_pb.mutable_uservalue().set_auth_domain('none')
+ value_pb.mutable_uservalue().clear_nickname()
+ value_pb.mutable_uservalue().clear_obfuscated_gaiaid()
+ if value_pb.has_referencevalue():
+ value_pb.clear_referencevalue()
+ value_pb.mutable_referencevalue().set_app('none')
+ pathelem = value_pb.mutable_referencevalue().add_pathelement()
+ pathelem.set_type('none')
+ pathelem.set_name('none')
+
+ for name, value_pb in props.items():
+ prop_pb = kind_pb.add_property()
+ prop_pb.set_name(name)
+ prop_pb.set_multiple(False)
+ prop_pb.mutable_value().CopyFrom(value_pb)
+
+ kinds.append(kind_pb)
+ self.__schema_cache[app_kind] = kind_pb
+
+ for kind_pb in kinds:
+ kind = schema.add_kind()
+ kind.CopyFrom(kind_pb)
+ if not req.properties():
+ kind.clear_property()
+
+ schema.set_more_results(False)
+
+ def _Dynamic_AllocateIds(self, allocate_ids_request, allocate_ids_response):
+ model_key = allocate_ids_request.model_key()
+ size = allocate_ids_request.size()
+
+ self.__ValidateAppId(model_key.app())
+
+ try:
+ self.__id_lock.acquire()
+ start = self.__next_id
+ self.__next_id += size
+ end = self.__next_id - 1
+ finally:
+ self.__id_lock.release()
+
+ allocate_ids_response.set_start(start)
+ allocate_ids_response.set_end(end)
+
+ def _Dynamic_CreateIndex(self, index, id_response):
+ self.__ValidateAppId(index.app_id())
+ if index.id() != 0:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ 'New index id must be 0.')
+ elif self.__FindIndex(index):
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ 'Index already exists.')
+
+ self.__index_id_lock.acquire()
+ index.set_id(self.__next_index_id)
+ id_response.set_value(self.__next_index_id)
+ self.__next_index_id += 1
+ self.__index_id_lock.release()
+
+ clone = entity_pb.CompositeIndex()
+ clone.CopyFrom(index)
+ app = index.app_id()
+ clone.set_app_id(app)
+
+ self.__indexes_lock.acquire()
+ try:
+ if app not in self.__indexes:
+ self.__indexes[app] = []
+ self.__indexes[app].append(clone)
+ finally:
+ self.__indexes_lock.release()
+
+ def _Dynamic_GetIndices(self, app_str, composite_indices):
+ self.__ValidateAppId(app_str.value())
+ composite_indices.index_list().extend(
+ self.__indexes.get(app_str.value(), []))
+
+ def _Dynamic_UpdateIndex(self, index, void):
+ self.__ValidateAppId(index.app_id())
+ stored_index = self.__FindIndex(index)
+ if not stored_index:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ "Index doesn't exist.")
+ elif (index.state() != stored_index.state() and
+ index.state() not in self._INDEX_STATE_TRANSITIONS[stored_index.state()]):
+ raise apiproxy_errors.ApplicationError(
+ datastore_pb.Error.BAD_REQUEST,
+ "cannot move index state from %s to %s" %
+ (entity_pb.CompositeIndex.State_Name(stored_index.state()),
+ (entity_pb.CompositeIndex.State_Name(index.state()))))
+
+ self.__indexes_lock.acquire()
+ try:
+ stored_index.set_state(index.state())
+ finally:
+ self.__indexes_lock.release()
+
+ def _Dynamic_DeleteIndex(self, index, void):
+ self.__ValidateAppId(index.app_id())
+ stored_index = self.__FindIndex(index)
+ if not stored_index:
+ raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
+ "Index doesn't exist.")
+
+ app = index.app_id()
+ self.__indexes_lock.acquire()
+ try:
+ self.__indexes[app].remove(stored_index)
+ finally:
+ self.__indexes_lock.release()
+
+ def __FindIndex(self, index):
+ """Finds an existing index by definition.
+
+ Args:
+      index: entity_pb.CompositeIndex
+
+ Returns:
+ entity_pb.CompositeIndex, if it exists; otherwise None
+ """
+ app = index.app_id()
+ self.__ValidateAppId(app)
+ if app in self.__indexes:
+ for stored_index in self.__indexes[app]:
+ if index.definition() == stored_index.definition():
+ return stored_index
+
+ return None
diff --git a/google_appengine/google/appengine/api/datastore_file_stub.pyc b/google_appengine/google/appengine/api/datastore_file_stub.pyc
new file mode 100644
index 0000000..2efca54
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_file_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/datastore_types.py b/google_appengine/google/appengine/api/datastore_types.py
new file mode 100755
index 0000000..c2d1d5f
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_types.py
@@ -0,0 +1,1788 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Higher-level, semantic data types for the datastore. These types
+are expected to be set as attributes of Entities. See "Supported Data Types"
+in the API Guide.
+
+Most of these types are based on XML elements from Atom and GData elements
+from the atom and gd namespaces. For more information, see:
+
+ http://www.atomenabled.org/developers/syndication/
+ http://code.google.com/apis/gdata/common-elements.html
+
+The namespace schemas are:
+
+ http://www.w3.org/2005/Atom
+ http://schemas.google.com/g/2005
+"""
+
+
+
+
+
+import base64
+import calendar
+import datetime
+import os
+import re
+import string
+import time
+import urlparse
+from xml.sax import saxutils
+from google.appengine.datastore import datastore_pb
+from google.appengine.api import datastore_errors
+from google.appengine.api import users
+from google.appengine.api import namespace_manager
+from google.net.proto import ProtocolBuffer
+from google.appengine.datastore import entity_pb
+
+_MAX_STRING_LENGTH = 500
+
+_MAX_LINK_PROPERTY_LENGTH = 2083
+
+RESERVED_PROPERTY_NAME = re.compile('^__.*__$')
+
+_KEY_SPECIAL_PROPERTY = '__key__'
+_SPECIAL_PROPERTIES = frozenset([_KEY_SPECIAL_PROPERTY])
+
+_NAMESPACE_SEPARATOR = '!'
+
+class UtcTzinfo(datetime.tzinfo):
+ def utcoffset(self, dt): return datetime.timedelta(0)
+ def dst(self, dt): return datetime.timedelta(0)
+ def tzname(self, dt): return 'UTC'
+ def __repr__(self): return 'datastore_types.UTC'
+
+UTC = UtcTzinfo()
+
+
+def typename(obj):
+ """Returns the type of obj as a string. More descriptive and specific than
+ type(obj), and safe for any object, unlike __class__."""
+ if hasattr(obj, '__class__'):
+ return getattr(obj, '__class__').__name__
+ else:
+ return type(obj).__name__
+
+
+def ValidateString(value,
+ name='unused',
+ exception=datastore_errors.BadValueError,
+ max_len=_MAX_STRING_LENGTH,
+ empty_ok=False):
+ """Raises an exception if value is not a valid string or a subclass thereof.
+
+ A string is valid if it's not empty, no more than _MAX_STRING_LENGTH bytes,
+ and not a Blob. The exception type can be specified with the exception
+ argument; it defaults to BadValueError.
+
+ Args:
+ value: the value to validate.
+ name: the name of this value; used in the exception message.
+ exception: the type of exception to raise.
+ max_len: the maximum allowed length, in bytes.
+ empty_ok: allow empty value.
+ """
+ if value is None and empty_ok:
+ return
+ if not isinstance(value, basestring) or isinstance(value, Blob):
+ raise exception('%s should be a string; received %s (a %s):' %
+ (name, value, typename(value)))
+ if not value and not empty_ok:
+ raise exception('%s must not be empty.' % name)
+
+ if len(value.encode('utf-8')) > max_len:
+ raise exception('%s must be under %d bytes.' % (name, max_len))
+
+def ValidateInteger(value,
+ name='unused',
+ exception=datastore_errors.BadValueError,
+ empty_ok=False,
+ zero_ok=False,
+ negative_ok=False):
+ """Raises an exception if value is not a valid integer.
+
+  An integer is valid if it's an int and, unless the corresponding *_ok
+  argument allows otherwise, not None, zero, or negative.
+ The exception type can be specified with the exception argument;
+ it defaults to BadValueError.
+
+ Args:
+ value: the value to validate.
+ name: the name of this value; used in the exception message.
+ exception: the type of exception to raise.
+ empty_ok: allow None value.
+ zero_ok: allow zero value.
+ negative_ok: allow negative value.
+ """
+ if value is None and empty_ok:
+ return
+ if not isinstance(value, int):
+ raise exception('%s should be an integer; received %s (a %s).' %
+ (name, value, typename(value)))
+ if not value and not zero_ok:
+ raise exception('%s must not be 0 (zero)' % name)
+ if value < 0 and not negative_ok:
+ raise exception('%s must not be negative.' % name)
+
+def ResolveAppId(app, name='_app'):
+ """Validate app id, providing a default.
+
+ If the argument is None, $APPLICATION_ID is substituted.
+
+ Args:
+ app: The app id argument value to be validated.
+ name: The argument name, for error messages.
+
+ Returns:
+ The value of app, or the substituted default. Always a non-empty string.
+
+ Raises:
+ BadArgumentError if the value is empty or not a string.
+ """
+ if app is None:
+ app = os.environ.get('APPLICATION_ID', '')
+ ValidateString(app, '_app', datastore_errors.BadArgumentError)
+ return app
+
+
+class AppIdNamespace(object):
+ """Combined AppId and Namespace
+
+ An identifier that combines the application identifier and the
+ namespace.
+ """
+ __app_id = None
+ __namespace = None
+
+ def __init__(self, app_id, namespace):
+ """Constructor. Creates a AppIdNamespace from two strings.
+
+ Args:
+ app_id: application identifier string
+ namespace: namespace identifier string
+ Raises:
+ BadArgumentError if the values contain
+ the _NAMESPACE_SEPARATOR character (!) or
+ the app_id is empty.
+ """
+ self.__app_id = app_id
+ if namespace:
+ self.__namespace = namespace
+ else:
+ self.__namespace = None
+ ValidateString(self.__app_id, 'app_id', datastore_errors.BadArgumentError)
+ ValidateString(self.__namespace,
+ 'namespace', datastore_errors.BadArgumentError,
+ empty_ok=True)
+ if _NAMESPACE_SEPARATOR in self.__app_id:
+ raise datastore_errors.BadArgumentError(
+ 'app_id must not contain a "%s"' % _NAMESPACE_SEPARATOR)
+ if self.__namespace and _NAMESPACE_SEPARATOR in self.__namespace:
+ raise datastore_errors.BadArgumentError(
+ 'namespace must not contain a "%s"' % _NAMESPACE_SEPARATOR)
+
+ def __cmp__(self, other):
+ """Returns negative, zero, or positive when comparing two AppIdNamespace.
+
+ Args:
+ other: AppIdNamespace to compare to.
+
+ Returns:
+ Negative if self is less than "other"
+ Zero if "other" is equal to self
+ Positive if self is greater than "other"
+ """
+ if not isinstance(other, AppIdNamespace):
+ return cmp(id(self), id(other))
+ return cmp((self.__app_id, self.__namespace),
+ (other.__app_id, other.__namespace))
+
+ def to_encoded(self):
+ """Returns this AppIdNamespace's string equivalent
+
+ i.e. "app!namespace"
+ """
+ if not self.__namespace:
+ return self.__app_id
+ else:
+ return self.__app_id + _NAMESPACE_SEPARATOR + self.__namespace
+
+  def app_id(self):
+    """Returns the app id portion of this AppIdNamespace.
+    """
+    return self.__app_id
+
+  def namespace(self):
+    """Returns the namespace portion of this AppIdNamespace.
+    """
+    return self.__namespace
+
+
+def PartitionString(value, separator):
+ """Equivalent to python2.5 str.partition()
+ TODO(gmariani) use str.partition() when python 2.5 is adopted.
+
+ Args:
+ value: String to be partitioned
+ separator: Separator string
+ """
+  index = value.find(separator)
+  if index == -1:
+    return (value, '', value[0:0])
+  else:
+    return (value[0:index], separator, value[index+len(separator):len(value)])
+
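+# Example (an illustrative sketch of the contract, mirroring str.partition):
+#
+#   PartitionString('app!ns', '!')  # -> ('app', '!', 'ns')
+#   PartitionString('app', '!')     # -> ('app', '', '')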
+
+def parse_app_id_namespace(app_id_namespace):
+ """
+ An app_id_namespace string is valid if it's not empty, and contains
+ at most one namespace separator ('!'). Also, an app_id_namespace
+ with an empty namespace must not contain a namespace separator.
+
+ Args:
+ app_id_namespace: an encoded app_id_namespace.
+ Raises exception if format of app_id_namespace is invalid.
+ """
+ if not app_id_namespace:
+ raise datastore_errors.BadArgumentError(
+        'app_id_namespace must be non-empty')
+ parts = PartitionString(app_id_namespace, _NAMESPACE_SEPARATOR)
+ if parts[1] == _NAMESPACE_SEPARATOR:
+ if not parts[2]:
+ raise datastore_errors.BadArgumentError(
+ 'app_id_namespace must not contain a "%s" if the namespace is empty' %
+ _NAMESPACE_SEPARATOR)
+ if parts[2]:
+ return AppIdNamespace(parts[0], parts[2])
+ return AppIdNamespace(parts[0], None)
+
+def ResolveAppIdNamespace(
+ app_id=None, namespace=None, app_id_namespace=None):
+ """Validate an app id/namespace and substitute default values.
+
+ If the argument is None, $APPLICATION_ID!$NAMESPACE is substituted.
+
+ Args:
+ app_id: The app id argument value to be validated.
+ namespace: The namespace argument value to be validated.
+ app_id_namespace: An AppId/Namespace pair
+
+ Returns:
+ An AppIdNamespace object initialized with AppId and Namespace.
+
+ Raises:
+ BadArgumentError if the value is empty or not a string.
+ """
+ if app_id_namespace is None:
+ if app_id is None:
+ app_id = os.environ.get('APPLICATION_ID', '')
+ if namespace is None:
+      namespace = namespace_manager.get_request_namespace()
+ else:
+    if app_id is not None:
+      raise datastore_errors.BadArgumentError(
+          'app_id is overspecified. Cannot define app_id_namespace and app_id')
+    if namespace is not None:
+ raise datastore_errors.BadArgumentError(
+ 'namespace is overspecified. ' +
+ 'Cannot define app_id_namespace and namespace')
+ return parse_app_id_namespace(app_id_namespace)
+
+ return AppIdNamespace(app_id, namespace)
+
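+# Example (an illustrative sketch of the three calling modes):
+#
+#   ResolveAppIdNamespace()                    # defaults from the environment
+#   ResolveAppIdNamespace('my-app', 'my-ns')   # explicit app id and namespace
+#   ResolveAppIdNamespace(app_id_namespace='my-app!my-ns')  # pre-encoded pair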
+
+class Key(object):
+ """The primary key for a datastore entity.
+
+ A datastore GUID. A Key instance uniquely identifies an entity across all
+ apps, and includes all information necessary to fetch the entity from the
+ datastore with Get().
+
+ Key implements __hash__, and key instances are immutable, so Keys may be
+ used in sets and as dictionary keys.
+ """
+ __reference = None
+
+ def __init__(self, encoded=None):
+ """Constructor. Creates a Key from a string.
+
+ Args:
+ # a base64-encoded primary key, generated by Key.__str__
+ encoded: str
+ """
+ if encoded is not None:
+ if not isinstance(encoded, basestring):
+ try:
+ repr_encoded = repr(encoded)
+ except:
+ repr_encoded = "<couldn't encode>"
+ raise datastore_errors.BadArgumentError(
+ 'Key() expects a string; received %s (a %s).' %
+ (repr_encoded, typename(encoded)))
+ try:
+ modulo = len(encoded) % 4
+ if modulo != 0:
+ encoded += ('=' * (4 - modulo))
+
+ encoded_pb = base64.urlsafe_b64decode(str(encoded))
+ self.__reference = entity_pb.Reference(encoded_pb)
+ assert self.__reference.IsInitialized()
+
+ except (AssertionError, TypeError), e:
+ raise datastore_errors.BadKeyError(
+ 'Invalid string key %s. Details: %s' % (encoded, e))
+ except Exception, e:
+ if e.__class__.__name__ == 'ProtocolBufferDecodeError':
+ raise datastore_errors.BadKeyError('Invalid string key %s.' % encoded)
+ else:
+ raise
+ else:
+ self.__reference = entity_pb.Reference()
+
+ def to_path(self):
+ """Construct the "path" of this key as a list.
+
+ Returns:
+ A list [kind_1, id_or_name_1, ..., kind_n, id_or_name_n] of the key path.
+
+ Raises:
+ datastore_errors.BadKeyError if this key does not have a valid path.
+ """
+ path = []
+ for path_element in self.__reference.path().element_list():
+ path.append(path_element.type().decode('utf-8'))
+ if path_element.has_name():
+ path.append(path_element.name().decode('utf-8'))
+ elif path_element.has_id():
+ path.append(path_element.id())
+ else:
+ raise datastore_errors.BadKeyError('Incomplete key found in to_path')
+ return path
+
+ @staticmethod
+ def from_path(*args, **kwds):
+ """Static method to construct a Key out of a "path" (kind, id or name, ...).
+
+ This is useful when an application wants to use just the id or name portion
+ of a key in e.g. a URL, where the rest of the URL provides enough context to
+ fill in the rest, i.e. the app id (always implicit), the entity kind, and
+ possibly an ancestor key. Since ids and names are usually small, they're
+ more attractive for use in end-user-visible URLs than the full string
+ representation of a key.
+
+ Args:
+ kind: the entity kind (a str or unicode instance)
+ id_or_name: the id (an int or long) or name (a str or unicode instance)
+
+ Additional positional arguments are allowed and should be
+ alternating kind and id/name.
+
+ Keyword args:
+ parent: optional parent Key; default None.
+
+ Returns:
+ A new Key instance whose .kind() and .id() or .name() methods return
+ the *last* kind and id or name positional arguments passed.
+
+ Raises:
+ BadArgumentError for invalid arguments.
+ BadKeyError if the parent key is incomplete.
+ """
+ parent = kwds.pop('parent', None)
+ _app_id_namespace_obj = ResolveAppIdNamespace(
+ kwds.pop('_app', None),
+ kwds.pop('_namespace', None),
+ kwds.pop('_app_id_namespace', None))
+
+ if kwds:
+ raise datastore_errors.BadArgumentError(
+ 'Excess keyword arguments ' + repr(kwds))
+
+ if not args or len(args) % 2:
+ raise datastore_errors.BadArgumentError(
+ 'A non-zero even number of positional arguments is required '
+ '(kind, id or name, kind, id or name, ...); received %s' % repr(args))
+
+ if parent is not None:
+ if not isinstance(parent, Key):
+ raise datastore_errors.BadArgumentError(
+ 'Expected None or a Key as parent; received %r (a %s).' %
+ (parent, typename(parent)))
+ if not parent.has_id_or_name():
+ raise datastore_errors.BadKeyError(
+ 'The parent Key is incomplete.')
+ if _app_id_namespace_obj != parent.app_id_namespace():
+ raise datastore_errors.BadArgumentError(
+            'The app_id/namespace arguments (%r) should match '
+ 'parent.app_id_namespace().to_encoded() (%s)' %
+ (_app_id_namespace_obj, parent.app_id_namespace()))
+
+ key = Key()
+ ref = key.__reference
+ if parent is not None:
+ ref.CopyFrom(parent.__reference)
+ else:
+ ref.set_app(_app_id_namespace_obj.to_encoded())
+
+ path = ref.mutable_path()
+ for i in xrange(0, len(args), 2):
+ kind, id_or_name = args[i:i+2]
+ if isinstance(kind, basestring):
+ kind = kind.encode('utf-8')
+ else:
+ raise datastore_errors.BadArgumentError(
+ 'Expected a string kind as argument %d; received %r (a %s).' %
+ (i + 1, kind, typename(kind)))
+ elem = path.add_element()
+ elem.set_type(kind)
+ if isinstance(id_or_name, (int, long)):
+ elem.set_id(id_or_name)
+ elif isinstance(id_or_name, basestring):
+ ValidateString(id_or_name, 'name')
+ elem.set_name(id_or_name.encode('utf-8'))
+ else:
+ raise datastore_errors.BadArgumentError(
+ 'Expected an integer id or string name as argument %d; '
+ 'received %r (a %s).' % (i + 2, id_or_name, typename(id_or_name)))
+
+ assert ref.IsInitialized()
+ return key
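+
+  # Example (an illustrative sketch):
+  #
+  #   key = Key.from_path('Parent', 'alice', 'Message', 42)
+  #   key.kind()           # -> u'Message'
+  #   key.id()             # -> 42
+  #   key.parent().kind()  # -> u'Parent'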
+
+ def app(self):
+ """Returns this entity's app id, a string."""
+ if self.__reference.app():
+ return self.app_id_namespace().app_id().decode('utf-8')
+ else:
+ return None
+
+ def namespace(self):
+ """Returns this entity's app id, a string."""
+ if self.__reference.app():
+ return self.app_id_namespace().namespace().decode('utf-8')
+ else:
+ return None
+
+ def app_id_namespace(self):
+ """Returns this entity's app id/namespace, an appIdNamespace object."""
+ if self.__reference.app():
+ return parse_app_id_namespace(self.__reference.app())
+ else:
+ return None
+
+ def kind(self):
+ """Returns this entity's kind, as a string."""
+ if self.__reference.path().element_size() > 0:
+ encoded = self.__reference.path().element_list()[-1].type()
+ return unicode(encoded.decode('utf-8'))
+ else:
+ return None
+
+ def id(self):
+ """Returns this entity's id, or None if it doesn't have one."""
+ elems = self.__reference.path().element_list()
+ if elems and elems[-1].has_id() and elems[-1].id():
+ return elems[-1].id()
+ else:
+ return None
+
+ def name(self):
+ """Returns this entity's name, or None if it doesn't have one."""
+ elems = self.__reference.path().element_list()
+ if elems and elems[-1].has_name() and elems[-1].name():
+ return elems[-1].name().decode('utf-8')
+ else:
+ return None
+
+ def id_or_name(self):
+ """Returns this entity's id or name, whichever it has, or None."""
+ if self.id() is not None:
+ return self.id()
+ else:
+ return self.name()
+
+ def has_id_or_name(self):
+ """Returns True if this entity has an id or name, False otherwise.
+ """
+ return self.id_or_name() is not None
+
+ def parent(self):
+ """Returns this entity's parent, as a Key. If this entity has no parent,
+ returns None."""
+ if self.__reference.path().element_size() > 1:
+ parent = Key()
+ parent.__reference.CopyFrom(self.__reference)
+ parent.__reference.path().element_list().pop()
+ return parent
+ else:
+ return None
+
+ def ToTagUri(self):
+ """Returns a tag: URI for this entity for use in XML output.
+
+ Foreign keys for entities may be represented in XML output as tag URIs.
+ RFC 4151 describes the tag URI scheme. From http://taguri.org/:
+
+ The tag algorithm lets people mint - create - identifiers that no one
+ else using the same algorithm could ever mint. It is simple enough to do
+ in your head, and the resulting identifiers can be easy to read, write,
+ and remember. The identifiers conform to the URI (URL) Syntax.
+
+ Tag URIs for entities use the app's auth domain and the date that the URI
+ is generated. The namespace-specific part is <kind>[<key>].
+
+ For example, here is the tag URI for a Kitten with the key "Fluffy" in the
+ catsinsinks app:
+
+ tag:catsinsinks.googleapps.com,2006-08-29:Kitten[Fluffy]
+
+ Raises a BadKeyError if this entity's key is incomplete.
+ """
+ if not self.has_id_or_name():
+ raise datastore_errors.BadKeyError(
+ 'ToTagUri() called for an entity with an incomplete key.')
+
+ return u'tag:%s.%s,%s:%s[%s]' % (
+ saxutils.escape(self.app_id_namespace().to_encoded()),
+ os.environ['AUTH_DOMAIN'],
+ datetime.date.today().isoformat(),
+ saxutils.escape(self.kind()),
+ saxutils.escape(str(self)))
+
+ ToXml = ToTagUri
+
+ def entity_group(self):
+ """Returns this key's entity group as a Key.
+
+ Note that the returned Key will be incomplete if this Key is for a root
+ entity and it is incomplete.
+ """
+ group = Key._FromPb(self.__reference)
+ del group.__reference.path().element_list()[1:]
+ return group
+
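+  # Sketch: for a key with path ('Parent', 'alice', 'Child', 42) the entity
+  # group is the root key ('Parent', 'alice'); for a root key, entity_group()
+  # returns the key itself (possibly incomplete, as noted above).
+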
+ @staticmethod
+ def _FromPb(pb):
+ """Static factory method. Creates a Key from an entity_pb.Reference.
+
+ Not intended to be used by application developers. Enforced by hiding the
+ entity_pb classes.
+
+ Args:
+ pb: entity_pb.Reference
+ """
+ if not isinstance(pb, entity_pb.Reference):
+ raise datastore_errors.BadArgumentError(
+ 'Key constructor takes an entity_pb.Reference; received %s (a %s).' %
+ (pb, typename(pb)))
+
+ key = Key()
+ key.__reference = entity_pb.Reference()
+ key.__reference.CopyFrom(pb)
+ return key
+
+ def _ToPb(self):
+ """Converts this Key to its protocol buffer representation.
+
+ Not intended to be used by application developers. Enforced by hiding the
+ entity_pb classes.
+
+ Returns:
+ # the Reference PB representation of this Key
+ entity_pb.Reference
+ """
+ pb = entity_pb.Reference()
+ pb.CopyFrom(self.__reference)
+ if not self.has_id_or_name():
+ pb.mutable_path().element_list()[-1].set_id(0)
+
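+    # The decode() calls below are for validation only: they raise
+    # UnicodeDecodeError if the stored bytes are not valid UTF-8; the
+    # decoded values are intentionally discarded.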
+ pb.app().decode('utf-8')
+ for pathelem in pb.path().element_list():
+ pathelem.type().decode('utf-8')
+
+ return pb
+
+ def __str__(self):
+ """Encodes this Key as an opaque string.
+
+ Returns a string representation of this key, suitable for use in HTML,
+ URLs, and other similar use cases. If the entity's key is incomplete,
+ raises a BadKeyError.
+
+ Unfortunately, this string encoding isn't particularly compact, and its
+ length varies with the length of the path. If you want a shorter identifier
+ and you know the kind and parent (if any) ahead of time, consider using just
+ the entity's id or name.
+
+ Returns:
+ string
+ """
+    if self.has_id_or_name():
+ encoded = base64.urlsafe_b64encode(self.__reference.Encode())
+ return encoded.replace('=', '')
+ else:
+ raise datastore_errors.BadKeyError(
+ 'Cannot string encode an incomplete key!\n%s' % self.__reference)
+
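+  # Sketch (assuming the Key(encoded) constructor defined earlier in this
+  # module): the encoding round-trips, i.e. Key(str(k)) == k for any
+  # complete key k, since str(k) is just the urlsafe base64 of the
+  # underlying Reference PB with '=' padding stripped.
+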
+ def __repr__(self):
+ """Returns an eval()able string representation of this key.
+
+ Returns a Python string of the form 'datastore_types.Key.from_path(...)'
+ that can be used to recreate this key.
+
+ Returns:
+ string
+ """
+ args = []
+ for elem in self.__reference.path().element_list():
+ args.append(repr(elem.type().decode('utf-8')))
+ if elem.has_name():
+ args.append(repr(elem.name().decode('utf-8')))
+ else:
+ args.append(repr(elem.id()))
+
+ args.append('_app_id_namespace=%r' % self.__reference.app().decode('utf-8'))
+ return u'datastore_types.Key.from_path(%s)' % ', '.join(args)
+
+ def __cmp__(self, other):
+ """Returns negative, zero, or positive when comparing two keys.
+
+    TODO(ryanb): for API v2, we should change this to make incomplete keys,
+    i.e. keys without an id or name, not equal to any other keys.
+
+ Args:
+ other: Key to compare to.
+
+ Returns:
+ Negative if self is less than "other"
+ Zero if "other" is equal to self
+ Positive if self is greater than "other"
+ """
+ if not isinstance(other, Key):
+ return -2
+
+ self_args = []
+ other_args = []
+
+ self_args.append(self.__reference.app())
+ other_args.append(other.__reference.app())
+
+ for elem in self.__reference.path().element_list():
+ self_args.append(elem.type())
+ if elem.has_name():
+ self_args.append(elem.name())
+ else:
+ self_args.append(elem.id())
+
+ for elem in other.__reference.path().element_list():
+ other_args.append(elem.type())
+ if elem.has_name():
+ other_args.append(elem.name())
+ else:
+ other_args.append(elem.id())
+
+ for self_component, other_component in zip(self_args, other_args):
+ comparison = cmp(self_component, other_component)
+ if comparison != 0:
+ return comparison
+
+ return cmp(len(self_args), len(other_args))
+
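+  # Sketch of the resulting ordering: keys compare by app first, then by
+  # path elements pairwise, with a shorter path that is a prefix of a longer
+  # one sorting first; under CPython 2's cmp(), numeric ids sort before
+  # string names within the same kind.
+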
+ def __hash__(self):
+ """Returns a 32-bit integer hash of this key.
+
+ Implements Python's hash protocol so that Keys may be used in sets and as
+ dictionary keys.
+
+ Returns:
+ int
+ """
+ return hash(self.__str__())
+
+
+class Category(unicode):
+ """A tag, ie a descriptive word or phrase. Entities may be tagged by users,
+ and later returned by a queries for that tag. Tags can also be used for
+ ranking results (frequency), photo captions, clustering, activity, etc.
+
+ Here's a more in-depth description: http://www.zeldman.com/daily/0405d.shtml
+
+ This is the Atom "category" element. In XML output, the tag is provided as
+ the term attribute. See:
+ http://www.atomenabled.org/developers/syndication/#category
+
+ Raises BadValueError if tag is not a string or subtype.
+ """
+ TERM = 'user-tag'
+
+ def __init__(self, tag):
+    super(Category, self).__init__(tag)
+ ValidateString(tag, 'tag')
+
+ def ToXml(self):
+ return u'<category term="%s" label=%s />' % (Category.TERM,
+ saxutils.quoteattr(self))
+
+
+class Link(unicode):
+ """A fully qualified URL. Usually http: scheme, but may also be file:, ftp:,
+ news:, among others.
+
+ If you have email (mailto:) or instant messaging (aim:, xmpp:) links,
+ consider using the Email or IM classes instead.
+
+ This is the Atom "link" element. In XML output, the link is provided as the
+ href attribute. See:
+ http://www.atomenabled.org/developers/syndication/#link
+
+ Raises BadValueError if link is not a fully qualified, well-formed URL.
+ """
+ def __init__(self, link):
+    super(Link, self).__init__(link)
+ ValidateString(link, 'link', max_len=_MAX_LINK_PROPERTY_LENGTH)
+
+ scheme, domain, path, params, query, fragment = urlparse.urlparse(link)
+ if (not scheme or (scheme != 'file' and not domain) or
+ (scheme == 'file' and not path)):
+ raise datastore_errors.BadValueError('Invalid URL: %s' % link)
+
+ def ToXml(self):
+ return u'<link href=%s />' % saxutils.quoteattr(self)
+
+
+class Email(unicode):
+ """An RFC2822 email address. Makes no attempt at validation; apart from
+ checking MX records, email address validation is a rathole.
+
+ This is the gd:email element. In XML output, the email address is provided as
+ the address attribute. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdEmail
+
+  Raises BadValueError if email is not a non-empty string.
+ """
+ def __init__(self, email):
+    super(Email, self).__init__(email)
+ ValidateString(email, 'email')
+
+ def ToXml(self):
+ return u'<gd:email address=%s />' % saxutils.quoteattr(self)
+
+
+class GeoPt(object):
+ """A geographical point, specified by floating-point latitude and longitude
+ coordinates. Often used to integrate with mapping sites like Google Maps.
+ May also be used as ICBM coordinates.
+
+ This is the georss:point element. In XML output, the coordinates are
+ provided as the lat and lon attributes. See: http://georss.org/
+
+ Serializes to '<lat>,<lon>'. Raises BadValueError if it's passed an invalid
+ serialized string, or if lat and lon are not valid floating points in the
+ ranges [-90, 90] and [-180, 180], respectively.
+ """
+ lat = None
+ lon = None
+
+ def __init__(self, lat, lon=None):
+ if lon is None:
+ try:
+ split = lat.split(',')
+ lat, lon = split
+ except (AttributeError, ValueError):
+ raise datastore_errors.BadValueError(
+ 'Expected a "lat,long" formatted string; received %s (a %s).' %
+ (lat, typename(lat)))
+
+ try:
+ lat = float(lat)
+ lon = float(lon)
+ if abs(lat) > 90:
+ raise datastore_errors.BadValueError(
+ 'Latitude must be between -90 and 90; received %f' % lat)
+ if abs(lon) > 180:
+ raise datastore_errors.BadValueError(
+ 'Longitude must be between -180 and 180; received %f' % lon)
+ except (TypeError, ValueError):
+ raise datastore_errors.BadValueError(
+ 'Expected floats for lat and long; received %s (a %s) and %s (a %s).' %
+ (lat, typename(lat), lon, typename(lon)))
+
+ self.lat = lat
+ self.lon = lon
+
+ def __cmp__(self, other):
+ if not isinstance(other, GeoPt):
+ try:
+ other = GeoPt(other)
+ except datastore_errors.BadValueError:
+ return NotImplemented
+
+ lat_cmp = cmp(self.lat, other.lat)
+ if lat_cmp != 0:
+ return lat_cmp
+ else:
+ return cmp(self.lon, other.lon)
+
+ def __hash__(self):
+ """Returns a 32-bit integer hash of this point.
+
+ Implements Python's hash protocol so that GeoPts may be used in sets and
+ as dictionary keys.
+
+ Returns:
+ int
+ """
+ return hash((self.lat, self.lon))
+
+ def __repr__(self):
+ """Returns an eval()able string representation of this GeoPt.
+
+ The returned string is of the form 'datastore_types.GeoPt([lat], [lon])'.
+
+ Returns:
+ string
+ """
+ return 'datastore_types.GeoPt(%r, %r)' % (self.lat, self.lon)
+
+ def __unicode__(self):
+ return u'%s,%s' % (unicode(self.lat), unicode(self.lon))
+
+ __str__ = __unicode__
+
+ def ToXml(self):
+ return u'<georss:point>%s %s</georss:point>' % (unicode(self.lat),
+ unicode(self.lon))
+
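+# Sketch: GeoPt accepts either two floats or the serialized '<lat>,<lon>'
+# form, so GeoPt(48.2, 16.37) equals GeoPt('48.2,16.37'), and str() of
+# either yields '48.2,16.37'.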
+
+class IM(object):
+ """An instant messaging handle. Includes both an address and its protocol.
+ The protocol value is either a standard IM scheme or a URL identifying the
+ IM network for the protocol. Possible values include:
+
+ Value Description
+ sip SIP/SIMPLE
+ unknown Unknown or unspecified
+ xmpp XMPP/Jabber
+ http://aim.com/ AIM
+ http://icq.com/ ICQ
+ http://talk.google.com/ Google Talk
+ http://messenger.msn.com/ MSN Messenger
+ http://messenger.yahoo.com/ Yahoo Messenger
+ http://sametime.com/ Lotus Sametime
+ http://gadu-gadu.pl/ Gadu-Gadu
+
+ This is the gd:im element. In XML output, the address and protocol are
+ provided as the address and protocol attributes, respectively. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdIm
+
+  Serializes to '<protocol> <address>'. Raises BadValueError if protocol is
+  not a standard IM scheme or a URL.
+ """
+ PROTOCOLS = [ 'sip', 'unknown', 'xmpp' ]
+
+ protocol = None
+ address = None
+
+ def __init__(self, protocol, address=None):
+ if address is None:
+ try:
+ split = protocol.split(' ')
+ protocol, address = split
+ except (AttributeError, ValueError):
+ raise datastore_errors.BadValueError(
+ 'Expected string of format "protocol address"; received %s' %
+ str(protocol))
+
+ ValidateString(address, 'address')
+ if protocol not in self.PROTOCOLS:
+ Link(protocol)
+
+ self.address = address
+ self.protocol = protocol
+
+ def __cmp__(self, other):
+ if not isinstance(other, IM):
+ try:
+ other = IM(other)
+ except datastore_errors.BadValueError:
+ return NotImplemented
+
+ return cmp((self.address, self.protocol),
+ (other.address, other.protocol))
+
+ def __repr__(self):
+ """Returns an eval()able string representation of this IM.
+
+ The returned string is of the form:
+
+ datastore_types.IM('address', 'protocol')
+
+ Returns:
+ string
+ """
+ return 'datastore_types.IM(%r, %r)' % (self.protocol, self.address)
+
+ def __unicode__(self):
+ return u'%s %s' % (self.protocol, self.address)
+
+ __str__ = __unicode__
+
+ def ToXml(self):
+ return (u'<gd:im protocol=%s address=%s />' %
+ (saxutils.quoteattr(self.protocol),
+ saxutils.quoteattr(self.address)))
+
+ def __len__(self):
+ return len(unicode(self))
+
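+# Sketch: IM accepts either (protocol, address) or the serialized
+# 'protocol address' form, so IM('xmpp', 'user@example.com') equals
+# IM('xmpp user@example.com'); a protocol outside PROTOCOLS must parse as a
+# fully qualified URL (checked via Link above).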
+
+class PhoneNumber(unicode):
+ """A human-readable phone number or address.
+
+ No validation is performed. Phone numbers have many different formats -
+ local, long distance, domestic, international, internal extension, TTY,
+ VOIP, SMS, and alternative networks like Skype, XFire and Roger Wilco. They
+ all have their own numbering and addressing formats.
+
+ This is the gd:phoneNumber element. In XML output, the phone number is
+ provided as the text of the element. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdPhoneNumber
+
+ Raises BadValueError if phone is not a string or subtype.
+ """
+ def __init__(self, phone):
+    super(PhoneNumber, self).__init__(phone)
+ ValidateString(phone, 'phone')
+
+ def ToXml(self):
+ return u'<gd:phoneNumber>%s</gd:phoneNumber>' % saxutils.escape(self)
+
+
+class PostalAddress(unicode):
+ """A human-readable mailing address. Again, mailing address formats vary
+ widely, so no validation is performed.
+
+ This is the gd:postalAddress element. In XML output, the address is provided
+ as the text of the element. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdPostalAddress
+
+ Raises BadValueError if address is not a string or subtype.
+ """
+ def __init__(self, address):
+    super(PostalAddress, self).__init__(address)
+ ValidateString(address, 'address')
+
+ def ToXml(self):
+ return u'<gd:postalAddress>%s</gd:postalAddress>' % saxutils.escape(self)
+
+
+class Rating(long):
+ """A user-provided integer rating for a piece of content. Normalized to a
+ 0-100 scale.
+
+  This is the gd:rating element. In XML output, the rating is provided as
+  the value attribute of the element. See:
+ http://code.google.com/apis/gdata/common-elements.html#gdRating
+
+ Serializes to the decimal string representation of the rating. Raises
+ BadValueError if the rating is not an integer in the range [0, 100].
+ """
+ MIN = 0
+ MAX = 100
+
+ def __init__(self, rating):
+    super(Rating, self).__init__(rating)
+ if isinstance(rating, float) or isinstance(rating, complex):
+ raise datastore_errors.BadValueError(
+ 'Expected int or long; received %s (a %s).' %
+ (rating, typename(rating)))
+
+ try:
+ if long(rating) < Rating.MIN or long(rating) > Rating.MAX:
+ raise datastore_errors.BadValueError()
+ except ValueError:
+ raise datastore_errors.BadValueError(
+ 'Expected int or long; received %s (a %s).' %
+ (rating, typename(rating)))
+
+ def ToXml(self):
+ return (u'<gd:rating value="%d" min="%d" max="%d" />' %
+ (self, Rating.MIN, Rating.MAX))
+
+
+class Text(unicode):
+ """A long string type.
+
+ Strings of any length can be stored in the datastore using this
+ type. It behaves identically to the Python unicode type, except for
+ the constructor, which only accepts str and unicode arguments.
+ """
+
+ def __new__(cls, arg=None, encoding=None):
+ """Constructor.
+
+ We only accept unicode and str instances, the latter with encoding.
+
+ Args:
+ arg: optional unicode or str instance; default u''
+ encoding: optional encoding; disallowed when isinstance(arg, unicode),
+ defaults to 'ascii' when isinstance(arg, str);
+ """
+ if arg is None:
+ arg = u''
+ if isinstance(arg, unicode):
+ if encoding is not None:
+ raise TypeError('Text() with a unicode argument '
+ 'should not specify an encoding')
+ return super(Text, cls).__new__(cls, arg)
+
+ if isinstance(arg, str):
+ if encoding is None:
+ encoding = 'ascii'
+ return super(Text, cls).__new__(cls, arg, encoding)
+
+ raise TypeError('Text() argument should be str or unicode, not %s' %
+ type(arg).__name__)
+
+class Blob(str):
+ """A blob type, appropriate for storing binary data of any length.
+
+ This behaves identically to the Python str type, except for the
+ constructor, which only accepts str arguments.
+ """
+
+ def __new__(cls, arg=None):
+ """Constructor.
+
+ We only accept str instances.
+
+ Args:
+ arg: optional str instance (default '')
+ """
+ if arg is None:
+ arg = ''
+ if isinstance(arg, str):
+ return super(Blob, cls).__new__(cls, arg)
+
+ raise TypeError('Blob() argument should be str instance, not %s' %
+ type(arg).__name__)
+
+ def ToXml(self):
+ """Output a blob as XML.
+
+ Returns:
+ Base64 encoded version of itself for safe insertion in to an XML document.
+ """
+ encoded = base64.urlsafe_b64encode(self)
+ return saxutils.escape(encoded)
+
+class ByteString(str):
+ """A byte-string type, appropriate for storing short amounts of indexed data.
+
+ This behaves identically to Blob, except it's used only for short, indexed
+ byte strings.
+ """
+
+ def __new__(cls, arg=None):
+ """Constructor.
+
+ We only accept str instances.
+
+ Args:
+ arg: optional str instance (default '')
+ """
+ if arg is None:
+ arg = ''
+ if isinstance(arg, str):
+ return super(ByteString, cls).__new__(cls, arg)
+
+ raise TypeError('ByteString() argument should be str instance, not %s' %
+ type(arg).__name__)
+
+ def ToXml(self):
+ """Output a ByteString as XML.
+
+ Returns:
+ Base64 encoded version of itself for safe insertion in to an XML document.
+ """
+ encoded = base64.urlsafe_b64encode(self)
+ return saxutils.escape(encoded)
+
+
+class BlobKey(object):
+ """Key used to identify a blob in Blobstore.
+
+ This object wraps a string that gets used internally by the Blobstore API
+ to identify application blobs. The BlobKey corresponds to the entity name
+ of the underlying BlobReference entity. The structure of the key is:
+
+ _<blob-key>
+
+ This class is exposed in the API in both google.appengine.ext.db and
+ google.appengine.ext.blobstore.
+ """
+
+ def __init__(self, blob_key):
+ """Constructor.
+
+ Used to convert a string to a BlobKey. Normally used internally by
+ Blobstore API.
+
+ Args:
+ blob_key: Key name of BlobReference that this key belongs to.
+ """
+ self.__blob_key = blob_key
+
+ def __str__(self):
+ """Convert to string."""
+ return self.__blob_key
+
+ def __repr__(self):
+ """Returns an eval()able string representation of this key.
+
+ Returns a Python string of the form 'datastore_types.BlobKey(...)'
+ that can be used to recreate this key.
+
+ Returns:
+ string
+ """
+    return '%s.%s(%r)' % (type(self).__module__,
+                          type(self).__name__,
+                          self.__blob_key)
+
+ def __cmp__(self, other):
+ if type(other) is type(self):
+ return cmp(str(self), str(other))
+ elif isinstance(other, basestring):
+ return cmp(self.__blob_key, other)
+ else:
+ return NotImplemented
+
+ def __hash__(self):
+ return hash(self.__blob_key)
+
+ def ToXml(self):
+ return str(self)
+
+
+_PROPERTY_MEANINGS = {
+ Blob: entity_pb.Property.BLOB,
+ ByteString: entity_pb.Property.BYTESTRING,
+ Text: entity_pb.Property.TEXT,
+ datetime.datetime: entity_pb.Property.GD_WHEN,
+ Category: entity_pb.Property.ATOM_CATEGORY,
+ Link: entity_pb.Property.ATOM_LINK,
+ Email: entity_pb.Property.GD_EMAIL,
+ GeoPt: entity_pb.Property.GEORSS_POINT,
+ IM: entity_pb.Property.GD_IM,
+ PhoneNumber: entity_pb.Property.GD_PHONENUMBER,
+ PostalAddress: entity_pb.Property.GD_POSTALADDRESS,
+ Rating: entity_pb.Property.GD_RATING,
+ BlobKey: entity_pb.Property.BLOBKEY,
+}
+
+_PROPERTY_TYPES = frozenset([
+ Blob,
+ ByteString,
+ bool,
+ Category,
+ datetime.datetime,
+ Email,
+ float,
+ GeoPt,
+ IM,
+ int,
+ Key,
+ Link,
+ long,
+ PhoneNumber,
+ PostalAddress,
+ Rating,
+ str,
+ Text,
+ type(None),
+ unicode,
+ users.User,
+ BlobKey,
+])
+
+_RAW_PROPERTY_TYPES = (Blob, Text)
+
+def ValidatePropertyInteger(name, value):
+ """Raises an exception if the supplied integer is invalid.
+
+ Args:
+ name: Name of the property this is for.
+ value: Integer value.
+
+ Raises:
+ OverflowError if the value does not fit within a signed int64.
+ """
+ if not (-0x8000000000000000 <= value <= 0x7fffffffffffffff):
+ raise OverflowError('%d is out of bounds for int64' % value)
+
+
+def ValidateStringLength(name, value, max_len):
+ """Raises an exception if the supplied string is too long.
+
+ Args:
+ name: Name of the property this is for.
+ value: String value.
+ max_len: Maximum length the string may be.
+
+ Raises:
+    datastore_errors.BadValueError if the value is longer than the maximum
+    length.
+ """
+ if len(value) > max_len:
+ raise datastore_errors.BadValueError(
+ 'Property %s is %d bytes long; it must be %d or less. '
+ 'Consider Text instead, which can store strings of any length.' %
+ (name, len(value), max_len))
+
+
+def ValidatePropertyString(name, value):
+ """Validates the length of an indexed string property.
+
+ Args:
+ name: Name of the property this is for.
+ value: String value.
+ """
+ ValidateStringLength(name, value, max_len=_MAX_STRING_LENGTH)
+
+
+def ValidatePropertyLink(name, value):
+ """Validates the length of an indexed Link property.
+
+ Args:
+ name: Name of the property this is for.
+ value: String value.
+ """
+ ValidateStringLength(name, value, max_len=_MAX_LINK_PROPERTY_LENGTH)
+
+
+def ValidatePropertyNothing(name, value):
+ """No-op validation function.
+
+ Args:
+ name: Name of the property this is for.
+ value: Not used.
+ """
+ pass
+
+
+def ValidatePropertyKey(name, value):
+ """Raises an exception if the supplied datastore.Key instance is invalid.
+
+ Args:
+ name: Name of the property this is for.
+ value: A datastore.Key instance.
+
+ Raises:
+ datastore_errors.BadValueError if the value is invalid.
+ """
+ if not value.has_id_or_name():
+ raise datastore_errors.BadValueError(
+ 'Incomplete key found for reference property %s.' % name)
+
+
+_VALIDATE_PROPERTY_VALUES = {
+ Blob: ValidatePropertyNothing,
+ ByteString: ValidatePropertyString,
+ bool: ValidatePropertyNothing,
+ Category: ValidatePropertyString,
+ datetime.datetime: ValidatePropertyNothing,
+ Email: ValidatePropertyString,
+ float: ValidatePropertyNothing,
+ GeoPt: ValidatePropertyNothing,
+ IM: ValidatePropertyString,
+ int: ValidatePropertyInteger,
+ Key: ValidatePropertyKey,
+ Link: ValidatePropertyLink,
+ long: ValidatePropertyInteger,
+ PhoneNumber: ValidatePropertyString,
+ PostalAddress: ValidatePropertyString,
+ Rating: ValidatePropertyInteger,
+ str: ValidatePropertyString,
+ Text: ValidatePropertyNothing,
+ type(None): ValidatePropertyNothing,
+ unicode: ValidatePropertyString,
+ users.User: ValidatePropertyNothing,
+ BlobKey: ValidatePropertyString,
+}
+
+assert set(_VALIDATE_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
+
+
+def ValidateProperty(name, values, read_only=False):
+ """Helper function for validating property values.
+
+ Args:
+ name: Name of the property this is for.
+    values: The value for this property, as a Python native type or a list
+      of them.
+    read_only: If True, the reserved property name check is skipped.
+
+ Raises:
+ BadPropertyError if the property name is invalid. BadValueError if the
+ property did not validate correctly or the value was an empty list. Other
+ exception types (like OverflowError) if the property value does not meet
+ type-specific criteria.
+ """
+ ValidateString(name, 'property name', datastore_errors.BadPropertyError)
+
+ if not read_only and RESERVED_PROPERTY_NAME.match(name):
+ raise datastore_errors.BadPropertyError(
+ '%s is a reserved property name.' % name)
+
+ values_type = type(values)
+
+ if values_type is tuple:
+ raise datastore_errors.BadValueError(
+ 'May not use tuple property value; property %s is %s.' %
+ (name, repr(values)))
+
+  if values_type is not list:
+    values = [values]
+
+ if not values:
+ raise datastore_errors.BadValueError(
+ 'May not use the empty list as a property value; property %s is %s.' %
+ (name, repr(values)))
+
+ try:
+ for v in values:
+ prop_validator = _VALIDATE_PROPERTY_VALUES.get(v.__class__)
+ if prop_validator is None:
+ raise datastore_errors.BadValueError(
+ 'Unsupported type for property %s: %s' % (name, v.__class__))
+ prop_validator(name, v)
+
+ except (KeyError, ValueError, TypeError, IndexError, AttributeError), msg:
+ raise datastore_errors.BadValueError(
+ 'Error type checking values for property %s: %s' % (name, msg))
+
+
+ValidateReadProperty = ValidateProperty
+
+
+def PackBlob(name, value, pbvalue):
+ """Packs a Blob property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A Blob instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_stringvalue(value)
+
+
+def PackString(name, value, pbvalue):
+ """Packs a string-typed property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A string, unicode, or string-like value instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_stringvalue(unicode(value).encode('utf-8'))
+
+
+def PackDatetime(name, value, pbvalue):
+ """Packs a datetime-typed property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A datetime.datetime instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_int64value(DatetimeToTimestamp(value))
+
+
+def DatetimeToTimestamp(value):
+ """Converts a datetime.datetime to microseconds since the epoch, as a float.
+ Args:
+ value: datetime.datetime
+
+ Returns: value as a long
+ """
+ if value.tzinfo:
+ value = value.astimezone(UTC)
+ return long(calendar.timegm(value.timetuple()) * 1000000L) + value.microsecond
+
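+# Worked example: datetime.datetime(1970, 1, 1, 0, 0, 1, 500000) is 1.5
+# seconds after the epoch, so DatetimeToTimestamp returns
+# 1 * 1000000L + 500000 = 1500000L.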
+
+def PackGeoPt(name, value, pbvalue):
+ """Packs a GeoPt property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A GeoPt instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.mutable_pointvalue().set_x(value.lat)
+ pbvalue.mutable_pointvalue().set_y(value.lon)
+
+
+def PackUser(name, value, pbvalue):
+ """Packs a User property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A users.User instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.mutable_uservalue().set_email(value.email().encode('utf-8'))
+ pbvalue.mutable_uservalue().set_auth_domain(
+ value.auth_domain().encode('utf-8'))
+ pbvalue.mutable_uservalue().set_gaiaid(0)
+
+ if value.user_id() is not None:
+ pbvalue.mutable_uservalue().set_obfuscated_gaiaid(
+ value.user_id().encode('utf-8'))
+
+
+def PackKey(name, value, pbvalue):
+ """Packs a reference property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A Key instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ ref = value._Key__reference
+ pbvalue.mutable_referencevalue().set_app(ref.app())
+ for elem in ref.path().element_list():
+ pbvalue.mutable_referencevalue().add_pathelement().CopyFrom(elem)
+
+
+def PackBool(name, value, pbvalue):
+ """Packs a boolean property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A boolean instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_booleanvalue(value)
+
+
+def PackInteger(name, value, pbvalue):
+ """Packs an integer property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: An int or long instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_int64value(value)
+
+
+def PackFloat(name, value, pbvalue):
+ """Packs a float property into a entity_pb.PropertyValue.
+
+ Args:
+ name: The name of the property as a string.
+ value: A float instance.
+ pbvalue: The entity_pb.PropertyValue to pack this value into.
+ """
+ pbvalue.set_doublevalue(value)
+
+
+_PACK_PROPERTY_VALUES = {
+ Blob: PackBlob,
+ ByteString: PackBlob,
+ bool: PackBool,
+ Category: PackString,
+ datetime.datetime: PackDatetime,
+ Email: PackString,
+ float: PackFloat,
+ GeoPt: PackGeoPt,
+ IM: PackString,
+ int: PackInteger,
+ Key: PackKey,
+ Link: PackString,
+ long: PackInteger,
+ PhoneNumber: PackString,
+ PostalAddress: PackString,
+ Rating: PackInteger,
+ str: PackString,
+ Text: PackString,
+ type(None): lambda name, value, pbvalue: None,
+ unicode: PackString,
+ users.User: PackUser,
+ BlobKey: PackString,
+}
+
+assert set(_PACK_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
+
+
+def ToPropertyPb(name, values):
+ """Creates type-specific entity_pb.PropertyValues.
+
+ Determines the type and meaning of the PropertyValue based on the Python
+ type of the input value(s).
+
+ NOTE: This function does not validate anything!
+
+ Args:
+ name: string or unicode; the property name
+ values: The values for this property, either a single one or a list of them.
+ All values must be a supported type. Lists of values must all be of the
+ same type.
+
+ Returns:
+    A single entity_pb.Property instance, or a list of them if values is a
+    list.
+ """
+ encoded_name = name.encode('utf-8')
+
+ values_type = type(values)
+ if values_type is list:
+ multiple = True
+ else:
+ multiple = False
+ values = [values]
+
+ pbs = []
+ for v in values:
+ pb = entity_pb.Property()
+ pb.set_name(encoded_name)
+ pb.set_multiple(multiple)
+
+ meaning = _PROPERTY_MEANINGS.get(v.__class__)
+ if meaning is not None:
+ pb.set_meaning(meaning)
+
+ pack_prop = _PACK_PROPERTY_VALUES[v.__class__]
+    pack_prop(name, v, pb.mutable_value())
+ pbs.append(pb)
+
+ if multiple:
+ return pbs
+ else:
+ return pbs[0]
+
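+# Sketch: ToPropertyPb(u'tags', [u'a', u'b']) returns a list of two
+# entity_pb.Property instances, each with name 'tags', multiple=True and a
+# UTF-8 encoded stringvalue; a scalar argument yields a single Property with
+# multiple=False.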
+
+def FromReferenceProperty(value):
+ """Converts a reference PropertyValue to a Key.
+
+ Args:
+ value: entity_pb.PropertyValue
+
+ Returns:
+ Key
+
+ Raises:
+ BadValueError if the value is not a PropertyValue.
+ """
+ assert isinstance(value, entity_pb.PropertyValue)
+ assert value.has_referencevalue()
+ ref = value.referencevalue()
+
+ key = Key()
+ key_ref = key._Key__reference
+ key_ref.set_app(ref.app())
+
+ for pathelem in ref.pathelement_list():
+ key_ref.mutable_path().add_element().CopyFrom(pathelem)
+
+ return key
+
+
+_EPOCH = datetime.datetime.utcfromtimestamp(0)
+
+_PROPERTY_CONVERSIONS = {
+ entity_pb.Property.GD_WHEN:
+      lambda val: _EPOCH + datetime.timedelta(microseconds=val),
+ entity_pb.Property.ATOM_CATEGORY: Category,
+ entity_pb.Property.ATOM_LINK: Link,
+ entity_pb.Property.GD_EMAIL: Email,
+ entity_pb.Property.GD_IM: IM,
+ entity_pb.Property.GD_PHONENUMBER: PhoneNumber,
+ entity_pb.Property.GD_POSTALADDRESS: PostalAddress,
+ entity_pb.Property.GD_RATING: Rating,
+ entity_pb.Property.BLOB: Blob,
+ entity_pb.Property.BYTESTRING: ByteString,
+ entity_pb.Property.TEXT: Text,
+ entity_pb.Property.BLOBKEY: BlobKey,
+}
+
+
+def FromPropertyPb(pb):
+ """Converts a property PB to a python value.
+
+ Args:
+ pb: entity_pb.Property
+
+ Returns:
+ # return type is determined by the type of the argument
+ string, int, bool, double, users.User, or one of the atom or gd types
+ """
+ pbval = pb.value()
+ meaning = pb.meaning()
+
+ if pbval.has_stringvalue():
+ value = pbval.stringvalue()
+ if meaning not in (entity_pb.Property.BLOB, entity_pb.Property.BYTESTRING):
+ value = unicode(value.decode('utf-8'))
+ elif pbval.has_int64value():
+ value = long(pbval.int64value())
+ elif pbval.has_booleanvalue():
+ value = bool(pbval.booleanvalue())
+ elif pbval.has_doublevalue():
+ value = pbval.doublevalue()
+ elif pbval.has_referencevalue():
+ value = FromReferenceProperty(pbval)
+ elif pbval.has_pointvalue():
+ value = GeoPt(pbval.pointvalue().x(), pbval.pointvalue().y())
+ elif pbval.has_uservalue():
+ email = unicode(pbval.uservalue().email().decode('utf-8'))
+ auth_domain = unicode(pbval.uservalue().auth_domain().decode('utf-8'))
+ obfuscated_gaiaid = pbval.uservalue().obfuscated_gaiaid().decode('utf-8')
+ obfuscated_gaiaid = unicode(obfuscated_gaiaid)
+ value = users.User(email=email,
+ _auth_domain=auth_domain,
+ _user_id=obfuscated_gaiaid)
+ else:
+ value = None
+
+ try:
+ if pb.has_meaning() and pb.meaning() in _PROPERTY_CONVERSIONS:
+ conversion = _PROPERTY_CONVERSIONS[meaning]
+ value = conversion(value)
+ except (KeyError, ValueError, IndexError, TypeError, AttributeError), msg:
+ raise datastore_errors.BadValueError(
+ 'Error converting pb: %s\nException was: %s' % (pb, msg))
+
+ return value
+
+
+def PropertyTypeName(value):
+ """Returns the name of the type of the given property value, as a string.
+
+ Raises BadValueError if the value is not a valid property type.
+
+ Args:
+ value: any valid property value
+
+ Returns:
+ string
+ """
+ if value.__class__ in _PROPERTY_MEANINGS:
+ meaning = _PROPERTY_MEANINGS[value.__class__]
+ name = entity_pb.Property._Meaning_NAMES[meaning]
+ return name.lower().replace('_', ':')
+ elif isinstance(value, basestring):
+ return 'string'
+ elif isinstance(value, users.User):
+ return 'user'
+ elif isinstance(value, long):
+ return 'int'
+ elif value is None:
+ return 'null'
+ else:
+ return typename(value).lower()
+
+_PROPERTY_TYPE_STRINGS = {
+ 'string': unicode,
+ 'bool': bool,
+ 'int': long,
+ 'null': type(None),
+ 'float': float,
+ 'key': Key,
+ 'blob': Blob,
+ 'bytestring': ByteString,
+ 'text': Text,
+ 'user': users.User,
+ 'atom:category': Category,
+ 'atom:link': Link,
+ 'gd:email': Email,
+ 'gd:when': datetime.datetime,
+ 'georss:point': GeoPt,
+ 'gd:im': IM,
+ 'gd:phonenumber': PhoneNumber,
+ 'gd:postaladdress': PostalAddress,
+ 'gd:rating': Rating,
+ 'blobkey': BlobKey,
+ }
+
+
+def FromPropertyTypeName(type_name):
+ """Returns the python type given a type name.
+
+ Args:
+ type_name: A string representation of a datastore type name.
+
+ Returns:
+ A python type.
+ """
+ return _PROPERTY_TYPE_STRINGS[type_name]
+
+
+def PropertyValueFromString(type_,
+ value_string,
+ _auth_domain=None):
+ """Returns an instance of a property value given a type and string value.
+
+ The reverse of this method is just str() and type() of the python value.
+
+ Note that this does *not* support non-UTC offsets in ISO 8601-formatted
+ datetime strings, e.g. the -08:00 suffix in '2002-12-25 00:00:00-08:00'.
+ It only supports -00:00 and +00:00 suffixes, which are UTC.
+
+ Args:
+ type_: A python class.
+ value_string: A string representation of the value of the property.
+
+ Returns:
+ An instance of 'type'.
+
+ Raises:
+ ValueError if type_ is datetime and value_string has a timezone offset.
+ """
+ if type_ == datetime.datetime:
+ value_string = value_string.strip()
+    if len(value_string) > 6 and value_string[-6] in ('+', '-'):
+ if value_string[-5:] == '00:00':
+ value_string = value_string[:-6]
+ else:
+ raise ValueError('Non-UTC offsets in datetimes are not supported.')
+
+ split = value_string.split('.')
+ iso_date = split[0]
+ microseconds = 0
+ if len(split) > 1:
+ microseconds = int(split[1])
+
+ time_struct = time.strptime(iso_date, '%Y-%m-%d %H:%M:%S')[0:6]
+ value = datetime.datetime(*(time_struct + (microseconds,)))
+ return value
+ elif type_ == Rating:
+ return Rating(int(value_string))
+ elif type_ == bool:
+ return value_string == 'True'
+ elif type_ == users.User:
+ return users.User(value_string, _auth_domain)
+ elif type_ == type(None):
+ return None
+ return type_(value_string)
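+
+# Worked example (a sketch of the parsing above): PropertyValueFromString(
+# datetime.datetime, '2002-12-25 00:00:00.000123+00:00') strips the UTC
+# suffix, splits off the fractional part, and returns
+# datetime.datetime(2002, 12, 25, 0, 0, 0, 123);
+# PropertyValueFromString(bool, 'True') returns True.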
diff --git a/google_appengine/google/appengine/api/datastore_types.pyc b/google_appengine/google/appengine/api/datastore_types.pyc
new file mode 100644
index 0000000..5c19ce2
--- /dev/null
+++ b/google_appengine/google/appengine/api/datastore_types.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/images/__init__.py b/google_appengine/google/appengine/api/images/__init__.py
new file mode 100755
index 0000000..757afcf
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/__init__.py
@@ -0,0 +1,827 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Image manipulation API.
+
+Classes defined in this module:
+ Image: class used to encapsulate image information and transformations for
+ that image.
+
+ The current manipulations that are available are resize, rotate,
+ horizontal_flip, vertical_flip, crop and im_feeling_lucky.
+
+ It should be noted that each transform can only be called once per image
+ per execute_transforms() call.
+"""
+
+
+
+import struct
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api.images import images_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+JPEG = images_service_pb.OutputSettings.JPEG
+PNG = images_service_pb.OutputSettings.PNG
+
+OUTPUT_ENCODING_TYPES = frozenset([JPEG, PNG])
+
+TOP_LEFT = images_service_pb.CompositeImageOptions.TOP_LEFT
+TOP_CENTER = images_service_pb.CompositeImageOptions.TOP
+TOP_RIGHT = images_service_pb.CompositeImageOptions.TOP_RIGHT
+CENTER_LEFT = images_service_pb.CompositeImageOptions.LEFT
+CENTER_CENTER = images_service_pb.CompositeImageOptions.CENTER
+CENTER_RIGHT = images_service_pb.CompositeImageOptions.RIGHT
+BOTTOM_LEFT = images_service_pb.CompositeImageOptions.BOTTOM_LEFT
+BOTTOM_CENTER = images_service_pb.CompositeImageOptions.BOTTOM
+BOTTOM_RIGHT = images_service_pb.CompositeImageOptions.BOTTOM_RIGHT
+
+ANCHOR_TYPES = frozenset([TOP_LEFT, TOP_CENTER, TOP_RIGHT, CENTER_LEFT,
+ CENTER_CENTER, CENTER_RIGHT, BOTTOM_LEFT,
+ BOTTOM_CENTER, BOTTOM_RIGHT])
+
+MAX_TRANSFORMS_PER_REQUEST = 10
+
+MAX_COMPOSITES_PER_REQUEST = 16
+
+
+class Error(Exception):
+ """Base error class for this module."""
+
+
+class TransformationError(Error):
+ """Error while attempting to transform the image."""
+
+
+class BadRequestError(Error):
+ """The parameters given had something wrong with them."""
+
+
+class NotImageError(Error):
+ """The image data given is not recognizable as an image."""
+
+
+class BadImageError(Error):
+ """The image data given is corrupt."""
+
+
+class LargeImageError(Error):
+ """The image data given is too large to process."""
+
+
+class Image(object):
+ """Image object to manipulate."""
+
+ def __init__(self, image_data):
+ """Constructor.
+
+ Args:
+ image_data: str, image data in string form.
+
+ Raises:
+ NotImageError if the given data is empty.
+ """
+ if not image_data:
+ raise NotImageError("Empty image data.")
+
+ self._image_data = image_data
+ self._transforms = []
+ self._width = None
+ self._height = None
+
+ def _check_transform_limits(self):
+ """Ensure some simple limits on the number of transforms allowed.
+
+ Raises:
+ BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
+ requested for this image
+ """
+ if len(self._transforms) >= MAX_TRANSFORMS_PER_REQUEST:
+ raise BadRequestError("%d transforms have already been requested on this "
+ "image." % MAX_TRANSFORMS_PER_REQUEST)
+
+ def _update_dimensions(self):
+ """Updates the width and height fields of the image.
+
+ Raises:
+ NotImageError if the image data is not an image.
+ BadImageError if the image data is corrupt.
+ """
+ size = len(self._image_data)
+ if size >= 6 and self._image_data.startswith("GIF"):
+ self._update_gif_dimensions()
+ elif size >= 8 and self._image_data.startswith("\x89PNG\x0D\x0A\x1A\x0A"):
+ self._update_png_dimensions()
+ elif size >= 2 and self._image_data.startswith("\xff\xD8"):
+ self._update_jpeg_dimensions()
+ elif (size >= 8 and (self._image_data.startswith("II\x2a\x00") or
+ self._image_data.startswith("MM\x00\x2a"))):
+ self._update_tiff_dimensions()
+ elif size >= 2 and self._image_data.startswith("BM"):
+ self._update_bmp_dimensions()
+ elif size >= 4 and self._image_data.startswith("\x00\x00\x01\x00"):
+ self._update_ico_dimensions()
+ else:
+ raise NotImageError("Unrecognized image format")
+
+ def _update_gif_dimensions(self):
+ """Updates the width and height fields of the gif image.
+
+ Raises:
+ BadImageError if the image string is not a valid gif image.
+ """
+ size = len(self._image_data)
+ if size >= 10:
+ self._width, self._height = struct.unpack("<HH", self._image_data[6:10])
+ else:
+ raise BadImageError("Corrupt GIF format")
+
+ def _update_png_dimensions(self):
+ """Updates the width and height fields of the png image.
+
+ Raises:
+ BadImageError if the image string is not a valid png image.
+ """
+ size = len(self._image_data)
+ if size >= 24 and self._image_data[12:16] == "IHDR":
+ self._width, self._height = struct.unpack(">II", self._image_data[16:24])
+ else:
+ raise BadImageError("Corrupt PNG format")
+
+ def _update_jpeg_dimensions(self):
+ """Updates the width and height fields of the jpeg image.
+
+ Raises:
+ BadImageError if the image string is not a valid jpeg image.
+ """
+ size = len(self._image_data)
+ offset = 2
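+    # JPEG is a sequence of 0xFF-prefixed marker segments. Scan forward for a
+    # start-of-frame marker (0xC0-0xCF, excluding 0xC4 here), whose payload
+    # carries precision (1 byte) then height and width as big-endian shorts;
+    # all other segments are skipped via their declared 2-byte length.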
+ while offset < size:
+ while offset < size and ord(self._image_data[offset]) != 0xFF:
+ offset += 1
+ while offset < size and ord(self._image_data[offset]) == 0xFF:
+ offset += 1
+ if (offset < size and ord(self._image_data[offset]) & 0xF0 == 0xC0 and
+ ord(self._image_data[offset]) != 0xC4):
+ offset += 4
+ if offset + 4 <= size:
+ self._height, self._width = struct.unpack(
+ ">HH",
+ self._image_data[offset:offset + 4])
+ break
+ else:
+ raise BadImageError("Corrupt JPEG format")
+ elif offset + 3 <= size:
+ offset += 1
+ offset += struct.unpack(">H", self._image_data[offset:offset + 2])[0]
+ else:
+ raise BadImageError("Corrupt JPEG format")
+ if self._height is None or self._width is None:
+ raise BadImageError("Corrupt JPEG format")
+
+ def _update_tiff_dimensions(self):
+ """Updates the width and height fields of the tiff image.
+
+ Raises:
+ BadImageError if the image string is not a valid tiff image.
+ """
+ size = len(self._image_data)
+ if self._image_data.startswith("II"):
+ endianness = "<"
+ else:
+ endianness = ">"
+ ifd_offset = struct.unpack(endianness + "I", self._image_data[4:8])[0]
+ if ifd_offset + 14 <= size:
+ ifd_size = struct.unpack(
+ endianness + "H",
+ self._image_data[ifd_offset:ifd_offset + 2])[0]
+ ifd_offset += 2
+ for unused_i in range(0, ifd_size):
+ if ifd_offset + 12 <= size:
+ tag = struct.unpack(
+ endianness + "H",
+ self._image_data[ifd_offset:ifd_offset + 2])[0]
+ if tag == 0x100 or tag == 0x101:
+ value_type = struct.unpack(
+ endianness + "H",
+ self._image_data[ifd_offset + 2:ifd_offset + 4])[0]
+ if value_type == 3:
+ format = endianness + "H"
+ end_offset = ifd_offset + 10
+ elif value_type == 4:
+ format = endianness + "I"
+ end_offset = ifd_offset + 12
+ else:
+ format = endianness + "B"
+ end_offset = ifd_offset + 9
+ if tag == 0x100:
+ self._width = struct.unpack(
+ format,
+ self._image_data[ifd_offset + 8:end_offset])[0]
+ if self._height is not None:
+ break
+ else:
+ self._height = struct.unpack(
+ format,
+ self._image_data[ifd_offset + 8:end_offset])[0]
+ if self._width is not None:
+ break
+ ifd_offset += 12
+ else:
+ raise BadImageError("Corrupt TIFF format")
+ if self._width is None or self._height is None:
+ raise BadImageError("Corrupt TIFF format")
+
+ def _update_bmp_dimensions(self):
+ """Updates the width and height fields of the bmp image.
+
+ Raises:
+ BadImageError if the image string is not a valid bmp image.
+ """
+ size = len(self._image_data)
+ if size >= 18:
+ header_length = struct.unpack("<I", self._image_data[14:18])[0]
+ if ((header_length == 40 or header_length == 108 or
+ header_length == 124 or header_length == 64) and size >= 26):
+ self._width, self._height = struct.unpack("<II",
+ self._image_data[18:26])
+ elif header_length == 12 and size >= 22:
+ self._width, self._height = struct.unpack("<HH",
+ self._image_data[18:22])
+ else:
+ raise BadImageError("Corrupt BMP format")
+ else:
+ raise BadImageError("Corrupt BMP format")
+
+ def _update_ico_dimensions(self):
+ """Updates the width and height fields of the ico image.
+
+ Raises:
+ BadImageError if the image string is not a valid ico image.
+ """
+ size = len(self._image_data)
+ if size >= 8:
+ self._width, self._height = struct.unpack("<BB", self._image_data[6:8])
+ if not self._width:
+ self._width = 256
+ if not self._height:
+ self._height = 256
+ else:
+ raise BadImageError("Corrupt ICO format")
+
+ def resize(self, width=0, height=0):
+ """Resize the image maintaining the aspect ratio.
+
+ If both width and height are specified, the more restricting of the two
+ values will be used when resizing the photo. The maximum dimension allowed
+ for both width and height is 4000 pixels.
+
+ Args:
+ width: int, width (in pixels) to change the image width to.
+ height: int, height (in pixels) to change the image height to.
+
+ Raises:
+      TypeError when width or height is not of type 'int' or 'long'.
+ BadRequestError when there is something wrong with the given height or
+ width or if MAX_TRANSFORMS_PER_REQUEST transforms have already been
+ requested on this image.
+ """
+ if (not isinstance(width, (int, long)) or
+ not isinstance(height, (int, long))):
+ raise TypeError("Width and height must be integers.")
+ if width < 0 or height < 0:
+ raise BadRequestError("Width and height must be >= 0.")
+
+ if not width and not height:
+ raise BadRequestError("At least one of width or height must be > 0.")
+
+ if width > 4000 or height > 4000:
+ raise BadRequestError("Both width and height must be <= 4000.")
+
+ self._check_transform_limits()
+
+ transform = images_service_pb.Transform()
+ transform.set_width(width)
+ transform.set_height(height)
+
+ self._transforms.append(transform)
+
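+  # Sketch: transforms accumulate on the Image and are applied in order by a
+  # single execute_transforms() call, e.g. (jpeg_bytes is a hypothetical str
+  # of source image data):
+  #   img = Image(jpeg_bytes)
+  #   img.resize(width=100)
+  #   img.rotate(90)
+  #   png_bytes = img.execute_transforms(output_encoding=PNG)
+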
+ def rotate(self, degrees):
+ """Rotate an image a given number of degrees clockwise.
+
+ Args:
+ degrees: int, must be a multiple of 90.
+
+ Raises:
+      TypeError when degrees is not of type 'int' or 'long'.
+ BadRequestError when there is something wrong with the given degrees or
+ if MAX_TRANSFORMS_PER_REQUEST transforms have already been requested.
+ """
+ if not isinstance(degrees, (int, long)):
+ raise TypeError("Degrees must be integers.")
+
+ if degrees % 90 != 0:
+ raise BadRequestError("degrees argument must be multiple of 90.")
+
+ degrees = degrees % 360
+
+ self._check_transform_limits()
+
+ transform = images_service_pb.Transform()
+ transform.set_rotate(degrees)
+
+ self._transforms.append(transform)
+
+ def horizontal_flip(self):
+ """Flip the image horizontally.
+
+ Raises:
+ BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
+ requested on the image.
+ """
+ self._check_transform_limits()
+
+ transform = images_service_pb.Transform()
+ transform.set_horizontal_flip(True)
+
+ self._transforms.append(transform)
+
+ def vertical_flip(self):
+ """Flip the image vertically.
+
+ Raises:
+ BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
+ requested on the image.
+ """
+ self._check_transform_limits()
+ transform = images_service_pb.Transform()
+ transform.set_vertical_flip(True)
+
+ self._transforms.append(transform)
+
+ def _validate_crop_arg(self, val, val_name):
+ """Validate the given value of a Crop() method argument.
+
+ Args:
+ val: float, value of the argument.
+ val_name: str, name of the argument.
+
+ Raises:
+ TypeError if the args are not of type 'float'.
+ BadRequestError when there is something wrong with the given bounding box.
+ """
+ if type(val) != float:
+ raise TypeError("arg '%s' must be of type 'float'." % val_name)
+
+ if not (0 <= val <= 1.0):
+ raise BadRequestError("arg '%s' must be between 0.0 and 1.0 "
+ "(inclusive)" % val_name)
+
+ def crop(self, left_x, top_y, right_x, bottom_y):
+ """Crop the image.
+
+ The four arguments are the scaling numbers to describe the bounding box
+ which will crop the image. The upper left point of the bounding box will
+ be at (left_x*image_width, top_y*image_height) the lower right point will
+ be at (right_x*image_width, bottom_y*image_height).
+
+ Args:
+ left_x: float value between 0.0 and 1.0 (inclusive).
+ top_y: float value between 0.0 and 1.0 (inclusive).
+ right_x: float value between 0.0 and 1.0 (inclusive).
+ bottom_y: float value between 0.0 and 1.0 (inclusive).
+
+ Raises:
+ TypeError if the args are not of type 'float'.
+ BadRequestError when there is something wrong with the given bounding box
+ or if MAX_TRANSFORMS_PER_REQUEST transforms have already been requested
+ for this image.
+ """
+ self._validate_crop_arg(left_x, "left_x")
+ self._validate_crop_arg(top_y, "top_y")
+ self._validate_crop_arg(right_x, "right_x")
+ self._validate_crop_arg(bottom_y, "bottom_y")
+
+ if left_x >= right_x:
+ raise BadRequestError("left_x must be less than right_x")
+ if top_y >= bottom_y:
+ raise BadRequestError("top_y must be less than bottom_y")
+
+ self._check_transform_limits()
+
+ transform = images_service_pb.Transform()
+ transform.set_crop_left_x(left_x)
+ transform.set_crop_top_y(top_y)
+ transform.set_crop_right_x(right_x)
+ transform.set_crop_bottom_y(bottom_y)
+
+ self._transforms.append(transform)
+
+ def im_feeling_lucky(self):
+ """Automatically adjust image contrast and color levels.
+
+ This is similar to the "I'm Feeling Lucky" button in Picasa.
+
+ Raises:
+ BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already
+ been requested for this image.
+ """
+ self._check_transform_limits()
+ transform = images_service_pb.Transform()
+ transform.set_autolevels(True)
+
+ self._transforms.append(transform)
+
+ def execute_transforms(self, output_encoding=PNG):
+ """Perform transformations on given image.
+
+ Args:
+ output_encoding: A value from OUTPUT_ENCODING_TYPES.
+
+ Returns:
+ str, image data after the transformations have been performed on it.
+
+ Raises:
+ BadRequestError when there is something wrong with the request
+ specifications.
+ NotImageError when the image data given is not an image.
+ BadImageError when the image data given is corrupt.
+ LargeImageError when the image data given is too large to process.
+      TransformationError when something errors during image manipulation.
+ Error when something unknown, but bad, happens.
+ """
+ if output_encoding not in OUTPUT_ENCODING_TYPES:
+ raise BadRequestError("Output encoding type not in recognized set "
+ "%s" % OUTPUT_ENCODING_TYPES)
+
+ if not self._transforms:
+ raise BadRequestError("Must specify at least one transformation.")
+
+ request = images_service_pb.ImagesTransformRequest()
+ response = images_service_pb.ImagesTransformResponse()
+
+ request.mutable_image().set_content(self._image_data)
+
+ for transform in self._transforms:
+ request.add_transform().CopyFrom(transform)
+
+ request.mutable_output().set_mime_type(output_encoding)
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("images",
+ "Transform",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA):
+ raise BadRequestError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.NOT_IMAGE):
+ raise NotImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
+ raise BadImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
+ raise LargeImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR):
+ raise TransformationError()
+ else:
+ raise Error()
+
+ self._image_data = response.image().content()
+ self._transforms = []
+ self._width = None
+ self._height = None
+ return self._image_data
+
+ @property
+ def width(self):
+ """Gets the width of the image."""
+ if self._width is None:
+ self._update_dimensions()
+ return self._width
+
+ @property
+ def height(self):
+ """Gets the height of the image."""
+ if self._height is None:
+ self._update_dimensions()
+ return self._height
+
+ def histogram(self):
+ """Calculates the histogram of the image.
+
+    Returns: 3 256-element lists containing the number of occurrences of each
+    value of each color in the order RGB, as described at
+    http://en.wikipedia.org/wiki/Color_histogram for N = 256. E.g. the first
+    value of the first list is the number of pixels with a red value of 0,
+    the second the number with a red value of 1.
+
+ Raises:
+ NotImageError when the image data given is not an image.
+ BadImageError when the image data given is corrupt.
+ LargeImageError when the image data given is too large to process.
+ Error when something unknown, but bad, happens.
+ """
+ request = images_service_pb.ImagesHistogramRequest()
+ response = images_service_pb.ImagesHistogramResponse()
+
+ request.mutable_image().set_content(self._image_data)
+ try:
+ apiproxy_stub_map.MakeSyncCall("images",
+ "Histogram",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ images_service_pb.ImagesServiceError.NOT_IMAGE):
+ raise NotImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
+ raise BadImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
+ raise LargeImageError()
+ else:
+ raise Error()
+ histogram = response.histogram()
+ return [histogram.red_list(),
+ histogram.green_list(),
+ histogram.blue_list()]
+
+
+def resize(image_data, width=0, height=0, output_encoding=PNG):
+ """Resize a given image file maintaining the aspect ratio.
+
+ If both width and height are specified, the more restricting of the two
+ values will be used when resizing the photo. The maximum dimension allowed
+ for both width and height is 4000 pixels.
+
+ Args:
+ image_data: str, source image data.
+ width: int, width (in pixels) to change the image width to.
+ height: int, height (in pixels) to change the image height to.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    TypeError when width or height is not of type 'int' or 'long'.
+ BadRequestError when there is something wrong with the given height or
+ width.
+    Error when something went wrong with the call. See Image.execute_transforms
+ for more details.
+ """
+ image = Image(image_data)
+ image.resize(width, height)
+ return image.execute_transforms(output_encoding=output_encoding)
+
+
+def rotate(image_data, degrees, output_encoding=PNG):
+ """Rotate a given image a given number of degrees clockwise.
+
+ Args:
+ image_data: str, source image data.
+    degrees: int, must be a multiple of 90.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    TypeError when degrees is not of type 'int' or 'long'.
+ BadRequestError when there is something wrong with the given degrees.
+    Error when something went wrong with the call. See Image.execute_transforms
+ for more details.
+ """
+ image = Image(image_data)
+ image.rotate(degrees)
+ return image.execute_transforms(output_encoding=output_encoding)
+
+
+def horizontal_flip(image_data, output_encoding=PNG):
+ """Flip the image horizontally.
+
+ Args:
+ image_data: str, source image data.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    Error when something went wrong with the call. See Image.execute_transforms
+ for more details.
+ """
+ image = Image(image_data)
+ image.horizontal_flip()
+ return image.execute_transforms(output_encoding=output_encoding)
+
+
+def vertical_flip(image_data, output_encoding=PNG):
+ """Flip the image vertically.
+
+ Args:
+ image_data: str, source image data.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    Error when something went wrong with the call. See Image.execute_transforms
+ for more details.
+ """
+ image = Image(image_data)
+ image.vertical_flip()
+ return image.execute_transforms(output_encoding=output_encoding)
+
+
+def crop(image_data, left_x, top_y, right_x, bottom_y, output_encoding=PNG):
+ """Crop the given image.
+
+ The four arguments are the scaling numbers to describe the bounding box
+ which will crop the image. The upper left point of the bounding box will
+ be at (left_x*image_width, top_y*image_height) the lower right point will
+ be at (right_x*image_width, bottom_y*image_height).
+
+ Args:
+ image_data: str, source image data.
+ left_x: float value between 0.0 and 1.0 (inclusive).
+ top_y: float value between 0.0 and 1.0 (inclusive).
+ right_x: float value between 0.0 and 1.0 (inclusive).
+ bottom_y: float value between 0.0 and 1.0 (inclusive).
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+ TypeError if the args are not of type 'float'.
+ BadRequestError when there is something wrong with the given bounding box.
+    Error when something went wrong with the call. See Image.execute_transforms
+ for more details.
+ """
+ image = Image(image_data)
+ image.crop(left_x, top_y, right_x, bottom_y)
+ return image.execute_transforms(output_encoding=output_encoding)
+
+
+def im_feeling_lucky(image_data, output_encoding=PNG):
+ """Automatically adjust image levels.
+
+ This is similar to the "I'm Feeling Lucky" button in Picasa.
+
+ Args:
+ image_data: str, source image data.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Raises:
+    Error when something went wrong with the call. See Image.execute_transforms
+ for more details.
+ """
+ image = Image(image_data)
+ image.im_feeling_lucky()
+ return image.execute_transforms(output_encoding=output_encoding)
+
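+# Sketch of composite() usage (photo_png and logo_png are hypothetical str
+# image blobs): overlay a half-transparent logo on a photo placed on a
+# 640x480 opaque black canvas.
+#
+#   result = composite([(photo_png, 0, 0, 1.0, TOP_LEFT),
+#                       (logo_png, -10, 10, 0.5, TOP_RIGHT)],
+#                      width=640, height=480, color=0xff000000)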
+def composite(inputs, width, height, color=0, output_encoding=PNG):
+ """Composite one or more images onto a canvas.
+
+ Args:
+    inputs: a list of tuples (image_data, x_offset, y_offset, opacity, anchor)
+      where
+        image_data: str, source image data.
+        x_offset: x offset in pixels from the anchor position.
+        y_offset: y offset in pixels from the anchor position.
+        opacity: opacity of the image specified as a float in range [0.0, 1.0].
+        anchor: anchoring point from ANCHOR_TYPES. The anchor point of the
+          image is aligned with the same anchor point of the canvas, e.g.
+          TOP_RIGHT would place the top right corner of the image at the top
+          right corner of the canvas, then apply the x and y offsets.
+ width: canvas width in pixels.
+ height: canvas height in pixels.
+ color: canvas background color encoded as a 32 bit unsigned int where each
+ color channel is represented by one byte in order ARGB.
+ output_encoding: a value from OUTPUT_ENCODING_TYPES.
+
+ Returns:
+ str, image data of the composited image.
+
+ Raises:
+    TypeError if width, height, color, x_offset or y_offset are not of type
+    int or long, or if opacity is not a float.
+    BadRequestError if more than MAX_COMPOSITES_PER_REQUEST compositions have
+    been requested, if the canvas width or height is greater than 4000 or
+    less than or equal to 0, if the color is invalid, or if for any
+    composition option the opacity is outside the range [0,1] or the anchor
+    is invalid.
+ """
+ if (not isinstance(width, (int, long)) or
+ not isinstance(height, (int, long)) or
+ not isinstance(color, (int, long))):
+ raise TypeError("Width, height and color must be integers.")
+ if output_encoding not in OUTPUT_ENCODING_TYPES:
+ raise BadRequestError("Output encoding type '%s' not in recognized set "
+ "%s" % (output_encoding, OUTPUT_ENCODING_TYPES))
+
+ if not inputs:
+ raise BadRequestError("Must provide at least one input")
+ if len(inputs) > MAX_COMPOSITES_PER_REQUEST:
+ raise BadRequestError("A maximum of %d composition operations can be"
+ "performed in a single request" %
+ MAX_COMPOSITES_PER_REQUEST)
+
+ if width <= 0 or height <= 0:
+ raise BadRequestError("Width and height must be > 0.")
+ if width > 4000 or height > 4000:
+ raise BadRequestError("Width and height must be <= 4000.")
+
+ if color > 0xffffffff or color < 0:
+ raise BadRequestError("Invalid color")
+ if color >= 0x80000000:
+ color -= 0x100000000
+
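+  # Each distinct image payload is added to the request only once; image_map
+  # remembers its index so repeated inputs can share a source_index.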
+ image_map = {}
+
+ request = images_service_pb.ImagesCompositeRequest()
+ response = images_service_pb.ImagesTransformResponse()
+ for (image, x, y, opacity, anchor) in inputs:
+ if not image:
+ raise BadRequestError("Each input must include an image")
+    if (not isinstance(x, (int, long)) or
+        not isinstance(y, (int, long)) or
+        not isinstance(opacity, float)):
+      raise TypeError("x_offset, y_offset must be integers and opacity must "
+                      "be a float")
+    if x > 4000 or x < -4000:
+      raise BadRequestError("x_offset must be in range [-4000, 4000]")
+    if y > 4000 or y < -4000:
+      raise BadRequestError("y_offset must be in range [-4000, 4000]")
+ if opacity < 0 or opacity > 1:
+ raise BadRequestError("Opacity must be in the range 0.0 to 1.0")
+ if anchor not in ANCHOR_TYPES:
+ raise BadRequestError("Anchor type '%s' not in recognized set %s" %
+ (anchor, ANCHOR_TYPES))
+ if image not in image_map:
+ image_map[image] = request.image_size()
+ request.add_image().set_content(image)
+
+ option = request.add_options()
+ option.set_x_offset(x)
+ option.set_y_offset(y)
+ option.set_opacity(opacity)
+ option.set_anchor(anchor)
+ option.set_source_index(image_map[image])
+
+ request.mutable_canvas().mutable_output().set_mime_type(output_encoding)
+ request.mutable_canvas().set_width(width)
+ request.mutable_canvas().set_height(height)
+ request.mutable_canvas().set_color(color)
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("images",
+ "Composite",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA):
+ raise BadRequestError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.NOT_IMAGE):
+ raise NotImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
+ raise BadImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
+ raise LargeImageError()
+ elif (e.application_error ==
+ images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR):
+ raise TransformationError()
+ else:
+ raise Error()
+
+ return response.image().content()
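+
+# Example (illustrative only; `photo` and `logo` are assumed to hold encoded
+# image bytes): overlay a half-opaque logo near the bottom right corner of
+# an 800x600 opaque black canvas.
+#   result = composite([(photo, 0, 0, 1.0, TOP_LEFT),
+#                       (logo, -10, -10, 0.5, BOTTOM_RIGHT)],
+#                      800, 600, color=0xff000000, output_encoding=JPEG)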
+
+
+def histogram(image_data):
+ """Calculates the histogram of the given image.
+
+ Args:
+ image_data: str, source image data.
+
+  Returns:
+    3 256-element lists containing the number of occurrences of each value of
+    each color in the order RGB.
+
+ Raises:
+ NotImageError when the image data given is not an image.
+ BadImageError when the image data given is corrupt.
+ LargeImageError when the image data given is too large to process.
+ Error when something unknown, but bad, happens.
+ """
+ image = Image(image_data)
+ return image.histogram()
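+
+# Example (illustrative only): estimate the mean red intensity from the
+# histogram.
+#   red, green, blue = histogram(image_data)
+#   mean_red = sum(i * n for i, n in enumerate(red)) / float(sum(red))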
diff --git a/google_appengine/google/appengine/api/images/__init__.pyc b/google_appengine/google/appengine/api/images/__init__.pyc
new file mode 100644
index 0000000..40ef57a
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/images/images_not_implemented_stub.py b/google_appengine/google/appengine/api/images/images_not_implemented_stub.py
new file mode 100755
index 0000000..30f6159
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_not_implemented_stub.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""A NotImplemented Images API stub for when the PIL library is not found."""
+
+
+
+class ImagesNotImplementedServiceStub(object):
+ """Stub version of images API which raises a NotImplementedError."""
+
+ def MakeSyncCall(self, service, call, request, response):
+ """Main entry point.
+
+ Args:
+ service: str, must be 'images'.
+ call: str, name of the RPC to make, must be part of ImagesService.
+ request: pb object, corresponding args to the 'call' argument.
+ response: pb object, return value for the 'call' argument.
+ """
+ raise NotImplementedError("Unable to find the Python PIL library. Please "
+ "view the SDK documentation for details about "
+ "installing PIL on your system.")
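+
+# Illustrative registration of this stub (a sketch assuming the standard
+# apiproxy stub map used by the development server):
+#   from google.appengine.api import apiproxy_stub_map
+#   apiproxy_stub_map.apiproxy.RegisterStub(
+#       'images', ImagesNotImplementedServiceStub())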
diff --git a/google_appengine/google/appengine/api/images/images_not_implemented_stub.pyc b/google_appengine/google/appengine/api/images/images_not_implemented_stub.pyc
new file mode 100644
index 0000000..6885635
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_not_implemented_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/images/images_service_pb.py b/google_appengine/google/appengine/api/images/images_service_pb.py
new file mode 100644
index 0000000..927040c
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_service_pb.py
@@ -0,0 +1,1988 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class ImagesServiceError(ProtocolBuffer.ProtocolMessage):
+
+ UNSPECIFIED_ERROR = 1
+ BAD_TRANSFORM_DATA = 2
+ NOT_IMAGE = 3
+ BAD_IMAGE_DATA = 4
+ IMAGE_TOO_LARGE = 5
+
+ _ErrorCode_NAMES = {
+ 1: "UNSPECIFIED_ERROR",
+ 2: "BAD_TRANSFORM_DATA",
+ 3: "NOT_IMAGE",
+ 4: "BAD_IMAGE_DATA",
+ 5: "IMAGE_TOO_LARGE",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesServiceTransform(ProtocolBuffer.ProtocolMessage):
+
+ RESIZE = 1
+ ROTATE = 2
+ HORIZONTAL_FLIP = 3
+ VERTICAL_FLIP = 4
+ CROP = 5
+ IM_FEELING_LUCKY = 6
+
+ _Type_NAMES = {
+ 1: "RESIZE",
+ 2: "ROTATE",
+ 3: "HORIZONTAL_FLIP",
+ 4: "VERTICAL_FLIP",
+ 5: "CROP",
+ 6: "IM_FEELING_LUCKY",
+ }
+
+ def Type_Name(cls, x): return cls._Type_NAMES.get(x, "")
+ Type_Name = classmethod(Type_Name)
+
+
+  def __init__(self, contents=None):
+    if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class Transform(ProtocolBuffer.ProtocolMessage):
+ has_width_ = 0
+ width_ = 0
+ has_height_ = 0
+ height_ = 0
+ has_rotate_ = 0
+ rotate_ = 0
+ has_horizontal_flip_ = 0
+ horizontal_flip_ = 0
+ has_vertical_flip_ = 0
+ vertical_flip_ = 0
+ has_crop_left_x_ = 0
+ crop_left_x_ = 0.0
+ has_crop_top_y_ = 0
+ crop_top_y_ = 0.0
+ has_crop_right_x_ = 0
+ crop_right_x_ = 1.0
+ has_crop_bottom_y_ = 0
+ crop_bottom_y_ = 1.0
+ has_autolevels_ = 0
+ autolevels_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def width(self): return self.width_
+
+ def set_width(self, x):
+ self.has_width_ = 1
+ self.width_ = x
+
+ def clear_width(self):
+ if self.has_width_:
+ self.has_width_ = 0
+ self.width_ = 0
+
+ def has_width(self): return self.has_width_
+
+ def height(self): return self.height_
+
+ def set_height(self, x):
+ self.has_height_ = 1
+ self.height_ = x
+
+ def clear_height(self):
+ if self.has_height_:
+ self.has_height_ = 0
+ self.height_ = 0
+
+ def has_height(self): return self.has_height_
+
+ def rotate(self): return self.rotate_
+
+ def set_rotate(self, x):
+ self.has_rotate_ = 1
+ self.rotate_ = x
+
+ def clear_rotate(self):
+ if self.has_rotate_:
+ self.has_rotate_ = 0
+ self.rotate_ = 0
+
+ def has_rotate(self): return self.has_rotate_
+
+ def horizontal_flip(self): return self.horizontal_flip_
+
+ def set_horizontal_flip(self, x):
+ self.has_horizontal_flip_ = 1
+ self.horizontal_flip_ = x
+
+ def clear_horizontal_flip(self):
+ if self.has_horizontal_flip_:
+ self.has_horizontal_flip_ = 0
+ self.horizontal_flip_ = 0
+
+ def has_horizontal_flip(self): return self.has_horizontal_flip_
+
+ def vertical_flip(self): return self.vertical_flip_
+
+ def set_vertical_flip(self, x):
+ self.has_vertical_flip_ = 1
+ self.vertical_flip_ = x
+
+ def clear_vertical_flip(self):
+ if self.has_vertical_flip_:
+ self.has_vertical_flip_ = 0
+ self.vertical_flip_ = 0
+
+ def has_vertical_flip(self): return self.has_vertical_flip_
+
+ def crop_left_x(self): return self.crop_left_x_
+
+ def set_crop_left_x(self, x):
+ self.has_crop_left_x_ = 1
+ self.crop_left_x_ = x
+
+ def clear_crop_left_x(self):
+ if self.has_crop_left_x_:
+ self.has_crop_left_x_ = 0
+ self.crop_left_x_ = 0.0
+
+ def has_crop_left_x(self): return self.has_crop_left_x_
+
+ def crop_top_y(self): return self.crop_top_y_
+
+ def set_crop_top_y(self, x):
+ self.has_crop_top_y_ = 1
+ self.crop_top_y_ = x
+
+ def clear_crop_top_y(self):
+ if self.has_crop_top_y_:
+ self.has_crop_top_y_ = 0
+ self.crop_top_y_ = 0.0
+
+ def has_crop_top_y(self): return self.has_crop_top_y_
+
+ def crop_right_x(self): return self.crop_right_x_
+
+ def set_crop_right_x(self, x):
+ self.has_crop_right_x_ = 1
+ self.crop_right_x_ = x
+
+ def clear_crop_right_x(self):
+ if self.has_crop_right_x_:
+ self.has_crop_right_x_ = 0
+ self.crop_right_x_ = 1.0
+
+ def has_crop_right_x(self): return self.has_crop_right_x_
+
+ def crop_bottom_y(self): return self.crop_bottom_y_
+
+ def set_crop_bottom_y(self, x):
+ self.has_crop_bottom_y_ = 1
+ self.crop_bottom_y_ = x
+
+ def clear_crop_bottom_y(self):
+ if self.has_crop_bottom_y_:
+ self.has_crop_bottom_y_ = 0
+ self.crop_bottom_y_ = 1.0
+
+ def has_crop_bottom_y(self): return self.has_crop_bottom_y_
+
+ def autolevels(self): return self.autolevels_
+
+ def set_autolevels(self, x):
+ self.has_autolevels_ = 1
+ self.autolevels_ = x
+
+ def clear_autolevels(self):
+ if self.has_autolevels_:
+ self.has_autolevels_ = 0
+ self.autolevels_ = 0
+
+ def has_autolevels(self): return self.has_autolevels_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_width()): self.set_width(x.width())
+ if (x.has_height()): self.set_height(x.height())
+ if (x.has_rotate()): self.set_rotate(x.rotate())
+ if (x.has_horizontal_flip()): self.set_horizontal_flip(x.horizontal_flip())
+ if (x.has_vertical_flip()): self.set_vertical_flip(x.vertical_flip())
+ if (x.has_crop_left_x()): self.set_crop_left_x(x.crop_left_x())
+ if (x.has_crop_top_y()): self.set_crop_top_y(x.crop_top_y())
+ if (x.has_crop_right_x()): self.set_crop_right_x(x.crop_right_x())
+ if (x.has_crop_bottom_y()): self.set_crop_bottom_y(x.crop_bottom_y())
+ if (x.has_autolevels()): self.set_autolevels(x.autolevels())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_width_ != x.has_width_: return 0
+ if self.has_width_ and self.width_ != x.width_: return 0
+ if self.has_height_ != x.has_height_: return 0
+ if self.has_height_ and self.height_ != x.height_: return 0
+ if self.has_rotate_ != x.has_rotate_: return 0
+ if self.has_rotate_ and self.rotate_ != x.rotate_: return 0
+ if self.has_horizontal_flip_ != x.has_horizontal_flip_: return 0
+ if self.has_horizontal_flip_ and self.horizontal_flip_ != x.horizontal_flip_: return 0
+ if self.has_vertical_flip_ != x.has_vertical_flip_: return 0
+ if self.has_vertical_flip_ and self.vertical_flip_ != x.vertical_flip_: return 0
+ if self.has_crop_left_x_ != x.has_crop_left_x_: return 0
+ if self.has_crop_left_x_ and self.crop_left_x_ != x.crop_left_x_: return 0
+ if self.has_crop_top_y_ != x.has_crop_top_y_: return 0
+ if self.has_crop_top_y_ and self.crop_top_y_ != x.crop_top_y_: return 0
+ if self.has_crop_right_x_ != x.has_crop_right_x_: return 0
+ if self.has_crop_right_x_ and self.crop_right_x_ != x.crop_right_x_: return 0
+ if self.has_crop_bottom_y_ != x.has_crop_bottom_y_: return 0
+ if self.has_crop_bottom_y_ and self.crop_bottom_y_ != x.crop_bottom_y_: return 0
+ if self.has_autolevels_ != x.has_autolevels_: return 0
+ if self.has_autolevels_ and self.autolevels_ != x.autolevels_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_width_): n += 1 + self.lengthVarInt64(self.width_)
+ if (self.has_height_): n += 1 + self.lengthVarInt64(self.height_)
+ if (self.has_rotate_): n += 1 + self.lengthVarInt64(self.rotate_)
+ if (self.has_horizontal_flip_): n += 2
+ if (self.has_vertical_flip_): n += 2
+ if (self.has_crop_left_x_): n += 5
+ if (self.has_crop_top_y_): n += 5
+ if (self.has_crop_right_x_): n += 5
+ if (self.has_crop_bottom_y_): n += 5
+ if (self.has_autolevels_): n += 2
+ return n + 0
+
+ def Clear(self):
+ self.clear_width()
+ self.clear_height()
+ self.clear_rotate()
+ self.clear_horizontal_flip()
+ self.clear_vertical_flip()
+ self.clear_crop_left_x()
+ self.clear_crop_top_y()
+ self.clear_crop_right_x()
+ self.clear_crop_bottom_y()
+ self.clear_autolevels()
+
+ def OutputUnchecked(self, out):
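+    # Wire tags below are (field_number << 3) | wire_type; e.g. 8 is field 1
+    # as a varint and 53 is field 6 as a 32-bit float.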
+ if (self.has_width_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.width_)
+ if (self.has_height_):
+ out.putVarInt32(16)
+ out.putVarInt32(self.height_)
+ if (self.has_rotate_):
+ out.putVarInt32(24)
+ out.putVarInt32(self.rotate_)
+ if (self.has_horizontal_flip_):
+ out.putVarInt32(32)
+ out.putBoolean(self.horizontal_flip_)
+ if (self.has_vertical_flip_):
+ out.putVarInt32(40)
+ out.putBoolean(self.vertical_flip_)
+ if (self.has_crop_left_x_):
+ out.putVarInt32(53)
+ out.putFloat(self.crop_left_x_)
+ if (self.has_crop_top_y_):
+ out.putVarInt32(61)
+ out.putFloat(self.crop_top_y_)
+ if (self.has_crop_right_x_):
+ out.putVarInt32(69)
+ out.putFloat(self.crop_right_x_)
+ if (self.has_crop_bottom_y_):
+ out.putVarInt32(77)
+ out.putFloat(self.crop_bottom_y_)
+ if (self.has_autolevels_):
+ out.putVarInt32(80)
+ out.putBoolean(self.autolevels_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_width(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.set_height(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.set_rotate(d.getVarInt32())
+ continue
+ if tt == 32:
+ self.set_horizontal_flip(d.getBoolean())
+ continue
+ if tt == 40:
+ self.set_vertical_flip(d.getBoolean())
+ continue
+ if tt == 53:
+ self.set_crop_left_x(d.getFloat())
+ continue
+ if tt == 61:
+ self.set_crop_top_y(d.getFloat())
+ continue
+ if tt == 69:
+ self.set_crop_right_x(d.getFloat())
+ continue
+ if tt == 77:
+ self.set_crop_bottom_y(d.getFloat())
+ continue
+ if tt == 80:
+ self.set_autolevels(d.getBoolean())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_width_: res+=prefix+("width: %s\n" % self.DebugFormatInt32(self.width_))
+ if self.has_height_: res+=prefix+("height: %s\n" % self.DebugFormatInt32(self.height_))
+ if self.has_rotate_: res+=prefix+("rotate: %s\n" % self.DebugFormatInt32(self.rotate_))
+ if self.has_horizontal_flip_: res+=prefix+("horizontal_flip: %s\n" % self.DebugFormatBool(self.horizontal_flip_))
+ if self.has_vertical_flip_: res+=prefix+("vertical_flip: %s\n" % self.DebugFormatBool(self.vertical_flip_))
+ if self.has_crop_left_x_: res+=prefix+("crop_left_x: %s\n" % self.DebugFormatFloat(self.crop_left_x_))
+ if self.has_crop_top_y_: res+=prefix+("crop_top_y: %s\n" % self.DebugFormatFloat(self.crop_top_y_))
+ if self.has_crop_right_x_: res+=prefix+("crop_right_x: %s\n" % self.DebugFormatFloat(self.crop_right_x_))
+ if self.has_crop_bottom_y_: res+=prefix+("crop_bottom_y: %s\n" % self.DebugFormatFloat(self.crop_bottom_y_))
+ if self.has_autolevels_: res+=prefix+("autolevels: %s\n" % self.DebugFormatBool(self.autolevels_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kwidth = 1
+ kheight = 2
+ krotate = 3
+ khorizontal_flip = 4
+ kvertical_flip = 5
+ kcrop_left_x = 6
+ kcrop_top_y = 7
+ kcrop_right_x = 8
+ kcrop_bottom_y = 9
+ kautolevels = 10
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "width",
+ 2: "height",
+ 3: "rotate",
+ 4: "horizontal_flip",
+ 5: "vertical_flip",
+ 6: "crop_left_x",
+ 7: "crop_top_y",
+ 8: "crop_right_x",
+ 9: "crop_bottom_y",
+ 10: "autolevels",
+ }, 10)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ 6: ProtocolBuffer.Encoder.FLOAT,
+ 7: ProtocolBuffer.Encoder.FLOAT,
+ 8: ProtocolBuffer.Encoder.FLOAT,
+ 9: ProtocolBuffer.Encoder.FLOAT,
+ 10: ProtocolBuffer.Encoder.NUMERIC,
+ }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
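+
+# Illustrative round trip (a sketch; Encode() comes from the ProtocolMessage
+# base class): Encode() produces the wire bytes that the constructor parses
+# back via MergeFromString().
+#   t = Transform()
+#   t.set_rotate(90)
+#   t2 = Transform(t.Encode())
+#   assert t2.rotate() == 90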
+class ImageData(ProtocolBuffer.ProtocolMessage):
+ has_content_ = 0
+ content_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def content(self): return self.content_
+
+ def set_content(self, x):
+ self.has_content_ = 1
+ self.content_ = x
+
+ def clear_content(self):
+ if self.has_content_:
+ self.has_content_ = 0
+ self.content_ = ""
+
+ def has_content(self): return self.has_content_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_content()): self.set_content(x.content())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_content_ != x.has_content_: return 0
+ if self.has_content_ and self.content_ != x.content_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_content_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: content not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.content_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_content()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.content_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_content(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_content_: res+=prefix+("content: %s\n" % self.DebugFormatString(self.content_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kcontent = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "content",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class OutputSettings(ProtocolBuffer.ProtocolMessage):
+
+ PNG = 0
+ JPEG = 1
+
+ _MIME_TYPE_NAMES = {
+ 0: "PNG",
+ 1: "JPEG",
+ }
+
+ def MIME_TYPE_Name(cls, x): return cls._MIME_TYPE_NAMES.get(x, "")
+ MIME_TYPE_Name = classmethod(MIME_TYPE_Name)
+
+ has_mime_type_ = 0
+ mime_type_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def mime_type(self): return self.mime_type_
+
+ def set_mime_type(self, x):
+ self.has_mime_type_ = 1
+ self.mime_type_ = x
+
+ def clear_mime_type(self):
+ if self.has_mime_type_:
+ self.has_mime_type_ = 0
+ self.mime_type_ = 0
+
+ def has_mime_type(self): return self.has_mime_type_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_mime_type()): self.set_mime_type(x.mime_type())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_mime_type_ != x.has_mime_type_: return 0
+ if self.has_mime_type_ and self.mime_type_ != x.mime_type_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_mime_type_): n += 1 + self.lengthVarInt64(self.mime_type_)
+ return n + 0
+
+ def Clear(self):
+ self.clear_mime_type()
+
+ def OutputUnchecked(self, out):
+ if (self.has_mime_type_):
+ out.putVarInt32(8)
+ out.putVarInt32(self.mime_type_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_mime_type(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_mime_type_: res+=prefix+("mime_type: %s\n" % self.DebugFormatInt32(self.mime_type_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kmime_type = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "mime_type",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesTransformRequest(ProtocolBuffer.ProtocolMessage):
+ has_image_ = 0
+ has_output_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = ImageData()
+ self.transform_ = []
+ self.output_ = OutputSettings()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image(self): return self.image_
+
+ def mutable_image(self): self.has_image_ = 1; return self.image_
+
+ def clear_image(self):self.has_image_ = 0; self.image_.Clear()
+
+ def has_image(self): return self.has_image_
+
+ def transform_size(self): return len(self.transform_)
+ def transform_list(self): return self.transform_
+
+ def transform(self, i):
+ return self.transform_[i]
+
+ def mutable_transform(self, i):
+ return self.transform_[i]
+
+ def add_transform(self):
+ x = Transform()
+ self.transform_.append(x)
+ return x
+
+ def clear_transform(self):
+ self.transform_ = []
+ def output(self): return self.output_
+
+ def mutable_output(self): self.has_output_ = 1; return self.output_
+
+ def clear_output(self):self.has_output_ = 0; self.output_.Clear()
+
+ def has_output(self): return self.has_output_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_image()): self.mutable_image().MergeFrom(x.image())
+ for i in xrange(x.transform_size()): self.add_transform().CopyFrom(x.transform(i))
+ if (x.has_output()): self.mutable_output().MergeFrom(x.output())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_image_ != x.has_image_: return 0
+ if self.has_image_ and self.image_ != x.image_: return 0
+ if len(self.transform_) != len(x.transform_): return 0
+ for e1, e2 in zip(self.transform_, x.transform_):
+ if e1 != e2: return 0
+ if self.has_output_ != x.has_output_: return 0
+ if self.has_output_ and self.output_ != x.output_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_image_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: image not set.')
+ elif not self.image_.IsInitialized(debug_strs): initialized = 0
+ for p in self.transform_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (not self.has_output_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: output not set.')
+ elif not self.output_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.image_.ByteSize())
+ n += 1 * len(self.transform_)
+ for i in xrange(len(self.transform_)): n += self.lengthString(self.transform_[i].ByteSize())
+ n += self.lengthString(self.output_.ByteSize())
+ return n + 2
+
+ def Clear(self):
+ self.clear_image()
+ self.clear_transform()
+ self.clear_output()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_.ByteSize())
+ self.image_.OutputUnchecked(out)
+ for i in xrange(len(self.transform_)):
+ out.putVarInt32(18)
+ out.putVarInt32(self.transform_[i].ByteSize())
+ self.transform_[i].OutputUnchecked(out)
+ out.putVarInt32(26)
+ out.putVarInt32(self.output_.ByteSize())
+ self.output_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_image().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_transform().TryMerge(tmp)
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_output().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_image_:
+ res+=prefix+"image <\n"
+ res+=self.image_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt=0
+ for e in self.transform_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("transform%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_output_:
+ res+=prefix+"output <\n"
+ res+=self.output_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+ ktransform = 2
+ koutput = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ 2: "transform",
+ 3: "output",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesTransformResponse(ProtocolBuffer.ProtocolMessage):
+ has_image_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = ImageData()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image(self): return self.image_
+
+ def mutable_image(self): self.has_image_ = 1; return self.image_
+
+ def clear_image(self):self.has_image_ = 0; self.image_.Clear()
+
+ def has_image(self): return self.has_image_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_image()): self.mutable_image().MergeFrom(x.image())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_image_ != x.has_image_: return 0
+ if self.has_image_ and self.image_ != x.image_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_image_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: image not set.')
+ elif not self.image_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.image_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_image()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_.ByteSize())
+ self.image_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_image().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_image_:
+ res+=prefix+"image <\n"
+ res+=self.image_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CompositeImageOptions(ProtocolBuffer.ProtocolMessage):
+
+ TOP_LEFT = 0
+ TOP = 1
+ TOP_RIGHT = 2
+ LEFT = 3
+ CENTER = 4
+ RIGHT = 5
+ BOTTOM_LEFT = 6
+ BOTTOM = 7
+ BOTTOM_RIGHT = 8
+
+ _ANCHOR_NAMES = {
+ 0: "TOP_LEFT",
+ 1: "TOP",
+ 2: "TOP_RIGHT",
+ 3: "LEFT",
+ 4: "CENTER",
+ 5: "RIGHT",
+ 6: "BOTTOM_LEFT",
+ 7: "BOTTOM",
+ 8: "BOTTOM_RIGHT",
+ }
+
+ def ANCHOR_Name(cls, x): return cls._ANCHOR_NAMES.get(x, "")
+ ANCHOR_Name = classmethod(ANCHOR_Name)
+
+ has_source_index_ = 0
+ source_index_ = 0
+ has_x_offset_ = 0
+ x_offset_ = 0
+ has_y_offset_ = 0
+ y_offset_ = 0
+ has_opacity_ = 0
+ opacity_ = 0.0
+ has_anchor_ = 0
+ anchor_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def source_index(self): return self.source_index_
+
+ def set_source_index(self, x):
+ self.has_source_index_ = 1
+ self.source_index_ = x
+
+ def clear_source_index(self):
+ if self.has_source_index_:
+ self.has_source_index_ = 0
+ self.source_index_ = 0
+
+ def has_source_index(self): return self.has_source_index_
+
+ def x_offset(self): return self.x_offset_
+
+ def set_x_offset(self, x):
+ self.has_x_offset_ = 1
+ self.x_offset_ = x
+
+ def clear_x_offset(self):
+ if self.has_x_offset_:
+ self.has_x_offset_ = 0
+ self.x_offset_ = 0
+
+ def has_x_offset(self): return self.has_x_offset_
+
+ def y_offset(self): return self.y_offset_
+
+ def set_y_offset(self, x):
+ self.has_y_offset_ = 1
+ self.y_offset_ = x
+
+ def clear_y_offset(self):
+ if self.has_y_offset_:
+ self.has_y_offset_ = 0
+ self.y_offset_ = 0
+
+ def has_y_offset(self): return self.has_y_offset_
+
+ def opacity(self): return self.opacity_
+
+ def set_opacity(self, x):
+ self.has_opacity_ = 1
+ self.opacity_ = x
+
+ def clear_opacity(self):
+ if self.has_opacity_:
+ self.has_opacity_ = 0
+ self.opacity_ = 0.0
+
+ def has_opacity(self): return self.has_opacity_
+
+ def anchor(self): return self.anchor_
+
+ def set_anchor(self, x):
+ self.has_anchor_ = 1
+ self.anchor_ = x
+
+ def clear_anchor(self):
+ if self.has_anchor_:
+ self.has_anchor_ = 0
+ self.anchor_ = 0
+
+ def has_anchor(self): return self.has_anchor_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_source_index()): self.set_source_index(x.source_index())
+ if (x.has_x_offset()): self.set_x_offset(x.x_offset())
+ if (x.has_y_offset()): self.set_y_offset(x.y_offset())
+ if (x.has_opacity()): self.set_opacity(x.opacity())
+ if (x.has_anchor()): self.set_anchor(x.anchor())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_source_index_ != x.has_source_index_: return 0
+ if self.has_source_index_ and self.source_index_ != x.source_index_: return 0
+ if self.has_x_offset_ != x.has_x_offset_: return 0
+ if self.has_x_offset_ and self.x_offset_ != x.x_offset_: return 0
+ if self.has_y_offset_ != x.has_y_offset_: return 0
+ if self.has_y_offset_ and self.y_offset_ != x.y_offset_: return 0
+ if self.has_opacity_ != x.has_opacity_: return 0
+ if self.has_opacity_ and self.opacity_ != x.opacity_: return 0
+ if self.has_anchor_ != x.has_anchor_: return 0
+ if self.has_anchor_ and self.anchor_ != x.anchor_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_source_index_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: source_index not set.')
+ if (not self.has_x_offset_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: x_offset not set.')
+ if (not self.has_y_offset_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: y_offset not set.')
+ if (not self.has_opacity_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: opacity not set.')
+ if (not self.has_anchor_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: anchor not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.source_index_)
+ n += self.lengthVarInt64(self.x_offset_)
+ n += self.lengthVarInt64(self.y_offset_)
+ n += self.lengthVarInt64(self.anchor_)
+ return n + 9
+
+ def Clear(self):
+ self.clear_source_index()
+ self.clear_x_offset()
+ self.clear_y_offset()
+ self.clear_opacity()
+ self.clear_anchor()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.source_index_)
+ out.putVarInt32(16)
+ out.putVarInt32(self.x_offset_)
+ out.putVarInt32(24)
+ out.putVarInt32(self.y_offset_)
+ out.putVarInt32(37)
+ out.putFloat(self.opacity_)
+ out.putVarInt32(40)
+ out.putVarInt32(self.anchor_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_source_index(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.set_x_offset(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.set_y_offset(d.getVarInt32())
+ continue
+ if tt == 37:
+ self.set_opacity(d.getFloat())
+ continue
+ if tt == 40:
+ self.set_anchor(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_source_index_: res+=prefix+("source_index: %s\n" % self.DebugFormatInt32(self.source_index_))
+ if self.has_x_offset_: res+=prefix+("x_offset: %s\n" % self.DebugFormatInt32(self.x_offset_))
+ if self.has_y_offset_: res+=prefix+("y_offset: %s\n" % self.DebugFormatInt32(self.y_offset_))
+ if self.has_opacity_: res+=prefix+("opacity: %s\n" % self.DebugFormatFloat(self.opacity_))
+ if self.has_anchor_: res+=prefix+("anchor: %s\n" % self.DebugFormatInt32(self.anchor_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ ksource_index = 1
+ kx_offset = 2
+ ky_offset = 3
+ kopacity = 4
+ kanchor = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "source_index",
+ 2: "x_offset",
+ 3: "y_offset",
+ 4: "opacity",
+ 5: "anchor",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.FLOAT,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesCanvas(ProtocolBuffer.ProtocolMessage):
+ has_width_ = 0
+ width_ = 0
+ has_height_ = 0
+ height_ = 0
+ has_output_ = 0
+ has_color_ = 0
+ color_ = -1
+
+ def __init__(self, contents=None):
+ self.output_ = OutputSettings()
+ if contents is not None: self.MergeFromString(contents)
+
+ def width(self): return self.width_
+
+ def set_width(self, x):
+ self.has_width_ = 1
+ self.width_ = x
+
+ def clear_width(self):
+ if self.has_width_:
+ self.has_width_ = 0
+ self.width_ = 0
+
+ def has_width(self): return self.has_width_
+
+ def height(self): return self.height_
+
+ def set_height(self, x):
+ self.has_height_ = 1
+ self.height_ = x
+
+ def clear_height(self):
+ if self.has_height_:
+ self.has_height_ = 0
+ self.height_ = 0
+
+ def has_height(self): return self.has_height_
+
+ def output(self): return self.output_
+
+ def mutable_output(self): self.has_output_ = 1; return self.output_
+
+ def clear_output(self):self.has_output_ = 0; self.output_.Clear()
+
+ def has_output(self): return self.has_output_
+
+ def color(self): return self.color_
+
+ def set_color(self, x):
+ self.has_color_ = 1
+ self.color_ = x
+
+ def clear_color(self):
+ if self.has_color_:
+ self.has_color_ = 0
+ self.color_ = -1
+
+ def has_color(self): return self.has_color_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_width()): self.set_width(x.width())
+ if (x.has_height()): self.set_height(x.height())
+ if (x.has_output()): self.mutable_output().MergeFrom(x.output())
+ if (x.has_color()): self.set_color(x.color())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_width_ != x.has_width_: return 0
+ if self.has_width_ and self.width_ != x.width_: return 0
+ if self.has_height_ != x.has_height_: return 0
+ if self.has_height_ and self.height_ != x.height_: return 0
+ if self.has_output_ != x.has_output_: return 0
+ if self.has_output_ and self.output_ != x.output_: return 0
+ if self.has_color_ != x.has_color_: return 0
+ if self.has_color_ and self.color_ != x.color_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_width_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: width not set.')
+ if (not self.has_height_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: height not set.')
+ if (not self.has_output_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: output not set.')
+ elif not self.output_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.width_)
+ n += self.lengthVarInt64(self.height_)
+ n += self.lengthString(self.output_.ByteSize())
+ if (self.has_color_): n += 1 + self.lengthVarInt64(self.color_)
+ return n + 3
+
+ def Clear(self):
+ self.clear_width()
+ self.clear_height()
+ self.clear_output()
+ self.clear_color()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.width_)
+ out.putVarInt32(16)
+ out.putVarInt32(self.height_)
+ out.putVarInt32(26)
+ out.putVarInt32(self.output_.ByteSize())
+ self.output_.OutputUnchecked(out)
+ if (self.has_color_):
+ out.putVarInt32(32)
+ out.putVarInt32(self.color_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_width(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.set_height(d.getVarInt32())
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_output().TryMerge(tmp)
+ continue
+ if tt == 32:
+ self.set_color(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_width_: res+=prefix+("width: %s\n" % self.DebugFormatInt32(self.width_))
+ if self.has_height_: res+=prefix+("height: %s\n" % self.DebugFormatInt32(self.height_))
+ if self.has_output_:
+ res+=prefix+"output <\n"
+ res+=self.output_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ if self.has_color_: res+=prefix+("color: %s\n" % self.DebugFormatInt32(self.color_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kwidth = 1
+ kheight = 2
+ koutput = 3
+ kcolor = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "width",
+ 2: "height",
+ 3: "output",
+ 4: "color",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesCompositeRequest(ProtocolBuffer.ProtocolMessage):
+ has_canvas_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = []
+ self.options_ = []
+ self.canvas_ = ImagesCanvas()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image_size(self): return len(self.image_)
+ def image_list(self): return self.image_
+
+ def image(self, i):
+ return self.image_[i]
+
+ def mutable_image(self, i):
+ return self.image_[i]
+
+ def add_image(self):
+ x = ImageData()
+ self.image_.append(x)
+ return x
+
+ def clear_image(self):
+ self.image_ = []
+ def options_size(self): return len(self.options_)
+ def options_list(self): return self.options_
+
+ def options(self, i):
+ return self.options_[i]
+
+ def mutable_options(self, i):
+ return self.options_[i]
+
+ def add_options(self):
+ x = CompositeImageOptions()
+ self.options_.append(x)
+ return x
+
+ def clear_options(self):
+ self.options_ = []
+ def canvas(self): return self.canvas_
+
+ def mutable_canvas(self): self.has_canvas_ = 1; return self.canvas_
+
+ def clear_canvas(self):self.has_canvas_ = 0; self.canvas_.Clear()
+
+ def has_canvas(self): return self.has_canvas_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.image_size()): self.add_image().CopyFrom(x.image(i))
+ for i in xrange(x.options_size()): self.add_options().CopyFrom(x.options(i))
+ if (x.has_canvas()): self.mutable_canvas().MergeFrom(x.canvas())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.image_) != len(x.image_): return 0
+ for e1, e2 in zip(self.image_, x.image_):
+ if e1 != e2: return 0
+ if len(self.options_) != len(x.options_): return 0
+ for e1, e2 in zip(self.options_, x.options_):
+ if e1 != e2: return 0
+ if self.has_canvas_ != x.has_canvas_: return 0
+ if self.has_canvas_ and self.canvas_ != x.canvas_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.image_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ for p in self.options_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (not self.has_canvas_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: canvas not set.')
+ elif not self.canvas_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.image_)
+ for i in xrange(len(self.image_)): n += self.lengthString(self.image_[i].ByteSize())
+ n += 1 * len(self.options_)
+ for i in xrange(len(self.options_)): n += self.lengthString(self.options_[i].ByteSize())
+ n += self.lengthString(self.canvas_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_image()
+ self.clear_options()
+ self.clear_canvas()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.image_)):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_[i].ByteSize())
+ self.image_[i].OutputUnchecked(out)
+ for i in xrange(len(self.options_)):
+ out.putVarInt32(18)
+ out.putVarInt32(self.options_[i].ByteSize())
+ self.options_[i].OutputUnchecked(out)
+ out.putVarInt32(26)
+ out.putVarInt32(self.canvas_.ByteSize())
+ self.canvas_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_image().TryMerge(tmp)
+ continue
+ if tt == 18:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_options().TryMerge(tmp)
+ continue
+ if tt == 26:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_canvas().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.image_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("image%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ cnt=0
+ for e in self.options_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("options%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ if self.has_canvas_:
+ res+=prefix+"canvas <\n"
+ res+=self.canvas_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+ koptions = 2
+ kcanvas = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ 2: "options",
+ 3: "canvas",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesCompositeResponse(ProtocolBuffer.ProtocolMessage):
+ has_image_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = ImageData()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image(self): return self.image_
+
+ def mutable_image(self): self.has_image_ = 1; return self.image_
+
+ def clear_image(self):self.has_image_ = 0; self.image_.Clear()
+
+ def has_image(self): return self.has_image_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_image()): self.mutable_image().MergeFrom(x.image())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_image_ != x.has_image_: return 0
+ if self.has_image_ and self.image_ != x.image_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_image_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: image not set.')
+ elif not self.image_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.image_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_image()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_.ByteSize())
+ self.image_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_image().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_image_:
+ res+=prefix+"image <\n"
+ res+=self.image_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesHistogramRequest(ProtocolBuffer.ProtocolMessage):
+ has_image_ = 0
+
+ def __init__(self, contents=None):
+ self.image_ = ImageData()
+ if contents is not None: self.MergeFromString(contents)
+
+ def image(self): return self.image_
+
+ def mutable_image(self): self.has_image_ = 1; return self.image_
+
+ def clear_image(self):self.has_image_ = 0; self.image_.Clear()
+
+ def has_image(self): return self.has_image_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_image()): self.mutable_image().MergeFrom(x.image())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_image_ != x.has_image_: return 0
+ if self.has_image_ and self.image_ != x.image_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_image_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: image not set.')
+ elif not self.image_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.image_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_image()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.image_.ByteSize())
+ self.image_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_image().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_image_:
+ res+=prefix+"image <\n"
+ res+=self.image_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kimage = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "image",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesHistogram(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.red_ = []
+ self.green_ = []
+ self.blue_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def red_size(self): return len(self.red_)
+ def red_list(self): return self.red_
+
+ def red(self, i):
+ return self.red_[i]
+
+ def set_red(self, i, x):
+ self.red_[i] = x
+
+ def add_red(self, x):
+ self.red_.append(x)
+
+ def clear_red(self):
+ self.red_ = []
+
+ def green_size(self): return len(self.green_)
+ def green_list(self): return self.green_
+
+ def green(self, i):
+ return self.green_[i]
+
+ def set_green(self, i, x):
+ self.green_[i] = x
+
+ def add_green(self, x):
+ self.green_.append(x)
+
+ def clear_green(self):
+ self.green_ = []
+
+ def blue_size(self): return len(self.blue_)
+ def blue_list(self): return self.blue_
+
+ def blue(self, i):
+ return self.blue_[i]
+
+ def set_blue(self, i, x):
+ self.blue_[i] = x
+
+ def add_blue(self, x):
+ self.blue_.append(x)
+
+ def clear_blue(self):
+ self.blue_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.red_size()): self.add_red(x.red(i))
+ for i in xrange(x.green_size()): self.add_green(x.green(i))
+ for i in xrange(x.blue_size()): self.add_blue(x.blue(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.red_) != len(x.red_): return 0
+ for e1, e2 in zip(self.red_, x.red_):
+ if e1 != e2: return 0
+ if len(self.green_) != len(x.green_): return 0
+ for e1, e2 in zip(self.green_, x.green_):
+ if e1 != e2: return 0
+ if len(self.blue_) != len(x.blue_): return 0
+ for e1, e2 in zip(self.blue_, x.blue_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.red_)
+ for i in xrange(len(self.red_)): n += self.lengthVarInt64(self.red_[i])
+ n += 1 * len(self.green_)
+ for i in xrange(len(self.green_)): n += self.lengthVarInt64(self.green_[i])
+ n += 1 * len(self.blue_)
+ for i in xrange(len(self.blue_)): n += self.lengthVarInt64(self.blue_[i])
+ return n + 0
+
+ def Clear(self):
+ self.clear_red()
+ self.clear_green()
+ self.clear_blue()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.red_)):
+ out.putVarInt32(8)
+ out.putVarInt32(self.red_[i])
+ for i in xrange(len(self.green_)):
+ out.putVarInt32(16)
+ out.putVarInt32(self.green_[i])
+ for i in xrange(len(self.blue_)):
+ out.putVarInt32(24)
+ out.putVarInt32(self.blue_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.add_red(d.getVarInt32())
+ continue
+ if tt == 16:
+ self.add_green(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.add_blue(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.red_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("red%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ cnt=0
+ for e in self.green_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("green%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ cnt=0
+ for e in self.blue_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("blue%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kred = 1
+ kgreen = 2
+ kblue = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "red",
+ 2: "green",
+ 3: "blue",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class ImagesHistogramResponse(ProtocolBuffer.ProtocolMessage):
+ has_histogram_ = 0
+
+ def __init__(self, contents=None):
+ self.histogram_ = ImagesHistogram()
+ if contents is not None: self.MergeFromString(contents)
+
+ def histogram(self): return self.histogram_
+
+ def mutable_histogram(self): self.has_histogram_ = 1; return self.histogram_
+
+ def clear_histogram(self):self.has_histogram_ = 0; self.histogram_.Clear()
+
+ def has_histogram(self): return self.has_histogram_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_histogram()): self.mutable_histogram().MergeFrom(x.histogram())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_histogram_ != x.has_histogram_: return 0
+ if self.has_histogram_ and self.histogram_ != x.histogram_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_histogram_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: histogram not set.')
+ elif not self.histogram_.IsInitialized(debug_strs): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(self.histogram_.ByteSize())
+ return n + 1
+
+ def Clear(self):
+ self.clear_histogram()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putVarInt32(self.histogram_.ByteSize())
+ self.histogram_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_histogram().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_histogram_:
+ res+=prefix+"histogram <\n"
+ res+=self.histogram_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ khistogram = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "histogram",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['ImagesServiceError','ImagesServiceTransform','Transform','ImageData','OutputSettings','ImagesTransformRequest','ImagesTransformResponse','CompositeImageOptions','ImagesCanvas','ImagesCompositeRequest','ImagesCompositeResponse','ImagesHistogramRequest','ImagesHistogram','ImagesHistogramResponse']
diff --git a/google_appengine/google/appengine/api/images/images_service_pb.pyc b/google_appengine/google/appengine/api/images/images_service_pb.pyc
new file mode 100644
index 0000000..6b98746
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/images/images_stub.py b/google_appengine/google/appengine/api/images/images_stub.py
new file mode 100755
index 0000000..d89f47e
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_stub.py
@@ -0,0 +1,411 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the images API."""
+
+
+
+import logging
+import StringIO
+
+try:
+ import PIL
+ from PIL import _imaging
+ from PIL import Image
+except ImportError:
+ import _imaging
+ import Image
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import images
+from google.appengine.api.images import images_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+def _ArgbToRgbaTuple(argb):
+ """Convert from a single ARGB value to a tuple containing RGBA.
+
+ Args:
+ argb: Signed 32 bit integer containing an ARGB value.
+
+ Returns:
+ RGBA tuple.
+ """
+ unsigned_argb = argb % 0x100000000
+ return ((unsigned_argb >> 16) & 0xFF,
+ (unsigned_argb >> 8) & 0xFF,
+ unsigned_argb & 0xFF,
+ (unsigned_argb >> 24) & 0xFF)
+
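+# Example of the conversion above, doctest-style. 0xFF00FF00 is opaque green
+# in ARGB; the same bits as a signed 32-bit int are -16711936, which the
+# modulo in the first line maps back to the unsigned value:
+#
+#   >>> _ArgbToRgbaTuple(0xFF00FF00)
+#   (0, 255, 0, 255)
+#   >>> _ArgbToRgbaTuple(-16711936)
+#   (0, 255, 0, 255)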
+
+class ImagesServiceStub(apiproxy_stub.APIProxyStub):
+ """Stub version of images API to be used with the dev_appserver."""
+
+ def __init__(self, service_name='images'):
+    """Preloads PIL so its modules load in the unhardened environment.
+
+ Args:
+ service_name: Service name expected for all calls.
+ """
+ super(ImagesServiceStub, self).__init__(service_name)
+ Image.init()
+
+ def _Dynamic_Composite(self, request, response):
+ """Implementation of ImagesService::Composite.
+
+ Based off documentation of the PIL library at
+ http://www.pythonware.com/library/pil/handbook/index.htm
+
+ Args:
+ request: ImagesCompositeRequest, contains image request info.
+ response: ImagesCompositeResponse, contains transformed image.
+ """
+    width = request.canvas().width()
+    height = request.canvas().height()
+    color = _ArgbToRgbaTuple(request.canvas().color())
+    # Validate the request before allocating the canvas so that invalid
+    # dimensions surface as a service error rather than a PIL error.
+    if (not width or width > 4000 or
+        not height or height > 4000):
+      raise apiproxy_errors.ApplicationError(
+          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+    if not request.image_size():
+      raise apiproxy_errors.ApplicationError(
+          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+    if not request.options_size():
+      raise apiproxy_errors.ApplicationError(
+          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+    if request.options_size() > images.MAX_COMPOSITES_PER_REQUEST:
+      raise apiproxy_errors.ApplicationError(
+          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+    canvas = Image.new("RGBA", (width, height), color)
+    sources = []
+    for image in request.image_list():
+      sources.append(self._OpenImage(image.content()))
+
+ for options in request.options_list():
+ if (options.anchor() < images.TOP_LEFT or
+ options.anchor() > images.BOTTOM_RIGHT):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ if options.source_index() >= len(sources) or options.source_index() < 0:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ if options.opacity() < 0 or options.opacity() > 1:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ source = sources[options.source_index()]
+ x_anchor = (options.anchor() % 3) * 0.5
+ y_anchor = (options.anchor() / 3) * 0.5
+ x_offset = int(options.x_offset() + x_anchor * (width - source.size[0]))
+ y_offset = int(options.y_offset() + y_anchor * (height - source.size[1]))
+ alpha = options.opacity() * 255
+ mask = Image.new("L", source.size, alpha)
+ canvas.paste(source, (x_offset, y_offset), mask)
+ response_value = self._EncodeImage(canvas, request.canvas().output())
+ response.mutable_image().set_content(response_value)
+
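+  # Worked example of the anchor arithmetic above (illustrative; it assumes
+  # the anchor enum covers a 3x3 grid, images.TOP_LEFT == 0 through
+  # images.BOTTOM_RIGHT == 8). With anchor == 8, a 100x100 canvas and a
+  # 40x20 source, x_anchor = (8 % 3) * 0.5 = 1.0 and, using integer
+  # division, y_anchor = (8 / 3) * 0.5 = 1.0, so a zero-offset paste lands
+  # at (60, 80): the source's bottom-right corner meets the canvas's.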
+ def _Dynamic_Histogram(self, request, response):
+ """Trivial implementation of ImagesService::Histogram.
+
+ Based off documentation of the PIL library at
+ http://www.pythonware.com/library/pil/handbook/index.htm
+
+ Args:
+ request: ImagesHistogramRequest, contains the image.
+ response: ImagesHistogramResponse, contains histogram of the image.
+ """
+ image = self._OpenImage(request.image().content())
+ img_format = image.format
+ if img_format not in ("BMP", "GIF", "ICO", "JPEG", "PNG", "TIFF"):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.NOT_IMAGE)
+ image = image.convert("RGBA")
+ red = [0] * 256
+ green = [0] * 256
+ blue = [0] * 256
+ for pixel in image.getdata():
+ red[int((pixel[0] * pixel[3]) / 255)] += 1
+ green[int((pixel[1] * pixel[3]) / 255)] += 1
+ blue[int((pixel[2] * pixel[3]) / 255)] += 1
+ histogram = response.mutable_histogram()
+ for value in red:
+ histogram.add_red(value)
+ for value in green:
+ histogram.add_green(value)
+ for value in blue:
+ histogram.add_blue(value)
+
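+  # The loop above weights each channel by the pixel's alpha before binning,
+  # i.e. it histograms premultiplied values. For example, a half-transparent
+  # pure-red pixel (255, 0, 0, 128) increments red[int(255 * 128 / 255)],
+  # which is red[128], while green[0] and blue[0] each gain one count.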
+ def _Dynamic_Transform(self, request, response):
+ """Trivial implementation of ImagesService::Transform.
+
+ Based off documentation of the PIL library at
+ http://www.pythonware.com/library/pil/handbook/index.htm
+
+ Args:
+ request: ImagesTransformRequest, contains image request info.
+ response: ImagesTransformResponse, contains transformed image.
+ """
+ original_image = self._OpenImage(request.image().content())
+
+ new_image = self._ProcessTransforms(original_image,
+ request.transform_list())
+
+ response_value = self._EncodeImage(new_image, request.output())
+ response.mutable_image().set_content(response_value)
+
+ def _EncodeImage(self, image, output_encoding):
+ """Encode the given image and return it in string form.
+
+ Args:
+ image: PIL Image object, image to encode.
+ output_encoding: ImagesTransformRequest.OutputSettings object.
+
+ Returns:
+ str with encoded image information in given encoding format.
+ """
+ image_string = StringIO.StringIO()
+
+ image_encoding = "PNG"
+
+ if (output_encoding.mime_type() == images_service_pb.OutputSettings.JPEG):
+ image_encoding = "JPEG"
+
+ image = image.convert("RGB")
+
+ image.save(image_string, image_encoding)
+
+ return image_string.getvalue()
+
+ def _OpenImage(self, image):
+ """Opens an image provided as a string.
+
+ Args:
+ image: image data to be opened
+
+ Raises:
+ apiproxy_errors.ApplicationError if the image cannot be opened or if it
+ is an unsupported format.
+
+ Returns:
+ Image containing the image data passed in.
+ """
+ if not image:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.NOT_IMAGE)
+
+ image = StringIO.StringIO(image)
+ try:
+ image = Image.open(image)
+ except IOError:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_IMAGE_DATA)
+
+ img_format = image.format
+ if img_format not in ("BMP", "GIF", "ICO", "JPEG", "PNG", "TIFF"):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.NOT_IMAGE)
+ return image
+
+ def _ValidateCropArg(self, arg):
+ """Check an argument for the Crop transform.
+
+ Args:
+ arg: float, argument to Crop transform to check.
+
+ Raises:
+ apiproxy_errors.ApplicationError on problem with argument.
+ """
+ if not isinstance(arg, float):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+
+ if not (0 <= arg <= 1.0):
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+
+ def _CalculateNewDimensions(self,
+ current_width,
+ current_height,
+ req_width,
+ req_height):
+ """Get new resize dimensions keeping the current aspect ratio.
+
+ This uses the more restricting of the two requested values to determine
+ the new ratio.
+
+ Args:
+ current_width: int, current width of the image.
+ current_height: int, current height of the image.
+ req_width: int, requested new width of the image.
+ req_height: int, requested new height of the image.
+
+ Returns:
+ tuple (width, height) which are both ints of the new ratio.
+ """
+
+ width_ratio = float(req_width) / current_width
+ height_ratio = float(req_height) / current_height
+
+ if req_width == 0 or (width_ratio > height_ratio and req_height != 0):
+ return int(height_ratio * current_width), req_height
+ else:
+ return req_width, int(width_ratio * current_height)
+
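+  # Worked example of the ratio selection above: resizing a 400x200 image
+  # with req_width=100 and req_height=100 gives width_ratio=0.25 and
+  # height_ratio=0.5; width_ratio is the smaller (more restrictive) one, so
+  # the result is (100, int(0.25 * 200)) == (100, 50), preserving the 2:1
+  # aspect ratio.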
+ def _Resize(self, image, transform):
+ """Use PIL to resize the given image with the given transform.
+
+ Args:
+ image: PIL.Image.Image object to resize.
+ transform: images_service_pb.Transform to use when resizing.
+
+ Returns:
+ PIL.Image.Image with transforms performed on it.
+
+ Raises:
+ BadRequestError if the resize data given is bad.
+ """
+ width = 0
+ height = 0
+
+ if transform.has_width():
+ width = transform.width()
+ if width < 0 or 4000 < width:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+
+ if transform.has_height():
+ height = transform.height()
+ if height < 0 or 4000 < height:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+
+ current_width, current_height = image.size
+ new_width, new_height = self._CalculateNewDimensions(current_width,
+ current_height,
+ width,
+ height)
+
+ return image.resize((new_width, new_height), Image.ANTIALIAS)
+
+ def _Rotate(self, image, transform):
+ """Use PIL to rotate the given image with the given transform.
+
+ Args:
+ image: PIL.Image.Image object to rotate.
+ transform: images_service_pb.Transform to use when rotating.
+
+ Returns:
+ PIL.Image.Image with transforms performed on it.
+
+ Raises:
+ BadRequestError if the rotate data given is bad.
+ """
+ degrees = transform.rotate()
+ if degrees < 0 or degrees % 90 != 0:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ degrees %= 360
+
+ degrees = 360 - degrees
+ return image.rotate(degrees)
+
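+  # Note on the arithmetic above: PIL's Image.rotate() turns an image
+  # counter-clockwise, while the images API specifies clockwise rotations,
+  # hence the 360 - degrees inversion. For example, a requested rotation of
+  # 90 becomes image.rotate(270), i.e. 90 degrees clockwise.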
+ def _Crop(self, image, transform):
+ """Use PIL to crop the given image with the given transform.
+
+ Args:
+ image: PIL.Image.Image object to crop.
+ transform: images_service_pb.Transform to use when cropping.
+
+ Returns:
+ PIL.Image.Image with transforms performed on it.
+
+ Raises:
+ BadRequestError if the crop data given is bad.
+ """
+ left_x = 0.0
+ top_y = 0.0
+ right_x = 1.0
+ bottom_y = 1.0
+
+ if transform.has_crop_left_x():
+ left_x = transform.crop_left_x()
+ self._ValidateCropArg(left_x)
+
+ if transform.has_crop_top_y():
+ top_y = transform.crop_top_y()
+ self._ValidateCropArg(top_y)
+
+ if transform.has_crop_right_x():
+ right_x = transform.crop_right_x()
+ self._ValidateCropArg(right_x)
+
+ if transform.has_crop_bottom_y():
+ bottom_y = transform.crop_bottom_y()
+ self._ValidateCropArg(bottom_y)
+
+ width, height = image.size
+
+    # Use the validated crop values gathered above rather than re-reading
+    # the raw transform fields.
+    box = (int(left_x * width),
+           int(top_y * height),
+           int(right_x * width),
+           int(bottom_y * height))
+
+ return image.crop(box)
+
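+  # Illustrative crop arithmetic: on a 200x100 image, crop fractions
+  # (left_x=0.25, top_y=0.0, right_x=0.75, bottom_y=1.0) produce the pixel
+  # box (50, 0, 150, 100): the middle half of the image horizontally at its
+  # full height.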
+ def _ProcessTransforms(self, image, transforms):
+ """Execute PIL operations based on transform values.
+
+ Args:
+ image: PIL.Image.Image instance, image to manipulate.
+      transforms: list of ImagesTransformRequest.Transform objects.
+
+ Returns:
+ PIL.Image.Image with transforms performed on it.
+
+ Raises:
+ BadRequestError if we are passed more than one of the same type of
+ transform.
+ """
+ new_image = image
+ if len(transforms) > images.MAX_TRANSFORMS_PER_REQUEST:
+ raise apiproxy_errors.ApplicationError(
+ images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA)
+ for transform in transforms:
+ if transform.has_width() or transform.has_height():
+ new_image = self._Resize(new_image, transform)
+
+ elif transform.has_rotate():
+ new_image = self._Rotate(new_image, transform)
+
+ elif transform.has_horizontal_flip():
+ new_image = new_image.transpose(Image.FLIP_LEFT_RIGHT)
+
+ elif transform.has_vertical_flip():
+ new_image = new_image.transpose(Image.FLIP_TOP_BOTTOM)
+
+ elif (transform.has_crop_left_x() or
+ transform.has_crop_top_y() or
+ transform.has_crop_right_x() or
+ transform.has_crop_bottom_y()):
+ new_image = self._Crop(new_image, transform)
+
+ elif transform.has_autolevels():
+ logging.info("I'm Feeling Lucky autolevels will be visible once this "
+ "application is deployed.")
+ else:
+        logging.warn("Found no transformations to perform.")
+
+ return new_image
diff --git a/google_appengine/google/appengine/api/images/images_stub.pyc b/google_appengine/google/appengine/api/images/images_stub.pyc
new file mode 100644
index 0000000..a29f50c
--- /dev/null
+++ b/google_appengine/google/appengine/api/images/images_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/__init__.py b/google_appengine/google/appengine/api/labs/__init__.py
new file mode 100755
index 0000000..c33ae80
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/__init__.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google_appengine/google/appengine/api/labs/__init__.pyc b/google_appengine/google/appengine/api/labs/__init__.pyc
new file mode 100644
index 0000000..3557233
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/__init__.py b/google_appengine/google/appengine/api/labs/taskqueue/__init__.py
new file mode 100644
index 0000000..cf9ea5a
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/__init__.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Task Queue API module."""
+
+from taskqueue import *
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/__init__.pyc b/google_appengine/google/appengine/api/labs/taskqueue/__init__.pyc
new file mode 100644
index 0000000..a9ca241
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py
new file mode 100755
index 0000000..733df36
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.py
@@ -0,0 +1,633 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Task Queue API.
+
+Enables an application to queue background work for itself. Work is done through
+webhooks that process tasks pushed from a queue. Tasks will execute in
+best-effort order of ETA. Webhooks that fail will cause tasks to be retried at a
+later time. Multiple queues may exist with independent throttling controls.
+
+Webhook URLs may be specified directly for Tasks, or the default URL scheme
+may be used, which will translate Task names into URLs relative to a Queue's
+base path. A default queue is also provided for simple usage.
+"""
+
+
+
+import calendar
+import datetime
+import re
+import urllib
+import urlparse
+
+import taskqueue_service_pb
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import urlfetch
+from google.appengine.runtime import apiproxy_errors
+
+
+class Error(Exception):
+ """Base-class for exceptions in this module."""
+
+
+class UnknownQueueError(Error):
+ """The queue specified is unknown."""
+
+
+class TransientError(Error):
+ """There was a transient error while accessing the queue.
+
+  Please try again later.
+ """
+
+
+class InternalError(Error):
+ """There was an internal error while accessing this queue.
+
+ If this problem continues, please contact the App Engine team through
+ our support forum with a description of your problem.
+ """
+
+
+class InvalidTaskError(Error):
+ """The task's parameters, headers, or method is invalid."""
+
+
+class InvalidTaskNameError(InvalidTaskError):
+ """The task's name is invalid."""
+
+
+class TaskTooLargeError(InvalidTaskError):
+ """The task is too large with its headers and payload."""
+
+
+class TaskAlreadyExistsError(InvalidTaskError):
+ """Task already exists. It has not yet run."""
+
+
+class TombstonedTaskError(InvalidTaskError):
+ """Task has been tombstoned."""
+
+
+class InvalidUrlError(InvalidTaskError):
+ """The task's relative URL is invalid."""
+
+
+class BadTaskStateError(Error):
+ """The task is in the wrong state for the requested operation."""
+
+
+class InvalidQueueError(Error):
+ """The Queue's configuration is invalid."""
+
+
+class InvalidQueueNameError(InvalidQueueError):
+ """The Queue's name is invalid."""
+
+
+class _RelativeUrlError(Error):
+ """The relative URL supplied is invalid."""
+
+
+class PermissionDeniedError(Error):
+ """The requested operation is not allowed for this app."""
+
+
+MAX_QUEUE_NAME_LENGTH = 100
+
+MAX_TASK_NAME_LENGTH = 500
+
+MAX_TASK_SIZE_BYTES = 10 * (2 ** 10)
+
+MAX_URL_LENGTH = 2083
+
+_DEFAULT_QUEUE = 'default'
+
+_DEFAULT_QUEUE_PATH = '/_ah/queue'
+
+_METHOD_MAP = {
+ 'GET': taskqueue_service_pb.TaskQueueAddRequest.GET,
+ 'POST': taskqueue_service_pb.TaskQueueAddRequest.POST,
+ 'HEAD': taskqueue_service_pb.TaskQueueAddRequest.HEAD,
+ 'PUT': taskqueue_service_pb.TaskQueueAddRequest.PUT,
+ 'DELETE': taskqueue_service_pb.TaskQueueAddRequest.DELETE,
+}
+
+_NON_POST_METHODS = frozenset(['GET', 'HEAD', 'PUT', 'DELETE'])
+
+_BODY_METHODS = frozenset(['POST', 'PUT'])
+
+_TASK_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_TASK_NAME_LENGTH
+
+_TASK_NAME_RE = re.compile(_TASK_NAME_PATTERN)
+
+_QUEUE_NAME_PATTERN = r'^[a-zA-Z0-9-]{1,%s}$' % MAX_QUEUE_NAME_LENGTH
+
+_QUEUE_NAME_RE = re.compile(_QUEUE_NAME_PATTERN)
+
+
+class _UTCTimeZone(datetime.tzinfo):
+ """UTC timezone."""
+
+ ZERO = datetime.timedelta(0)
+
+ def utcoffset(self, dt):
+ return self.ZERO
+
+ def dst(self, dt):
+ return self.ZERO
+
+ def tzname(self, dt):
+ return 'UTC'
+
+
+_UTC = _UTCTimeZone()
+
+
+def _parse_relative_url(relative_url):
+ """Parses a relative URL and splits it into its path and query string.
+
+ Args:
+ relative_url: The relative URL, starting with a '/'.
+
+ Returns:
+ Tuple (path, query) where:
+ path: The path in the relative URL.
+ query: The query string in the URL without the '?' character.
+
+ Raises:
+    _RelativeUrlError if the relative_url is invalid for any reason.
+ """
+ if not relative_url:
+ raise _RelativeUrlError('Relative URL is empty')
+ (scheme, netloc, path, query, fragment) = urlparse.urlsplit(relative_url)
+ if scheme or netloc:
+ raise _RelativeUrlError('Relative URL may not have a scheme or location')
+ if fragment:
+ raise _RelativeUrlError('Relative URL may not specify a fragment')
+ if not path or path[0] != '/':
+ raise _RelativeUrlError('Relative URL path must start with "/"')
+ return path, query
+
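+# Doctest-style example of the parser above:
+#
+#   >>> _parse_relative_url('/worker/process?id=42')
+#   ('/worker/process', 'id=42')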
+
+def _flatten_params(params):
+ """Converts a dictionary of parameters to a list of parameters.
+
+ Any unicode strings in keys or values will be encoded as UTF-8.
+
+ Args:
+ params: Dictionary mapping parameter keys to values. Values will be
+ converted to a string and added to the list as tuple (key, value). If
+      a value is iterable and not a string, each contained value will be
+ added as a separate (key, value) tuple.
+
+ Returns:
+ List of (key, value) tuples.
+ """
+ def get_string(value):
+ if isinstance(value, unicode):
+ return unicode(value).encode('utf-8')
+ else:
+ return str(value)
+
+ param_list = []
+ for key, value in params.iteritems():
+ key = get_string(key)
+ if isinstance(value, basestring):
+ param_list.append((key, get_string(value)))
+ else:
+ try:
+ iterator = iter(value)
+ except TypeError:
+ param_list.append((key, str(value)))
+ else:
+ param_list.extend((key, get_string(v)) for v in iterator)
+
+ return param_list
+
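+# Example (dict iteration order is arbitrary, so the pair order below is one
+# possibility):
+#
+#   _flatten_params({'tag': ['a', 'b'], 'count': 3})
+#   => [('tag', 'a'), ('tag', 'b'), ('count', '3')]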
+
+class Task(object):
+ """Represents a single Task on a queue."""
+
+ __CONSTRUCTOR_KWARGS = frozenset([
+ 'countdown', 'eta', 'headers', 'method', 'name', 'params', 'url'])
+
+ def __init__(self, payload=None, **kwargs):
+ """Initializer.
+
+ All parameters are optional.
+
+ Args:
+ payload: The payload data for this Task that will be delivered to the
+ webhook as the HTTP request body. This is only allowed for POST and PUT
+ methods.
+ countdown: Time in seconds into the future that this Task should execute.
+ Defaults to zero.
+ eta: Absolute time when the Task should execute. May not be specified
+ if 'countdown' is also supplied.
+ headers: Dictionary of headers to pass to the webhook. Values in the
+ dictionary may be iterable to indicate repeated header fields.
+ method: Method to use when accessing the webhook. Defaults to 'POST'.
+ name: Name to give the Task; if not specified, a name will be
+ auto-generated when added to a queue and assigned to this object. Must
+ match the _TASK_NAME_PATTERN regular expression.
+ params: Dictionary of parameters to use for this Task. For POST requests
+ these params will be encoded as 'application/x-www-form-urlencoded' and
+ set to the payload. For all other methods, the parameters will be
+ converted to a query string. May not be specified if the URL already
+ contains a query string.
+ url: Relative URL where the webhook that should handle this task is
+ located for this application. May have a query string unless this is
+ a POST method.
+
+ Raises:
+ InvalidTaskError if any of the parameters are invalid;
+ InvalidTaskNameError if the task name is invalid; InvalidUrlError if
+ the task URL is invalid or too long; TaskTooLargeError if the task with
+ its payload is too large.
+ """
+ args_diff = set(kwargs.iterkeys()) - self.__CONSTRUCTOR_KWARGS
+ if args_diff:
+ raise TypeError('Invalid arguments: %s' % ', '.join(args_diff))
+
+ self.__name = kwargs.get('name')
+ if self.__name and not _TASK_NAME_RE.match(self.__name):
+ raise InvalidTaskNameError(
+ 'Task name does not match expression "%s"; found %s' %
+ (_TASK_NAME_PATTERN, self.__name))
+
+ self.__default_url, self.__relative_url, query = Task.__determine_url(
+ kwargs.get('url', ''))
+ self.__headers = urlfetch._CaselessDict()
+ self.__headers.update(kwargs.get('headers', {}))
+ self.__method = kwargs.get('method', 'POST').upper()
+ self.__payload = None
+ params = kwargs.get('params', {})
+
+ if query and params:
+ raise InvalidTaskError('Query string and parameters both present; '
+ 'only one of these may be supplied')
+
+ if self.__method == 'POST':
+ if payload and params:
+ raise InvalidTaskError('Message body and parameters both present for '
+ 'POST method; only one of these may be supplied')
+ elif query:
+ raise InvalidTaskError('POST method may not have a query string; '
+ 'use the "params" keyword argument instead')
+ elif params:
+ self.__payload = Task.__encode_params(params)
+ self.__headers.setdefault(
+ 'content-type', 'application/x-www-form-urlencoded')
+ elif payload is not None:
+ self.__payload = Task.__convert_payload(payload, self.__headers)
+ elif self.__method in _NON_POST_METHODS:
+ if payload and self.__method not in _BODY_METHODS:
+ raise InvalidTaskError('Payload may only be specified for methods %s' %
+ ', '.join(_BODY_METHODS))
+ if payload:
+ self.__payload = Task.__convert_payload(payload, self.__headers)
+ if params:
+ query = Task.__encode_params(params)
+ if query:
+ self.__relative_url = '%s?%s' % (self.__relative_url, query)
+ else:
+ raise InvalidTaskError('Invalid method: %s' % self.__method)
+
+ self.__headers_list = _flatten_params(self.__headers)
+ self.__eta = Task.__determine_eta(
+ kwargs.get('eta'), kwargs.get('countdown'))
+ self.__enqueued = False
+
+ if self.size > MAX_TASK_SIZE_BYTES:
+      raise TaskTooLargeError('Task size must be at most %d; found %d' %
+ (MAX_TASK_SIZE_BYTES, self.size))
+
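+  # A minimal usage sketch ('/process' and the parameter name are invented
+  # for illustration):
+  #
+  #   task = Task(url='/process', params={'key': 'abc'}, countdown=60)
+  #   task.add('background')
+  #
+  # This builds a POST task whose form-encoded body is 'key=abc' and whose
+  # ETA is 60 seconds from now, then enqueues it on the 'background' queue.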
+ @staticmethod
+ def __determine_url(relative_url):
+    """Determines the URL of a task given a relative URL.
+
+ Args:
+ relative_url: The relative URL for the Task.
+
+ Returns:
+ Tuple (default_url, relative_url, query) where:
+ default_url: True if this Task is using the default URL scheme;
+ False otherwise.
+ relative_url: String containing the relative URL for this Task.
+ query: The query string for this task.
+
+ Raises:
+ InvalidUrlError if the relative_url is invalid.
+ """
+ if not relative_url:
+ default_url, query = True, ''
+ else:
+ default_url = False
+ try:
+ relative_url, query = _parse_relative_url(relative_url)
+ except _RelativeUrlError, e:
+ raise InvalidUrlError(e)
+
+ if len(relative_url) > MAX_URL_LENGTH:
+ raise InvalidUrlError(
+        'Task URL must be at most %d characters; found %d' %
+ (MAX_URL_LENGTH, len(relative_url)))
+
+ return (default_url, relative_url, query)
+
+ @staticmethod
+ def __determine_eta(eta=None, countdown=None, now=datetime.datetime.now):
+ """Determines the ETA for a task.
+
+ If 'eta' and 'countdown' are both None, the current time will be used.
+ Otherwise, only one of them may be specified.
+
+ Args:
+      eta: A datetime.datetime specifying the absolute ETA, or None.
+      countdown: Number of seconds into the future from the present time at
+        which the ETA should be set.
+
+ Returns:
+ A datetime in the UTC timezone containing the ETA.
+
+ Raises:
+ InvalidTaskError if the parameters are invalid.
+ """
+ if eta is not None and countdown is not None:
+ raise InvalidTaskError('May not use a countdown and ETA together')
+ elif eta is not None:
+ if not isinstance(eta, datetime.datetime):
+ raise InvalidTaskError('ETA must be a datetime.datetime instance')
+ elif countdown is not None:
+ try:
+ countdown = float(countdown)
+ except ValueError:
+ raise InvalidTaskError('Countdown must be a number')
+ else:
+ eta = now() + datetime.timedelta(seconds=countdown)
+ else:
+ eta = now()
+
+ if eta.tzinfo is None:
+ eta = eta.replace(tzinfo=_UTC)
+ return eta.astimezone(_UTC)
+
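+  # Behaviour of the helper above, for example: __determine_eta(countdown=30)
+  # returns now() plus 30 seconds; __determine_eta() alone returns the
+  # current time; supplying both eta and countdown raises InvalidTaskError.
+  # Naive datetimes are stamped as UTC via replace(tzinfo=_UTC) rather than
+  # converted, since now() carries no timezone information to convert from.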
+ @staticmethod
+ def __encode_params(params):
+ """URL-encodes a list of parameters.
+
+ Args:
+ params: Dictionary of parameters, possibly with iterable values.
+
+ Returns:
+ URL-encoded version of the params, ready to be added to a query string or
+ POST body.
+ """
+ return urllib.urlencode(_flatten_params(params))
+
+ @staticmethod
+ def __convert_payload(payload, headers):
+ """Converts a Task payload into UTF-8 and sets headers if necessary.
+
+ Args:
+ payload: The payload data to convert.
+ headers: Dictionary of headers.
+
+ Returns:
+ The payload as a non-unicode string.
+
+ Raises:
+ InvalidTaskError if the payload is not a string or unicode instance.
+ """
+ if isinstance(payload, unicode):
+ headers.setdefault('content-type', 'text/plain; charset=utf-8')
+ payload = payload.encode('utf-8')
+ elif not isinstance(payload, str):
+ raise InvalidTaskError(
+ 'Task payloads must be strings; invalid payload: %r' % payload)
+ return payload
+
+ @property
+ def on_queue_url(self):
+ """Returns True if this Task will run on the queue's URL."""
+ return self.__default_url
+
+ @property
+ def eta(self):
+    """Returns a datetime corresponding to when this Task will execute."""
+ return self.__eta
+
+ @property
+ def headers(self):
+ """Returns a copy of the headers for this Task."""
+ return self.__headers.copy()
+
+ @property
+ def method(self):
+ """Returns the method to use for this Task."""
+ return self.__method
+
+ @property
+ def name(self):
+ """Returns the name of this Task.
+
+ Will be None if using auto-assigned Task names and this Task has not yet
+ been added to a Queue.
+ """
+ return self.__name
+
+ @property
+ def payload(self):
+ """Returns the payload for this task, which may be None."""
+ return self.__payload
+
+ @property
+ def size(self):
+ """Returns the size of this task in bytes."""
+    HEADER_SEPARATOR = len(': \r\n')
+    header_size = sum((len(key) + len(value) + HEADER_SEPARATOR)
+ for key, value in self.__headers_list)
+ return (len(self.__method) + len(self.__payload or '') +
+ len(self.__relative_url) + header_size)
+
+ @property
+ def url(self):
+ """Returns the relative URL for this Task."""
+ return self.__relative_url
+
+ @property
+ def was_enqueued(self):
+ """Returns True if this Task has been enqueued.
+
+ Note: This will not check if this task already exists in the queue.
+ """
+ return self.__enqueued
+
+ def add(self, queue_name=_DEFAULT_QUEUE, transactional=True):
+ """Adds this Task to a queue. See Queue.add."""
+ return Queue(queue_name).add(self, transactional=transactional)
+
+
+class Queue(object):
+ """Represents a Queue."""
+
+ def __init__(self, name=_DEFAULT_QUEUE):
+ """Initializer.
+
+ Args:
+ name: Name of this queue. If not supplied, defaults to the default queue.
+
+ Raises:
+ InvalidQueueNameError if the queue name is invalid.
+ """
+ if not _QUEUE_NAME_RE.match(name):
+ raise InvalidQueueNameError(
+ 'Queue name does not match pattern "%s"; found %s' %
+ (_QUEUE_NAME_PATTERN, name))
+ self.__name = name
+ self.__url = '%s/%s' % (_DEFAULT_QUEUE_PATH, self.__name)
+
+ def add(self, task, transactional=True):
+ """Adds a Task to this Queue.
+
+ Args:
+ task: The Task to add.
+      transactional: If False, adds the task to the queue regardless of the
+        success or failure of the enclosing transaction. (optional)
+
+ Returns:
+ The Task that was supplied to this method.
+
+ Raises:
+ BadTaskStateError if the Task has already been added to a queue.
+ Error-subclass on application errors.
+ """
+ if task.was_enqueued:
+ raise BadTaskStateError('Task has already been enqueued')
+
+ request = taskqueue_service_pb.TaskQueueAddRequest()
+ response = taskqueue_service_pb.TaskQueueAddResponse()
+
+ adjusted_url = task.url
+ if task.on_queue_url:
+ adjusted_url = self.__url + task.url
+
+
+ request.set_queue_name(self.__name)
+    # task.eta is in UTC, so convert with calendar.timegm; time.mktime would
+    # interpret the tuple as local time.
+    request.set_eta_usec(int(calendar.timegm(task.eta.utctimetuple())) * 10**6)
+ request.set_method(_METHOD_MAP.get(task.method))
+ request.set_url(adjusted_url)
+
+ if task.name:
+ request.set_task_name(task.name)
+ else:
+ request.set_task_name('')
+
+ if task.payload:
+ request.set_body(task.payload)
+ for key, value in _flatten_params(task.headers):
+ header = request.add_header()
+ header.set_key(key)
+ header.set_value(value)
+
+ if transactional:
+ from google.appengine.api import datastore
+ datastore._MaybeSetupTransaction(request, [])
+
+ call_tuple = ('taskqueue', 'Add', request, response)
+ apiproxy_stub_map.apiproxy.GetPreCallHooks().Call(*call_tuple)
+ try:
+ apiproxy_stub_map.MakeSyncCall(*call_tuple)
+ except apiproxy_errors.ApplicationError, e:
+ self.__TranslateError(e)
+ else:
+ apiproxy_stub_map.apiproxy.GetPostCallHooks().Call(*call_tuple)
+
+ if response.has_chosen_task_name():
+ task._Task__name = response.chosen_task_name()
+ task._Task__enqueued = True
+ return task
+
+ @property
+ def name(self):
+ """Returns the name of this queue."""
+ return self.__name
+
+ @staticmethod
+ def __TranslateError(error):
+ """Translates a TaskQueueServiceError into an exception.
+
+ Args:
+ error: Value from TaskQueueServiceError enum.
+
+ Raises:
+ The corresponding Exception sub-class for that error code.
+ """
+ if (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE):
+ raise UnknownQueueError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.TRANSIENT_ERROR):
+ raise TransientError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INTERNAL_ERROR):
+ raise InternalError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.TASK_TOO_LARGE):
+ raise TaskTooLargeError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_TASK_NAME):
+ raise InvalidTaskNameError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_NAME):
+ raise InvalidQueueNameError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_URL):
+ raise InvalidUrlError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_QUEUE_RATE):
+ raise InvalidQueueError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.PERMISSION_DENIED):
+ raise PermissionDeniedError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS):
+ raise TaskAlreadyExistsError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.TOMBSTONED_TASK):
+ raise TombstonedTaskError(error.error_detail)
+ elif (error.application_error ==
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA):
+ raise InvalidTaskError(error.error_detail)
+ else:
+ raise Error('Application error %s: %s' %
+ (error.application_error, error.error_detail))
+
+
+def add(*args, **kwargs):
+  """Convenience function: creates a Task and adds it to the default queue.
+
+ Args:
+ *args, **kwargs: Passed to the Task constructor.
+
+ Returns:
+ The Task that was added to the queue.
+ """
+ return Task(*args, **kwargs).add()
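+# Illustrative one-liner using the convenience function above (the URL is
+# invented for the example):
+#
+#   add(url='/tasks/send-email', params={'to': 'user@example.com'})
+#
+# This creates a POST Task, enqueues it on the 'default' queue and returns it.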
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.pyc b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.pyc
new file mode 100644
index 0000000..ebb0b63
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
new file mode 100644
index 0000000..1038974
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.py
@@ -0,0 +1,1645 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.appengine.datastore.datastore_v3_pb import *
+class TaskQueueServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ UNKNOWN_QUEUE = 1
+ TRANSIENT_ERROR = 2
+ INTERNAL_ERROR = 3
+ TASK_TOO_LARGE = 4
+ INVALID_TASK_NAME = 5
+ INVALID_QUEUE_NAME = 6
+ INVALID_URL = 7
+ INVALID_QUEUE_RATE = 8
+ PERMISSION_DENIED = 9
+ TASK_ALREADY_EXISTS = 10
+ TOMBSTONED_TASK = 11
+ INVALID_ETA = 12
+ INVALID_REQUEST = 13
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "UNKNOWN_QUEUE",
+ 2: "TRANSIENT_ERROR",
+ 3: "INTERNAL_ERROR",
+ 4: "TASK_TOO_LARGE",
+ 5: "INVALID_TASK_NAME",
+ 6: "INVALID_QUEUE_NAME",
+ 7: "INVALID_URL",
+ 8: "INVALID_QUEUE_RATE",
+ 9: "PERMISSION_DENIED",
+ 10: "TASK_ALREADY_EXISTS",
+ 11: "TOMBSTONED_TASK",
+ 12: "INVALID_ETA",
+ 13: "INVALID_REQUEST",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueAddRequest_Header(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(58)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(66)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 52: break
+ if tt == 58:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 66:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
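+# Header is encoded as a protocol buffer group (field 6 of
+# TaskQueueAddRequest). Tag arithmetic, for reference: STARTGROUP is
+# (6 << 3) | 3 = 51, ENDGROUP is (6 << 3) | 4 = 52, and the nested key and
+# value fields are (7 << 3) | 2 = 58 and (8 << 3) | 2 = 66; that is why
+# TryMerge above stops at tag 52 instead of checking d.avail().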
+class TaskQueueAddRequest(ProtocolBuffer.ProtocolMessage):
+
+ GET = 1
+ POST = 2
+ HEAD = 3
+ PUT = 4
+ DELETE = 5
+
+ _RequestMethod_NAMES = {
+ 1: "GET",
+ 2: "POST",
+ 3: "HEAD",
+ 4: "PUT",
+ 5: "DELETE",
+ }
+
+ def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
+ RequestMethod_Name = classmethod(RequestMethod_Name)
+
+ has_queue_name_ = 0
+ queue_name_ = ""
+ has_task_name_ = 0
+ task_name_ = ""
+ has_eta_usec_ = 0
+ eta_usec_ = 0
+ has_method_ = 0
+ method_ = 2
+ has_url_ = 0
+ url_ = ""
+ has_body_ = 0
+ body_ = ""
+ has_transaction_ = 0
+ transaction_ = None
+
+ def __init__(self, contents=None):
+ self.header_ = []
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def queue_name(self): return self.queue_name_
+
+ def set_queue_name(self, x):
+ self.has_queue_name_ = 1
+ self.queue_name_ = x
+
+ def clear_queue_name(self):
+ if self.has_queue_name_:
+ self.has_queue_name_ = 0
+ self.queue_name_ = ""
+
+ def has_queue_name(self): return self.has_queue_name_
+
+ def task_name(self): return self.task_name_
+
+ def set_task_name(self, x):
+ self.has_task_name_ = 1
+ self.task_name_ = x
+
+ def clear_task_name(self):
+ if self.has_task_name_:
+ self.has_task_name_ = 0
+ self.task_name_ = ""
+
+ def has_task_name(self): return self.has_task_name_
+
+ def eta_usec(self): return self.eta_usec_
+
+ def set_eta_usec(self, x):
+ self.has_eta_usec_ = 1
+ self.eta_usec_ = x
+
+ def clear_eta_usec(self):
+ if self.has_eta_usec_:
+ self.has_eta_usec_ = 0
+ self.eta_usec_ = 0
+
+ def has_eta_usec(self): return self.has_eta_usec_
+
+ def method(self): return self.method_
+
+ def set_method(self, x):
+ self.has_method_ = 1
+ self.method_ = x
+
+ def clear_method(self):
+ if self.has_method_:
+ self.has_method_ = 0
+ self.method_ = 2
+
+ def has_method(self): return self.has_method_
+
+ def url(self): return self.url_
+
+ def set_url(self, x):
+ self.has_url_ = 1
+ self.url_ = x
+
+ def clear_url(self):
+ if self.has_url_:
+ self.has_url_ = 0
+ self.url_ = ""
+
+ def has_url(self): return self.has_url_
+
+ def header_size(self): return len(self.header_)
+ def header_list(self): return self.header_
+
+ def header(self, i):
+ return self.header_[i]
+
+ def mutable_header(self, i):
+ return self.header_[i]
+
+ def add_header(self):
+ x = TaskQueueAddRequest_Header()
+ self.header_.append(x)
+ return x
+
+ def clear_header(self):
+ self.header_ = []
+ def body(self): return self.body_
+
+ def set_body(self, x):
+ self.has_body_ = 1
+ self.body_ = x
+
+ def clear_body(self):
+ if self.has_body_:
+ self.has_body_ = 0
+ self.body_ = ""
+
+ def has_body(self): return self.has_body_
+
+ def transaction(self):
+ if self.transaction_ is None:
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.transaction_ is None: self.transaction_ = Transaction()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.transaction_
+
+ def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
+
+ def clear_transaction(self):
+ if self.has_transaction_:
+ self.has_transaction_ = 0;
+ if self.transaction_ is not None: self.transaction_.Clear()
+
+ def has_transaction(self): return self.has_transaction_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_queue_name()): self.set_queue_name(x.queue_name())
+ if (x.has_task_name()): self.set_task_name(x.task_name())
+ if (x.has_eta_usec()): self.set_eta_usec(x.eta_usec())
+ if (x.has_method()): self.set_method(x.method())
+ if (x.has_url()): self.set_url(x.url())
+ for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
+ if (x.has_body()): self.set_body(x.body())
+ if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_queue_name_ != x.has_queue_name_: return 0
+ if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
+ if self.has_task_name_ != x.has_task_name_: return 0
+ if self.has_task_name_ and self.task_name_ != x.task_name_: return 0
+ if self.has_eta_usec_ != x.has_eta_usec_: return 0
+ if self.has_eta_usec_ and self.eta_usec_ != x.eta_usec_: return 0
+ if self.has_method_ != x.has_method_: return 0
+ if self.has_method_ and self.method_ != x.method_: return 0
+ if self.has_url_ != x.has_url_: return 0
+ if self.has_url_ and self.url_ != x.url_: return 0
+ if len(self.header_) != len(x.header_): return 0
+ for e1, e2 in zip(self.header_, x.header_):
+ if e1 != e2: return 0
+ if self.has_body_ != x.has_body_: return 0
+ if self.has_body_ and self.body_ != x.body_: return 0
+ if self.has_transaction_ != x.has_transaction_: return 0
+ if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_queue_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: queue_name not set.')
+ if (not self.has_task_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: task_name not set.')
+ if (not self.has_eta_usec_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: eta_usec not set.')
+ if (not self.has_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: url not set.')
+ for p in self.header_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.queue_name_))
+ n += self.lengthString(len(self.task_name_))
+ n += self.lengthVarInt64(self.eta_usec_)
+ if (self.has_method_): n += 1 + self.lengthVarInt64(self.method_)
+ n += self.lengthString(len(self.url_))
+ n += 2 * len(self.header_)
+ for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
+ if (self.has_body_): n += 1 + self.lengthString(len(self.body_))
+ if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
+ return n + 4
+
+ def Clear(self):
+ self.clear_queue_name()
+ self.clear_task_name()
+ self.clear_eta_usec()
+ self.clear_method()
+ self.clear_url()
+ self.clear_header()
+ self.clear_body()
+ self.clear_transaction()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.queue_name_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.task_name_)
+ out.putVarInt32(24)
+ out.putVarInt64(self.eta_usec_)
+ out.putVarInt32(34)
+ out.putPrefixedString(self.url_)
+ if (self.has_method_):
+ out.putVarInt32(40)
+ out.putVarInt32(self.method_)
+ for i in xrange(len(self.header_)):
+ out.putVarInt32(51)
+ self.header_[i].OutputUnchecked(out)
+ out.putVarInt32(52)
+ if (self.has_body_):
+ out.putVarInt32(74)
+ out.putPrefixedString(self.body_)
+ if (self.has_transaction_):
+ out.putVarInt32(82)
+ out.putVarInt32(self.transaction_.ByteSize())
+ self.transaction_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_queue_name(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_task_name(d.getPrefixedString())
+ continue
+ if tt == 24:
+ self.set_eta_usec(d.getVarInt64())
+ continue
+ if tt == 34:
+ self.set_url(d.getPrefixedString())
+ continue
+ if tt == 40:
+ self.set_method(d.getVarInt32())
+ continue
+ if tt == 51:
+ self.add_header().TryMerge(d)
+ continue
+ if tt == 74:
+ self.set_body(d.getPrefixedString())
+ continue
+ if tt == 82:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_transaction().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
+ if self.has_task_name_: res+=prefix+("task_name: %s\n" % self.DebugFormatString(self.task_name_))
+ if self.has_eta_usec_: res+=prefix+("eta_usec: %s\n" % self.DebugFormatInt64(self.eta_usec_))
+ if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatInt32(self.method_))
+ if self.has_url_: res+=prefix+("url: %s\n" % self.DebugFormatString(self.url_))
+ cnt=0
+ for e in self.header_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Header%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
+ if self.has_transaction_:
+ res+=prefix+"transaction <\n"
+ res+=self.transaction_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kqueue_name = 1
+ ktask_name = 2
+ keta_usec = 3
+ kmethod = 5
+ kurl = 4
+ kHeaderGroup = 6
+ kHeaderkey = 7
+ kHeadervalue = 8
+ kbody = 9
+ ktransaction = 10
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "queue_name",
+ 2: "task_name",
+ 3: "eta_usec",
+ 4: "url",
+ 5: "method",
+ 6: "Header",
+ 7: "key",
+ 8: "value",
+ 9: "body",
+ 10: "transaction",
+ }, 10)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ 6: ProtocolBuffer.Encoder.STARTGROUP,
+ 7: ProtocolBuffer.Encoder.STRING,
+ 8: ProtocolBuffer.Encoder.STRING,
+ 9: ProtocolBuffer.Encoder.STRING,
+ 10: ProtocolBuffer.Encoder.STRING,
+ }, 10, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueAddResponse(ProtocolBuffer.ProtocolMessage):
+ has_chosen_task_name_ = 0
+ chosen_task_name_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def chosen_task_name(self): return self.chosen_task_name_
+
+ def set_chosen_task_name(self, x):
+ self.has_chosen_task_name_ = 1
+ self.chosen_task_name_ = x
+
+ def clear_chosen_task_name(self):
+ if self.has_chosen_task_name_:
+ self.has_chosen_task_name_ = 0
+ self.chosen_task_name_ = ""
+
+ def has_chosen_task_name(self): return self.has_chosen_task_name_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_chosen_task_name()): self.set_chosen_task_name(x.chosen_task_name())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_chosen_task_name_ != x.has_chosen_task_name_: return 0
+ if self.has_chosen_task_name_ and self.chosen_task_name_ != x.chosen_task_name_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_chosen_task_name_): n += 1 + self.lengthString(len(self.chosen_task_name_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_chosen_task_name()
+
+ def OutputUnchecked(self, out):
+ if (self.has_chosen_task_name_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.chosen_task_name_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_chosen_task_name(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_chosen_task_name_: res+=prefix+("chosen_task_name: %s\n" % self.DebugFormatString(self.chosen_task_name_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kchosen_task_name = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "chosen_task_name",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueUpdateQueueRequest(ProtocolBuffer.ProtocolMessage):
+ has_app_id_ = 0
+ app_id_ = ""
+ has_queue_name_ = 0
+ queue_name_ = ""
+ has_bucket_refill_per_second_ = 0
+ bucket_refill_per_second_ = 0.0
+ has_bucket_capacity_ = 0
+ bucket_capacity_ = 0
+ has_user_specified_rate_ = 0
+ user_specified_rate_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+ def queue_name(self): return self.queue_name_
+
+ def set_queue_name(self, x):
+ self.has_queue_name_ = 1
+ self.queue_name_ = x
+
+ def clear_queue_name(self):
+ if self.has_queue_name_:
+ self.has_queue_name_ = 0
+ self.queue_name_ = ""
+
+ def has_queue_name(self): return self.has_queue_name_
+
+ def bucket_refill_per_second(self): return self.bucket_refill_per_second_
+
+ def set_bucket_refill_per_second(self, x):
+ self.has_bucket_refill_per_second_ = 1
+ self.bucket_refill_per_second_ = x
+
+ def clear_bucket_refill_per_second(self):
+ if self.has_bucket_refill_per_second_:
+ self.has_bucket_refill_per_second_ = 0
+ self.bucket_refill_per_second_ = 0.0
+
+ def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
+
+ def bucket_capacity(self): return self.bucket_capacity_
+
+ def set_bucket_capacity(self, x):
+ self.has_bucket_capacity_ = 1
+ self.bucket_capacity_ = x
+
+ def clear_bucket_capacity(self):
+ if self.has_bucket_capacity_:
+ self.has_bucket_capacity_ = 0
+ self.bucket_capacity_ = 0
+
+ def has_bucket_capacity(self): return self.has_bucket_capacity_
+
+ def user_specified_rate(self): return self.user_specified_rate_
+
+ def set_user_specified_rate(self, x):
+ self.has_user_specified_rate_ = 1
+ self.user_specified_rate_ = x
+
+ def clear_user_specified_rate(self):
+ if self.has_user_specified_rate_:
+ self.has_user_specified_rate_ = 0
+ self.user_specified_rate_ = ""
+
+ def has_user_specified_rate(self): return self.has_user_specified_rate_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+ if (x.has_queue_name()): self.set_queue_name(x.queue_name())
+ if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
+ if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
+ if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ if self.has_queue_name_ != x.has_queue_name_: return 0
+ if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
+ if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
+ if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
+ if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
+ if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
+ if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
+ if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app_id not set.')
+ if (not self.has_queue_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: queue_name not set.')
+ if (not self.has_bucket_refill_per_second_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bucket_refill_per_second not set.')
+ if (not self.has_bucket_capacity_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bucket_capacity not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_id_))
+ n += self.lengthString(len(self.queue_name_))
+ n += self.lengthVarInt64(self.bucket_capacity_)
+ if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
+ return n + 12
+
+ def Clear(self):
+ self.clear_app_id()
+ self.clear_queue_name()
+ self.clear_bucket_refill_per_second()
+ self.clear_bucket_capacity()
+ self.clear_user_specified_rate()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_id_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.queue_name_)
+ out.putVarInt32(25)
+ out.putDouble(self.bucket_refill_per_second_)
+ out.putVarInt32(32)
+ out.putVarInt32(self.bucket_capacity_)
+ if (self.has_user_specified_rate_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.user_specified_rate_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_queue_name(d.getPrefixedString())
+ continue
+ if tt == 25:
+ self.set_bucket_refill_per_second(d.getDouble())
+ continue
+ if tt == 32:
+ self.set_bucket_capacity(d.getVarInt32())
+ continue
+ if tt == 42:
+ self.set_user_specified_rate(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
+ if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
+ if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormatInt32(self.bucket_capacity_))
+ if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp_id = 1
+ kqueue_name = 2
+ kbucket_refill_per_second = 3
+ kbucket_capacity = 4
+ kuser_specified_rate = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app_id",
+ 2: "queue_name",
+ 3: "bucket_refill_per_second",
+ 4: "bucket_capacity",
+ 5: "user_specified_rate",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.DOUBLE,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
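+# Illustrative sketch (not emitted by the protobuf generator): these messages
+# round-trip through the wire format via helpers inherited from
+# ProtocolBuffer.ProtocolMessage; Encode() is assumed from that base class,
+# while __init__ above parses serialized bytes with MergeFromString:
+#
+#   req = TaskQueueUpdateQueueRequest()
+#   req.set_app_id('my-app')
+#   req.set_queue_name('background')
+#   req.set_bucket_refill_per_second(5.0)
+#   req.set_bucket_capacity(10)
+#   copy = TaskQueueUpdateQueueRequest(req.Encode())
+#   assert copy.Equals(req)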
+class TaskQueueUpdateQueueResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueFetchQueuesRequest(ProtocolBuffer.ProtocolMessage):
+ has_app_id_ = 0
+ app_id_ = ""
+ has_max_rows_ = 0
+ max_rows_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+ def max_rows(self): return self.max_rows_
+
+ def set_max_rows(self, x):
+ self.has_max_rows_ = 1
+ self.max_rows_ = x
+
+ def clear_max_rows(self):
+ if self.has_max_rows_:
+ self.has_max_rows_ = 0
+ self.max_rows_ = 0
+
+ def has_max_rows(self): return self.has_max_rows_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+ if (x.has_max_rows()): self.set_max_rows(x.max_rows())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ if self.has_max_rows_ != x.has_max_rows_: return 0
+ if self.has_max_rows_ and self.max_rows_ != x.max_rows_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app_id not set.')
+ if (not self.has_max_rows_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: max_rows not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_id_))
+ n += self.lengthVarInt64(self.max_rows_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_app_id()
+ self.clear_max_rows()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_id_)
+ out.putVarInt32(16)
+ out.putVarInt32(self.max_rows_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if tt == 16:
+ self.set_max_rows(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ if self.has_max_rows_: res+=prefix+("max_rows: %s\n" % self.DebugFormatInt32(self.max_rows_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp_id = 1
+ kmax_rows = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app_id",
+ 2: "max_rows",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueFetchQueuesResponse_Queue(ProtocolBuffer.ProtocolMessage):
+ has_queue_name_ = 0
+ queue_name_ = ""
+ has_bucket_refill_per_second_ = 0
+ bucket_refill_per_second_ = 0.0
+ has_bucket_capacity_ = 0
+ bucket_capacity_ = 0.0
+ has_user_specified_rate_ = 0
+ user_specified_rate_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def queue_name(self): return self.queue_name_
+
+ def set_queue_name(self, x):
+ self.has_queue_name_ = 1
+ self.queue_name_ = x
+
+ def clear_queue_name(self):
+ if self.has_queue_name_:
+ self.has_queue_name_ = 0
+ self.queue_name_ = ""
+
+ def has_queue_name(self): return self.has_queue_name_
+
+ def bucket_refill_per_second(self): return self.bucket_refill_per_second_
+
+ def set_bucket_refill_per_second(self, x):
+ self.has_bucket_refill_per_second_ = 1
+ self.bucket_refill_per_second_ = x
+
+ def clear_bucket_refill_per_second(self):
+ if self.has_bucket_refill_per_second_:
+ self.has_bucket_refill_per_second_ = 0
+ self.bucket_refill_per_second_ = 0.0
+
+ def has_bucket_refill_per_second(self): return self.has_bucket_refill_per_second_
+
+ def bucket_capacity(self): return self.bucket_capacity_
+
+ def set_bucket_capacity(self, x):
+ self.has_bucket_capacity_ = 1
+ self.bucket_capacity_ = x
+
+ def clear_bucket_capacity(self):
+ if self.has_bucket_capacity_:
+ self.has_bucket_capacity_ = 0
+ self.bucket_capacity_ = 0.0
+
+ def has_bucket_capacity(self): return self.has_bucket_capacity_
+
+ def user_specified_rate(self): return self.user_specified_rate_
+
+ def set_user_specified_rate(self, x):
+ self.has_user_specified_rate_ = 1
+ self.user_specified_rate_ = x
+
+ def clear_user_specified_rate(self):
+ if self.has_user_specified_rate_:
+ self.has_user_specified_rate_ = 0
+ self.user_specified_rate_ = ""
+
+ def has_user_specified_rate(self): return self.has_user_specified_rate_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_queue_name()): self.set_queue_name(x.queue_name())
+ if (x.has_bucket_refill_per_second()): self.set_bucket_refill_per_second(x.bucket_refill_per_second())
+ if (x.has_bucket_capacity()): self.set_bucket_capacity(x.bucket_capacity())
+ if (x.has_user_specified_rate()): self.set_user_specified_rate(x.user_specified_rate())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_queue_name_ != x.has_queue_name_: return 0
+ if self.has_queue_name_ and self.queue_name_ != x.queue_name_: return 0
+ if self.has_bucket_refill_per_second_ != x.has_bucket_refill_per_second_: return 0
+ if self.has_bucket_refill_per_second_ and self.bucket_refill_per_second_ != x.bucket_refill_per_second_: return 0
+ if self.has_bucket_capacity_ != x.has_bucket_capacity_: return 0
+ if self.has_bucket_capacity_ and self.bucket_capacity_ != x.bucket_capacity_: return 0
+ if self.has_user_specified_rate_ != x.has_user_specified_rate_: return 0
+ if self.has_user_specified_rate_ and self.user_specified_rate_ != x.user_specified_rate_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_queue_name_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: queue_name not set.')
+ if (not self.has_bucket_refill_per_second_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bucket_refill_per_second not set.')
+ if (not self.has_bucket_capacity_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bucket_capacity not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.queue_name_))
+ if (self.has_user_specified_rate_): n += 1 + self.lengthString(len(self.user_specified_rate_))
+ return n + 19
+
+ def Clear(self):
+ self.clear_queue_name()
+ self.clear_bucket_refill_per_second()
+ self.clear_bucket_capacity()
+ self.clear_user_specified_rate()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.queue_name_)
+ out.putVarInt32(25)
+ out.putDouble(self.bucket_refill_per_second_)
+ out.putVarInt32(33)
+ out.putDouble(self.bucket_capacity_)
+ if (self.has_user_specified_rate_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.user_specified_rate_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_queue_name(d.getPrefixedString())
+ continue
+ if tt == 25:
+ self.set_bucket_refill_per_second(d.getDouble())
+ continue
+ if tt == 33:
+ self.set_bucket_capacity(d.getDouble())
+ continue
+ if tt == 42:
+ self.set_user_specified_rate(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_queue_name_: res+=prefix+("queue_name: %s\n" % self.DebugFormatString(self.queue_name_))
+ if self.has_bucket_refill_per_second_: res+=prefix+("bucket_refill_per_second: %s\n" % self.DebugFormat(self.bucket_refill_per_second_))
+ if self.has_bucket_capacity_: res+=prefix+("bucket_capacity: %s\n" % self.DebugFormat(self.bucket_capacity_))
+ if self.has_user_specified_rate_: res+=prefix+("user_specified_rate: %s\n" % self.DebugFormatString(self.user_specified_rate_))
+ return res
+
+class TaskQueueFetchQueuesResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.queue_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def queue_size(self): return len(self.queue_)
+ def queue_list(self): return self.queue_
+
+ def queue(self, i):
+ return self.queue_[i]
+
+ def mutable_queue(self, i):
+ return self.queue_[i]
+
+ def add_queue(self):
+ x = TaskQueueFetchQueuesResponse_Queue()
+ self.queue_.append(x)
+ return x
+
+ def clear_queue(self):
+ self.queue_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.queue_size()): self.add_queue().CopyFrom(x.queue(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.queue_) != len(x.queue_): return 0
+ for e1, e2 in zip(self.queue_, x.queue_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.queue_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.queue_)
+ for i in xrange(len(self.queue_)): n += self.queue_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_queue()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.queue_)):
+ out.putVarInt32(11)
+ self.queue_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_queue().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.queue_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Queue%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kQueueGroup = 1
+ kQueuequeue_name = 2
+ kQueuebucket_refill_per_second = 3
+ kQueuebucket_capacity = 4
+ kQueueuser_specified_rate = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Queue",
+ 2: "queue_name",
+ 3: "bucket_refill_per_second",
+ 4: "bucket_capacity",
+ 5: "user_specified_rate",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.DOUBLE,
+ 4: ProtocolBuffer.Encoder.DOUBLE,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
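+# Note: Queue above is encoded as a protocol buffer *group*: OutputUnchecked()
+# frames each element with tag 11 (field 1, STARTGROUP) and tag 12 (ENDGROUP),
+# which is why TaskQueueFetchQueuesResponse_Queue.TryMerge() stops when it
+# reads tt == 12.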
+class TaskQueueFetchQueueStatsRequest(ProtocolBuffer.ProtocolMessage):
+ has_app_id_ = 0
+ app_id_ = ""
+ has_max_num_tasks_ = 0
+ max_num_tasks_ = 0
+
+ def __init__(self, contents=None):
+ self.queue_name_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def app_id(self): return self.app_id_
+
+ def set_app_id(self, x):
+ self.has_app_id_ = 1
+ self.app_id_ = x
+
+ def clear_app_id(self):
+ if self.has_app_id_:
+ self.has_app_id_ = 0
+ self.app_id_ = ""
+
+ def has_app_id(self): return self.has_app_id_
+
+ def queue_name_size(self): return len(self.queue_name_)
+ def queue_name_list(self): return self.queue_name_
+
+ def queue_name(self, i):
+ return self.queue_name_[i]
+
+ def set_queue_name(self, i, x):
+ self.queue_name_[i] = x
+
+ def add_queue_name(self, x):
+ self.queue_name_.append(x)
+
+ def clear_queue_name(self):
+ self.queue_name_ = []
+
+ def max_num_tasks(self): return self.max_num_tasks_
+
+ def set_max_num_tasks(self, x):
+ self.has_max_num_tasks_ = 1
+ self.max_num_tasks_ = x
+
+ def clear_max_num_tasks(self):
+ if self.has_max_num_tasks_:
+ self.has_max_num_tasks_ = 0
+ self.max_num_tasks_ = 0
+
+ def has_max_num_tasks(self): return self.has_max_num_tasks_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_app_id()): self.set_app_id(x.app_id())
+ for i in xrange(x.queue_name_size()): self.add_queue_name(x.queue_name(i))
+ if (x.has_max_num_tasks()): self.set_max_num_tasks(x.max_num_tasks())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_app_id_ != x.has_app_id_: return 0
+ if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
+ if len(self.queue_name_) != len(x.queue_name_): return 0
+ for e1, e2 in zip(self.queue_name_, x.queue_name_):
+ if e1 != e2: return 0
+ if self.has_max_num_tasks_ != x.has_max_num_tasks_: return 0
+ if self.has_max_num_tasks_ and self.max_num_tasks_ != x.max_num_tasks_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_app_id_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: app_id not set.')
+ if (not self.has_max_num_tasks_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: max_num_tasks not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.app_id_))
+ n += 1 * len(self.queue_name_)
+ for i in xrange(len(self.queue_name_)): n += self.lengthString(len(self.queue_name_[i]))
+ n += self.lengthVarInt64(self.max_num_tasks_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_app_id()
+ self.clear_queue_name()
+ self.clear_max_num_tasks()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.app_id_)
+ for i in xrange(len(self.queue_name_)):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.queue_name_[i])
+ out.putVarInt32(24)
+ out.putVarInt32(self.max_num_tasks_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_app_id(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.add_queue_name(d.getPrefixedString())
+ continue
+ if tt == 24:
+ self.set_max_num_tasks(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
+ cnt=0
+ for e in self.queue_name_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("queue_name%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_max_num_tasks_: res+=prefix+("max_num_tasks: %s\n" % self.DebugFormatInt32(self.max_num_tasks_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kapp_id = 1
+ kqueue_name = 2
+ kmax_num_tasks = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "app_id",
+ 2: "queue_name",
+ 3: "max_num_tasks",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class TaskQueueFetchQueueStatsResponse_QueueStats(ProtocolBuffer.ProtocolMessage):
+ has_num_tasks_ = 0
+ num_tasks_ = 0
+ has_oldest_eta_usec_ = 0
+ oldest_eta_usec_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def num_tasks(self): return self.num_tasks_
+
+ def set_num_tasks(self, x):
+ self.has_num_tasks_ = 1
+ self.num_tasks_ = x
+
+ def clear_num_tasks(self):
+ if self.has_num_tasks_:
+ self.has_num_tasks_ = 0
+ self.num_tasks_ = 0
+
+ def has_num_tasks(self): return self.has_num_tasks_
+
+ def oldest_eta_usec(self): return self.oldest_eta_usec_
+
+ def set_oldest_eta_usec(self, x):
+ self.has_oldest_eta_usec_ = 1
+ self.oldest_eta_usec_ = x
+
+ def clear_oldest_eta_usec(self):
+ if self.has_oldest_eta_usec_:
+ self.has_oldest_eta_usec_ = 0
+ self.oldest_eta_usec_ = 0
+
+ def has_oldest_eta_usec(self): return self.has_oldest_eta_usec_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_num_tasks()): self.set_num_tasks(x.num_tasks())
+ if (x.has_oldest_eta_usec()): self.set_oldest_eta_usec(x.oldest_eta_usec())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_num_tasks_ != x.has_num_tasks_: return 0
+ if self.has_num_tasks_ and self.num_tasks_ != x.num_tasks_: return 0
+ if self.has_oldest_eta_usec_ != x.has_oldest_eta_usec_: return 0
+ if self.has_oldest_eta_usec_ and self.oldest_eta_usec_ != x.oldest_eta_usec_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_num_tasks_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: num_tasks not set.')
+ if (not self.has_oldest_eta_usec_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: oldest_eta_usec not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.num_tasks_)
+ n += self.lengthVarInt64(self.oldest_eta_usec_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_num_tasks()
+ self.clear_oldest_eta_usec()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(16)
+ out.putVarInt32(self.num_tasks_)
+ out.putVarInt32(24)
+ out.putVarInt64(self.oldest_eta_usec_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 16:
+ self.set_num_tasks(d.getVarInt32())
+ continue
+ if tt == 24:
+ self.set_oldest_eta_usec(d.getVarInt64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_num_tasks_: res+=prefix+("num_tasks: %s\n" % self.DebugFormatInt32(self.num_tasks_))
+ if self.has_oldest_eta_usec_: res+=prefix+("oldest_eta_usec: %s\n" % self.DebugFormatInt64(self.oldest_eta_usec_))
+ return res
+
+class TaskQueueFetchQueueStatsResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.queuestats_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def queuestats_size(self): return len(self.queuestats_)
+ def queuestats_list(self): return self.queuestats_
+
+ def queuestats(self, i):
+ return self.queuestats_[i]
+
+ def mutable_queuestats(self, i):
+ return self.queuestats_[i]
+
+ def add_queuestats(self):
+ x = TaskQueueFetchQueueStatsResponse_QueueStats()
+ self.queuestats_.append(x)
+ return x
+
+ def clear_queuestats(self):
+ self.queuestats_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.queuestats_size()): self.add_queuestats().CopyFrom(x.queuestats(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.queuestats_) != len(x.queuestats_): return 0
+ for e1, e2 in zip(self.queuestats_, x.queuestats_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.queuestats_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.queuestats_)
+ for i in xrange(len(self.queuestats_)): n += self.queuestats_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_queuestats()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.queuestats_)):
+ out.putVarInt32(11)
+ self.queuestats_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_queuestats().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.queuestats_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("QueueStats%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kQueueStatsGroup = 1
+ kQueueStatsnum_tasks = 2
+ kQueueStatsoldest_eta_usec = 3
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "QueueStats",
+ 2: "num_tasks",
+ 3: "oldest_eta_usec",
+ }, 3)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ }, 3, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['TaskQueueServiceError','TaskQueueAddRequest','TaskQueueAddRequest_Header','TaskQueueAddResponse','TaskQueueUpdateQueueRequest','TaskQueueUpdateQueueResponse','TaskQueueFetchQueuesRequest','TaskQueueFetchQueuesResponse','TaskQueueFetchQueuesResponse_Queue','TaskQueueFetchQueueStatsRequest','TaskQueueFetchQueueStatsResponse','TaskQueueFetchQueueStatsResponse_QueueStats']
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.pyc b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.pyc
new file mode 100644
index 0000000..e0c961a
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py
new file mode 100755
index 0000000..dfe5e16
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.py
@@ -0,0 +1,327 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the Task Queue API.
+
+This stub only stores tasks; it doesn't actually run them. It also validates
+the tasks by checking their queue name against the queue.yaml.
+
+As well as implementing Task Queue API functions, the stub exposes various other
+functions that are used by the dev_appserver's admin console to display the
+application's queues and tasks.
+"""
+
+
+
+import base64
+import datetime
+import os
+
+import taskqueue_service_pb
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import queueinfo
+from google.appengine.api import urlfetch
+from google.appengine.runtime import apiproxy_errors
+
+
+DEFAULT_RATE = '5.00/s'
+
+DEFAULT_BUCKET_SIZE = 5
+
+MAX_ETA_DELTA_DAYS = 30
+
+
+def _ParseQueueYaml(unused_self, root_path):
+ """Loads the queue.yaml file and parses it.
+
+ Args:
+ unused_self: Allows this function to be bound to a class member. Not used.
+    root_path: Directory containing queue.yaml.
+
+  Returns:
+    None if queue.yaml doesn't exist, otherwise a queueinfo.QueueEntry object
+    populated from the queue.yaml.
+ """
+ if root_path is None:
+ return None
+ for queueyaml in ('queue.yaml', 'queue.yml'):
+ try:
+ fh = open(os.path.join(root_path, queueyaml), 'r')
+ except IOError:
+ continue
+ try:
+ queue_info = queueinfo.LoadSingleQueue(fh)
+ return queue_info
+ finally:
+ fh.close()
+ return None
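+
+# A queue.yaml that _ParseQueueYaml accepts looks roughly like this
+# (sketch; see queueinfo for the authoritative schema):
+#
+#   queue:
+#   - name: default
+#     rate: 5/s
+#   - name: mail-throttle
+#     rate: 2000/d
+#     bucket_size: 10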
+
+
+def _CompareTasksByEta(a, b):
+ """Python sort comparator for tasks by estimated time of arrival (ETA).
+
+ Args:
+ a: A taskqueue_service_pb.TaskQueueAddRequest.
+ b: A taskqueue_service_pb.TaskQueueAddRequest.
+
+ Returns:
+ Standard 1/0/-1 comparison result.
+ """
+ if a.eta_usec() > b.eta_usec():
+ return 1
+ if a.eta_usec() < b.eta_usec():
+ return -1
+ return 0
+
+
+def _FormatEta(eta_usec):
+ """Formats a task ETA as a date string in UTC."""
+  eta = datetime.datetime.utcfromtimestamp(eta_usec/1000000)
+ return eta.strftime('%Y/%m/%d %H:%M:%S')
+
+
+def _EtaDelta(eta_usec):
+ """Formats a task ETA as a relative time string."""
+  eta = datetime.datetime.utcfromtimestamp(eta_usec/1000000)
+ now = datetime.datetime.utcnow()
+ if eta > now:
+ return str(eta - now) + ' from now'
+ else:
+ return str(now - eta) + ' ago'
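+
+# For instance, a task six seconds overdue renders as a
+# '2009/02/02 05:37:42' style string via _FormatEta and as
+# '0:00:06.342511 ago' via _EtaDelta, matching the console examples in
+# GetQueues() and GetTasks() below.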
+
+
+class TaskQueueServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only task queue service stub.
+
+ This stub does not attempt to automatically execute tasks. Instead, it
+ stores them for display on a console. The user may manually execute the
+ tasks from the console.
+ """
+
+ queue_yaml_parser = _ParseQueueYaml
+
+ def __init__(self, service_name='taskqueue', root_path=None):
+ """Constructor.
+
+ Args:
+ service_name: Service name expected for all calls.
+ root_path: Root path to the directory of the application which may contain
+ a queue.yaml file. If None, then it's assumed no queue.yaml file is
+ available.
+ """
+ super(TaskQueueServiceStub, self).__init__(service_name)
+ self._taskqueues = {}
+ self._next_task_id = 1
+ self._root_path = root_path
+
+ def _Dynamic_Add(self, request, response):
+ """Local implementation of the Add RPC in TaskQueueService.
+
+ Must adhere to the '_Dynamic_' naming convention for stubbing to work.
+ See taskqueue_service.proto for a full description of the RPC.
+
+ Args:
+ request: A taskqueue_service_pb.TaskQueueAddRequest.
+ response: A taskqueue_service_pb.TaskQueueAddResponse.
+ """
+ if request.eta_usec() < 0:
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+
+ eta = datetime.datetime.utcfromtimestamp(request.eta_usec() / 1e6)
+ max_eta = (datetime.datetime.utcnow() +
+ datetime.timedelta(days=MAX_ETA_DELTA_DAYS))
+ if eta > max_eta:
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.INVALID_ETA)
+
+ if not self._IsValidQueue(request.queue_name()):
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.UNKNOWN_QUEUE)
+
+ if not request.task_name():
+ request.set_task_name('task%d' % self._next_task_id)
+ response.set_chosen_task_name(request.task_name())
+ self._next_task_id += 1
+
+ tasks = self._taskqueues.setdefault(request.queue_name(), [])
+ for task in tasks:
+ if task.task_name() == request.task_name():
+ raise apiproxy_errors.ApplicationError(
+ taskqueue_service_pb.TaskQueueServiceError.TASK_ALREADY_EXISTS)
+ tasks.append(request)
+ tasks.sort(_CompareTasksByEta)
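+
+  # Direct-call sketch (normally this is reached through the apiproxy; the
+  # setters below mirror the generated TaskQueueAddRequest accessors used
+  # above):
+  #
+  #   request = taskqueue_service_pb.TaskQueueAddRequest()
+  #   request.set_queue_name('default')
+  #   request.set_task_name('')   # empty name -> stub assigns 'task<N>'
+  #   request.set_eta_usec(0)
+  #   response = taskqueue_service_pb.TaskQueueAddResponse()
+  #   stub._Dynamic_Add(request, response)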
+
+ def _IsValidQueue(self, queue_name):
+ """Determines whether a queue is valid, i.e. tasks can be added to it.
+
+ Valid queues are the 'default' queue, plus any queues in the queue.yaml
+ file.
+
+ Args:
+ queue_name: the name of the queue to validate.
+
+ Returns:
+ True iff queue is valid.
+ """
+ if queue_name == 'default':
+ return True
+ queue_info = self.queue_yaml_parser(self._root_path)
+ if queue_info and queue_info.queue:
+ for entry in queue_info.queue:
+ if entry.name == queue_name:
+ return True
+ return False
+
+ def GetQueues(self):
+ """Gets all the applications's queues.
+
+ Returns:
+ A list of dictionaries, where each dictionary contains one queue's
+ attributes. E.g.:
+ [{'name': 'some-queue',
+ 'max_rate': '1/s',
+ 'bucket_size': 5,
+ 'oldest_task': '2009/02/02 05:37:42',
+ 'eta_delta': '0:00:06.342511 ago',
+ 'tasks_in_queue': 12}, ...]
+ """
+ queues = []
+ queue_info = self.queue_yaml_parser(self._root_path)
+ has_default = False
+ if queue_info and queue_info.queue:
+ for entry in queue_info.queue:
+ if entry.name == 'default':
+ has_default = True
+ queue = {}
+ queues.append(queue)
+ queue['name'] = entry.name
+ queue['max_rate'] = entry.rate
+ if entry.bucket_size:
+ queue['bucket_size'] = entry.bucket_size
+ else:
+ queue['bucket_size'] = DEFAULT_BUCKET_SIZE
+
+ tasks = self._taskqueues.setdefault(entry.name, [])
+ if tasks:
+ queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
+ queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
+ else:
+ queue['oldest_task'] = ''
+ queue['tasks_in_queue'] = len(tasks)
+
+ if not has_default:
+ queue = {}
+ queues.append(queue)
+ queue['name'] = 'default'
+ queue['max_rate'] = DEFAULT_RATE
+ queue['bucket_size'] = DEFAULT_BUCKET_SIZE
+
+ tasks = self._taskqueues.get('default', [])
+ if tasks:
+ queue['oldest_task'] = _FormatEta(tasks[0].eta_usec())
+ queue['eta_delta'] = _EtaDelta(tasks[0].eta_usec())
+ else:
+ queue['oldest_task'] = ''
+ queue['tasks_in_queue'] = len(tasks)
+ return queues
+
+ def GetTasks(self, queue_name):
+ """Gets a queue's tasks.
+
+ Args:
+ queue_name: Queue's name to return tasks for.
+
+ Returns:
+ A list of dictionaries, where each dictionary contains one task's
+ attributes. E.g.
+ [{'name': 'task-123',
+ 'url': '/update',
+ 'method': 'GET',
+ 'eta': '2009/02/02 05:37:42',
+ 'eta_delta': '0:00:06.342511 ago',
+ 'body': '',
+ 'headers': {'X-AppEngine-QueueName': 'update-queue',
+ 'X-AppEngine-TaskName': 'task-123',
+ 'X-AppEngine-TaskRetryCount': '0',
+ 'X-AppEngine-Development-Payload': '1',
+ 'Content-Length': 0,
+                   'Content-Type': 'application/octet-stream'}}, ...]
+
+ Raises:
+ ValueError: A task request contains an unknown HTTP method type.
+ """
+ tasks = self._taskqueues.get(queue_name, [])
+ result_tasks = []
+ for task_request in tasks:
+ task = {}
+ result_tasks.append(task)
+ task['name'] = task_request.task_name()
+ task['url'] = task_request.url()
+ method = task_request.method()
+ if method == taskqueue_service_pb.TaskQueueAddRequest.GET:
+ task['method'] = 'GET'
+ elif method == taskqueue_service_pb.TaskQueueAddRequest.POST:
+ task['method'] = 'POST'
+ elif method == taskqueue_service_pb.TaskQueueAddRequest.HEAD:
+ task['method'] = 'HEAD'
+ elif method == taskqueue_service_pb.TaskQueueAddRequest.PUT:
+ task['method'] = 'PUT'
+ elif method == taskqueue_service_pb.TaskQueueAddRequest.DELETE:
+ task['method'] = 'DELETE'
+ else:
+ raise ValueError('Unexpected method: %d' % method)
+
+ task['eta'] = _FormatEta(task_request.eta_usec())
+ task['eta_delta'] = _EtaDelta(task_request.eta_usec())
+ task['body'] = base64.b64encode(task_request.body())
+ headers = urlfetch._CaselessDict()
+ task['headers'] = headers
+ for req_header in task_request.header_list():
+ headers[req_header.key()] = req_header.value()
+
+ headers['X-AppEngine-QueueName'] = queue_name
+ headers['X-AppEngine-TaskName'] = task['name']
+ headers['X-AppEngine-TaskRetryCount'] = '0'
+ headers['X-AppEngine-Development-Payload'] = '1'
+ headers['Content-Length'] = len(task['body'])
+ headers['Content-Type'] = headers.get(
+ 'Content-Type', 'application/octet-stream')
+
+ return result_tasks
+
+ def DeleteTask(self, queue_name, task_name):
+ """Deletes a task from a queue.
+
+ Args:
+ queue_name: the name of the queue to delete the task from.
+ task_name: the name of the task to delete.
+ """
+ tasks = self._taskqueues.get(queue_name, [])
+ for task in tasks:
+ if task.task_name() == task_name:
+ tasks.remove(task)
+ return
+
+ def FlushQueue(self, queue_name):
+ """Removes all tasks from a queue.
+
+ Args:
+ queue_name: the name of the queue to remove tasks from.
+ """
+ self._taskqueues[queue_name] = []
diff --git a/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.pyc b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.pyc
new file mode 100644
index 0000000..d23fecb
--- /dev/null
+++ b/google_appengine/google/appengine/api/labs/taskqueue/taskqueue_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/mail.py b/google_appengine/google/appengine/api/mail.py
new file mode 100755
index 0000000..ca6df88
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail.py
@@ -0,0 +1,1127 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Sends email on behalf of application.
+
+Provides functions for application developers to provide email services
+for their applications. Also provides a few utility methods.
+"""
+
+
+
+
+
+
+import email
+from email import MIMEBase
+from email import MIMEMultipart
+from email import MIMEText
+import logging
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import mail_service_pb
+from google.appengine.api import users
+from google.appengine.api.mail_errors import *
+from google.appengine.runtime import apiproxy_errors
+
+
+
+ERROR_MAP = {
+ mail_service_pb.MailServiceError.BAD_REQUEST:
+ BadRequestError,
+
+ mail_service_pb.MailServiceError.UNAUTHORIZED_SENDER:
+ InvalidSenderError,
+
+ mail_service_pb.MailServiceError.INVALID_ATTACHMENT_TYPE:
+ InvalidAttachmentTypeError,
+}
+
+
+EXTENSION_MIME_MAP = {
+ 'asc': 'text/plain',
+ 'bmp': 'image/x-ms-bmp',
+ 'css': 'text/css',
+ 'csv': 'text/csv',
+ 'diff': 'text/plain',
+ 'gif': 'image/gif',
+ 'htm': 'text/html',
+ 'html': 'text/html',
+ 'ics': 'text/calendar',
+ 'jpe': 'image/jpeg',
+ 'jpeg': 'image/jpeg',
+ 'jpg': 'image/jpeg',
+ 'pdf': 'application/pdf',
+ 'png': 'image/png',
+ 'pot': 'text/plain',
+ 'rss': 'text/rss+xml',
+ 'text': 'text/plain',
+ 'tif': 'image/tiff',
+ 'tiff': 'image/tiff',
+ 'txt': 'text/plain',
+ 'vcf': 'text/directory',
+ 'wbmp': 'image/vnd.wap.wbmp',
+ }
+
+EXTENSION_WHITELIST = frozenset(EXTENSION_MIME_MAP.iterkeys())
+
+
+def invalid_email_reason(email_address, field):
+ """Determine reason why email is invalid.
+
+ Args:
+ email_address: Email to check.
+ field: Field that is invalid.
+
+ Returns:
+ String indicating invalid email reason if there is one,
+ else None.
+ """
+ if email_address is None:
+ return 'None email address for %s.' % field
+
+ if isinstance(email_address, users.User):
+ email_address = email_address.email()
+ if not isinstance(email_address, basestring):
+ return 'Invalid email address type for %s.' % field
+ stripped_address = email_address.strip()
+ if not stripped_address:
+ return 'Empty email address for %s.' % field
+ return None
+
+InvalidEmailReason = invalid_email_reason
+
+
+def is_email_valid(email_address):
+ """Determine if email is invalid.
+
+ Args:
+ email_address: Email to check.
+
+ Returns:
+ True if email is valid, else False.
+ """
+ return invalid_email_reason(email_address, '') is None
+
+IsEmailValid = is_email_valid
+
+
+def check_email_valid(email_address, field):
+ """Check that email is valid.
+
+ Args:
+ email_address: Email to check.
+ field: Field to check.
+
+ Raises:
+ InvalidEmailError if email_address is invalid.
+ """
+ reason = invalid_email_reason(email_address, field)
+ if reason is not None:
+ raise InvalidEmailError(reason)
+
+CheckEmailValid = check_email_valid
+
+
+def _email_check_and_list(emails, field):
+  """Check a single email or sequence of emails and return a sequence.
+
+  Args:
+    emails: Single email or list of emails.
+    field: Field being checked, used in error messages.
+
+  Returns:
+    Sequence of email addresses.
+
+  Raises:
+    InvalidEmailError if any email addresses are invalid.
+  """
+  if isinstance(emails, basestring):
+    check_email_valid(emails, field)
+    return emails,
+  for address in iter(emails):
+    check_email_valid(address, field)
+  return emails
+
+
+def _email_sequence(emails):
+ """Forces email to be sequenceable type.
+
+ Iterable values are returned as is. This function really just wraps the case
+ where there is a single email string.
+
+ Args:
+ emails: Emails (or email) to coerce to sequence.
+
+ Returns:
+ Single tuple with email in it if only one email string provided,
+ else returns emails as is.
+ """
+ if isinstance(emails, basestring):
+ return emails,
+ return emails
+
+
+def _attachment_sequence(attachments):
+ """Forces attachments to be sequenceable type.
+
+ Iterable values are returned as is. This function really just wraps the case
+ where there is a single attachment.
+
+ Args:
+ attachments: Attachments (or attachment) to coerce to sequence.
+
+ Returns:
+ Single tuple with attachment tuple in it if only one attachment provided,
+ else returns attachments as is.
+ """
+ if len(attachments) == 2 and isinstance(attachments[0], basestring):
+ return attachments,
+ return attachments
+
+
+def _parse_mime_message(mime_message):
+ """Helper function converts a mime_message in to email.Message.Message.
+
+ Args:
+ mime_message: MIME Message, string or file containing mime message.
+
+ Returns:
+ Instance of email.Message.Message. Will return mime_message if already
+ an instance.
+ """
+ if isinstance(mime_message, email.Message.Message):
+ return mime_message
+ elif isinstance(mime_message, basestring):
+ return email.message_from_string(mime_message)
+ else:
+ return email.message_from_file(mime_message)
+
+
+def send_mail(sender,
+ to,
+ subject,
+ body,
+ make_sync_call=apiproxy_stub_map.MakeSyncCall,
+ **kw):
+ """Sends mail on behalf of application.
+
+ Args:
+ sender: Sender email address as appears in the 'from' email line.
+ to: List of 'to' addresses or a single address.
+ subject: Message subject string.
+ body: Body of type text/plain.
+ make_sync_call: Function used to make sync call to API proxy.
+ kw: Keyword arguments compatible with EmailMessage keyword based
+ constructor.
+
+ Raises:
+ InvalidEmailError when invalid email address provided.
+ """
+ kw['sender'] = sender
+ kw['to'] = to
+ kw['subject'] = subject
+ kw['body'] = body
+ message = EmailMessage(**kw)
+ message.send(make_sync_call)
+
+SendMail = send_mail
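+
+# Typical use (sketch; the sender must be an address the application is
+# authorized to send from):
+#
+#   send_mail(sender='admin@example.com',
+#             to='user@example.com',
+#             subject='Welcome',
+#             body='Plain-text body.')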
+
+
+def send_mail_to_admins(sender,
+ subject,
+ body,
+ make_sync_call=apiproxy_stub_map.MakeSyncCall,
+ **kw):
+ """Sends mail to admins on behalf of application.
+
+ Args:
+ sender: Sender email address as appears in the 'from' email line.
+ subject: Message subject string.
+ body: Body of type text/plain.
+ make_sync_call: Function used to make sync call to API proxy.
+ kw: Keyword arguments compatible with EmailMessage keyword based
+ constructor.
+
+ Raises:
+ InvalidEmailError when invalid email address provided.
+ """
+ kw['sender'] = sender
+ kw['subject'] = subject
+ kw['body'] = body
+ message = AdminEmailMessage(**kw)
+ message.send(make_sync_call)
+
+SendMailToAdmins = send_mail_to_admins
+
+
+def _GetMimeType(file_name):
+ """Determine mime-type from file name.
+
+ Parses file name and determines mime-type based on extension map.
+
+ This method is not part of the public API and should not be used by
+ applications.
+
+ Args:
+ file_name: File to determine extension for.
+
+ Returns:
+ Mime-type associated with file extension.
+
+  Raises:
+    InvalidAttachmentTypeError when the file name of an attachment has no
+    extension or an unsupported extension.
+ """
+ extension_index = file_name.rfind('.')
+ if extension_index == -1:
+ raise InvalidAttachmentTypeError(
+ "File '%s' does not have an extension" % file_name)
+ extension = file_name[extension_index + 1:]
+ mime_type = EXTENSION_MIME_MAP.get(extension, None)
+ if mime_type is None:
+ raise InvalidAttachmentTypeError(
+ "Extension '%s' is not supported." % extension)
+ return mime_type
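+
+# For example, _GetMimeType('report.pdf') returns 'application/pdf', while
+# _GetMimeType('archive.zip') raises InvalidAttachmentTypeError because
+# 'zip' is not in EXTENSION_MIME_MAP.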
+
+
+def mail_message_to_mime_message(protocol_message):
+ """Generate a MIMEMultitype message from protocol buffer.
+
+ Generates a complete MIME multi-part email object from a MailMessage
+ protocol buffer. The body fields are sent as individual alternatives
+ if they are both present, otherwise, only one body part is sent.
+
+ Multiple entry email fields such as 'To', 'Cc' and 'Bcc' are converted
+ to a list of comma separated email addresses.
+
+ Args:
+    protocol_message: Message PB to convert to MIMEMultipart.
+
+ Returns:
+    MIMEMultipart representing the provided MailMessage.
+
+ Raises:
+    InvalidAttachmentTypeError when the file name of an attachment has no
+    extension or an unsupported extension.
+ """
+ parts = []
+ if protocol_message.has_textbody():
+ parts.append(MIMEText.MIMEText(protocol_message.textbody()))
+ if protocol_message.has_htmlbody():
+ parts.append(MIMEText.MIMEText(protocol_message.htmlbody(),
+ _subtype='html'))
+
+ if len(parts) == 1:
+ payload = parts
+ else:
+ payload = [MIMEMultipart.MIMEMultipart('alternative', _subparts=parts)]
+
+ result = MIMEMultipart.MIMEMultipart(_subparts=payload)
+ for attachment in protocol_message.attachment_list():
+ file_name = attachment.filename()
+ mime_type = _GetMimeType(file_name)
+ maintype, subtype = mime_type.split('/')
+ mime_attachment = MIMEBase.MIMEBase(maintype, subtype)
+ mime_attachment.add_header('Content-Disposition',
+ 'attachment',
+ filename=attachment.filename())
+ mime_attachment.set_payload(attachment.data())
+ result.attach(mime_attachment)
+
+ if protocol_message.to_size():
+ result['To'] = ', '.join(protocol_message.to_list())
+ if protocol_message.cc_size():
+ result['Cc'] = ', '.join(protocol_message.cc_list())
+ if protocol_message.bcc_size():
+ result['Bcc'] = ', '.join(protocol_message.bcc_list())
+
+ result['From'] = protocol_message.sender()
+ result['Reply-To'] = protocol_message.replyto()
+ result['Subject'] = protocol_message.subject()
+
+ return result
+
+MailMessageToMIMEMessage = mail_message_to_mime_message
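+
+# Sketch: inspecting an outgoing message as RFC 2822 text (assumes a
+# populated EmailMessage instance; ToProto is defined below):
+#
+#   mime = mail_message_to_mime_message(message.ToProto())
+#   print mime.as_string()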
+
+
+def _to_str(value):
+ """Helper function to make sure unicode values converted to utf-8.
+
+ Args:
+ value: str or unicode to convert to utf-8.
+
+ Returns:
+ UTF-8 encoded str of value, otherwise value unchanged.
+ """
+ if isinstance(value, unicode):
+ return value.encode('utf-8')
+ return value
+
+
+class EncodedPayload(object):
+ """Wrapper for a payload that contains encoding information.
+
+  When an email is received, it is usually encoded using a certain
+  character set, and then possibly further encoded using a transfer
+  encoding in that character set. Most of the time it is possible
+  to decode the encoded payload as is; however, in the case where it
+ is not, the encoded payload and the original encoding information
+ must be preserved.
+
+ Attributes:
+ payload: The original encoded payload.
+ charset: The character set of the encoded payload. None means use
+ default character set.
+ encoding: The transfer encoding of the encoded payload. None means
+ content not encoded.
+ """
+
+ def __init__(self, payload, charset=None, encoding=None):
+ """Constructor.
+
+ Args:
+ payload: Maps to attribute of the same name.
+ charset: Maps to attribute of the same name.
+ encoding: Maps to attribute of the same name.
+ """
+ self.payload = payload
+ self.charset = charset
+ self.encoding = encoding
+
+ def decode(self):
+ """Attempt to decode the encoded data.
+
+    Attempt to use Python's codec library to decode the payload. All
+ exceptions are passed back to the caller.
+
+ Returns:
+ Binary or unicode version of payload content.
+ """
+ payload = self.payload
+
+ if self.encoding and self.encoding.lower() != '7bit':
+ try:
+        payload = payload.decode(self.encoding)
+ except LookupError:
+ raise UnknownEncodingError('Unknown decoding %s.' % self.encoding)
+ except (Exception, Error), e:
+ raise PayloadEncodingError('Could not decode payload: %s' % e)
+
+ if self.charset and str(self.charset).lower() != '7bit':
+ try:
+        payload = payload.decode(str(self.charset))
+ except LookupError:
+ raise UnknownCharsetError('Unknown charset %s.' % self.charset)
+ except (Exception, Error), e:
+        raise PayloadEncodingError('Could not decode characters: %s' % e)
+
+ return payload
+
+ def __eq__(self, other):
+ """Equality operator.
+
+ Args:
+      other: The other EncodedPayload object to compare with. Comparison
+        with other object types is not implemented.
+
+    Returns:
+      True if payload, charset, and encoding are equal, else False.
+ """
+ if isinstance(other, EncodedPayload):
+ return (self.payload == other.payload and
+ self.charset == other.charset and
+ self.encoding == other.encoding)
+ else:
+ return NotImplemented
+
+ def copy_to(self, mime_message):
+ """Copy contents to MIME message payload.
+
+ If no content transfer encoding is specified, and the character set does
+ not equal the over-all message encoding, the payload will be base64
+ encoded.
+
+ Args:
+ mime_message: Message instance to receive new payload.
+ """
+ if self.encoding:
+ mime_message['content-transfer-encoding'] = self.encoding
+ mime_message.set_payload(self.payload, self.charset)
+
+ def to_mime_message(self):
+ """Convert to MIME message.
+
+ Returns:
+ MIME message instance of payload.
+ """
+ mime_message = email.Message.Message()
+ self.copy_to(mime_message)
+ return mime_message
+
+ def __str__(self):
+ """String representation of encoded message.
+
+ Returns:
+ MIME encoded representation of encoded payload as an independent message.
+ """
+ return str(self.to_mime_message())
+
+ def __repr__(self):
+ """Basic representation of encoded payload.
+
+ Returns:
+ Payload itself is represented by its hash value.
+ """
+ result = '<EncodedPayload payload=#%d' % hash(self.payload)
+ if self.charset:
+ result += ' charset=%s' % self.charset
+ if self.encoding:
+ result += ' encoding=%s' % self.encoding
+ return result + '>'
+
+
+class _EmailMessageBase(object):
+ """Base class for email API service objects.
+
+ Subclasses must define a class variable called _API_CALL with the name
+ of its underlying mail sending API call.
+ """
+
+ PROPERTIES = set([
+ 'sender',
+ 'reply_to',
+ 'subject',
+ 'body',
+ 'html',
+ 'attachments',
+ ])
+
+ PROPERTIES.update(('to', 'cc', 'bcc'))
+
+ def __init__(self, mime_message=None, **kw):
+ """Initialize Email message.
+
+ Creates new MailMessage protocol buffer and initializes it with any
+ keyword arguments.
+
+ Args:
+ mime_message: MIME message to initialize from. If instance of
+ email.Message.Message will take ownership as original message.
+ kw: List of keyword properties as defined by PROPERTIES.
+ """
+ if mime_message:
+ mime_message = _parse_mime_message(mime_message)
+ self.update_from_mime_message(mime_message)
+ self.__original = mime_message
+
+ self.initialize(**kw)
+
+ @property
+ def original(self):
+ """Get original MIME message from which values were set."""
+ return self.__original
+
+ def initialize(self, **kw):
+ """Keyword initialization.
+
+ Used to set all fields of the email message using keyword arguments.
+
+ Args:
+ kw: List of keyword properties as defined by PROPERTIES.
+ """
+ for name, value in kw.iteritems():
+ setattr(self, name, value)
+
+ def Initialize(self, **kw):
+ self.initialize(**kw)
+
+ def check_initialized(self):
+ """Check if EmailMessage is properly initialized.
+
+ Test used to determine if EmailMessage meets basic requirements
+ for being used with the mail API. This means that the following
+ fields must be set or have at least one value in the case of
+ multi value fields:
+
+ - Subject must be set.
+ - A recipient must be specified.
+ - Must contain a body.
+ - All bodies and attachments must decode properly.
+
+ This check does not include determining if the sender is actually
+ authorized to send email for the application.
+
+ Raises:
+ Appropriate exception for initialization failure.
+
+ InvalidAttachmentTypeError: Use of incorrect attachment type.
+ MissingRecipientsError: No recipients specified in to, cc or bcc.
+ MissingSenderError: No sender specified.
+ MissingSubjectError: Subject is not specified.
+ MissingBodyError: No body specified.
+ PayloadEncodingError: Payload is not properly encoded.
+ UnknownEncodingError: Payload has unknown encoding.
+ UnknownCharsetError: Payload has unknown character set.
+ """
+ if not hasattr(self, 'sender'):
+ raise MissingSenderError()
+ if not hasattr(self, 'subject'):
+ raise MissingSubjectError()
+
+ found_body = False
+
+ try:
+ body = self.body
+ except AttributeError:
+ pass
+ else:
+ if isinstance(body, EncodedPayload):
+ body.decode()
+ found_body = True
+
+ try:
+ html = self.html
+ except AttributeError:
+ pass
+ else:
+ if isinstance(html, EncodedPayload):
+ html.decode()
+ found_body = True
+
+ if not found_body:
+ raise MissingBodyError()
+
+ if hasattr(self, 'attachments'):
+ for file_name, data in _attachment_sequence(self.attachments):
+ _GetMimeType(file_name)
+
+ if isinstance(data, EncodedPayload):
+ data.decode()
+
+ def CheckInitialized(self):
+ self.check_initialized()
+
+ def is_initialized(self):
+ """Determine if EmailMessage is properly initialized.
+
+ Returns:
+      True if message is properly initialized, otherwise False.
+ """
+ try:
+ self.check_initialized()
+ return True
+ except Error:
+ return False
+
+ def IsInitialized(self):
+ return self.is_initialized()
+
+ def ToProto(self):
+ """Convert mail message to protocol message.
+
+ Unicode strings are converted to UTF-8 for all fields.
+
+    This method is overridden by EmailMessage to support the sender fields.
+
+ Returns:
+ MailMessage protocol version of mail message.
+
+ Raises:
+      Passes through decoding errors that occur when decoding
+      EncodedPayload objects.
+ """
+ self.check_initialized()
+ message = mail_service_pb.MailMessage()
+ message.set_sender(_to_str(self.sender))
+
+ if hasattr(self, 'reply_to'):
+ message.set_replyto(_to_str(self.reply_to))
+ message.set_subject(_to_str(self.subject))
+
+ if hasattr(self, 'body'):
+ body = self.body
+ if isinstance(body, EncodedPayload):
+ body = body.decode()
+ message.set_textbody(_to_str(body))
+ if hasattr(self, 'html'):
+ html = self.html
+ if isinstance(html, EncodedPayload):
+ html = html.decode()
+ message.set_htmlbody(_to_str(html))
+
+ if hasattr(self, 'attachments'):
+ for file_name, data in _attachment_sequence(self.attachments):
+ if isinstance(data, EncodedPayload):
+ data = data.decode()
+ attachment = message.add_attachment()
+ attachment.set_filename(_to_str(file_name))
+ attachment.set_data(_to_str(data))
+ return message
+
+ def to_mime_message(self):
+ """Generate a MIMEMultitype message from EmailMessage.
+
+    Calls MailMessageToMIMEMessage after converting self to a protocol
+    buffer. The protocol buffer is better at handling corner cases
+    than the EmailMessage class.
+
+ Returns:
+ MIMEMultitype representing the provided MailMessage.
+
+ Raises:
+ Appropriate exception for initialization failure.
+
+ InvalidAttachmentTypeError: Use of incorrect attachment type.
+ MissingSenderError: No sender specified.
+ MissingSubjectError: Subject is not specified.
+ MissingBodyError: No body specified.
+ """
+ return mail_message_to_mime_message(self.ToProto())
+
+ def ToMIMEMessage(self):
+ return self.to_mime_message()
+
+ def send(self, make_sync_call=apiproxy_stub_map.MakeSyncCall):
+ """Send email message.
+
+ Send properly initialized email message via email API.
+
+ Args:
+      make_sync_call: Method which will make a synchronous call to the
+        API proxy.
+
+ Raises:
+ Errors defined in this file above.
+ """
+ message = self.ToProto()
+ response = api_base_pb.VoidProto()
+
+ try:
+ make_sync_call('mail', self._API_CALL, message, response)
+ except apiproxy_errors.ApplicationError, e:
+ if e.application_error in ERROR_MAP:
+ raise ERROR_MAP[e.application_error](e.error_detail)
+ raise e
+
+ def Send(self, *args, **kwds):
+ self.send(*args, **kwds)
+
+ def _check_attachment(self, attachment):
+ file_name, data = attachment
+    if not (isinstance(file_name, basestring) and
+            isinstance(data, basestring)):
+      raise TypeError()
+
+ def _check_attachments(self, attachments):
+ """Checks values going to attachment field.
+
+ Mainly used to check type safety of the values. Each value of the list
+    must be a pair of the form (file_name, data), and both values must be
+    of string type.
+
+ Args:
+ attachments: Collection of attachment tuples.
+
+ Raises:
+      TypeError: If either value is not of string type.
+ """
+ if len(attachments) == 2 and isinstance(attachments[0], basestring):
+ self._check_attachment(attachments)
+ else:
+ for attachment in attachments:
+ self._check_attachment(attachment)
+
+ def __setattr__(self, attr, value):
+ """Property setting access control.
+
+ Controls write access to email fields.
+
+ Args:
+ attr: Attribute to access.
+ value: New value for field.
+
+ Raises:
+ ValueError: If provided with an empty field.
+ AttributeError: If not an allowed assignment field.
+ """
+ if not attr.startswith('_EmailMessageBase'):
+ if attr in ['sender', 'reply_to']:
+ check_email_valid(value, attr)
+
+ if not value:
+ raise ValueError('May not set empty value for \'%s\'' % attr)
+
+ if attr not in self.PROPERTIES:
+ raise AttributeError('\'EmailMessage\' has no attribute \'%s\'' % attr)
+
+ if attr == 'attachments':
+ self._check_attachments(value)
+
+ super(_EmailMessageBase, self).__setattr__(attr, value)
+
+ def _add_body(self, content_type, payload):
+ """Add body to email from payload.
+
+ Will overwrite any existing default plain or html body.
+
+ Args:
+ content_type: Content-type of body.
+ payload: Payload to store body as.
+ """
+ if content_type == 'text/plain':
+ self.body = payload
+ elif content_type == 'text/html':
+ self.html = payload
+
+ def _update_payload(self, mime_message):
+ """Update payload of mail message from mime_message.
+
+    This function works recursively when it receives a multipart body.
+    If it receives a non-multipart MIME object, it determines whether or
+    not it is an attachment by checking whether it has a filename.
+    Attachments and bodies are then wrapped in EncodedPayload objects with
+    the correct charsets and encodings.
+
+ Args:
+ mime_message: A Message MIME email object.
+ """
+ payload = mime_message.get_payload()
+
+ if payload:
+ if mime_message.get_content_maintype() == 'multipart':
+ for alternative in payload:
+ self._update_payload(alternative)
+ else:
+ filename = mime_message.get_param('filename',
+ header='content-disposition')
+ if not filename:
+ filename = mime_message.get_param('name')
+
+ payload = EncodedPayload(payload,
+ mime_message.get_charset(),
+ mime_message['content-transfer-encoding'])
+
+ if filename:
+ try:
+ attachments = self.attachments
+ except AttributeError:
+ self.attachments = (filename, payload)
+ else:
+ if isinstance(attachments[0], basestring):
+ self.attachments = [attachments]
+ attachments = self.attachments
+ attachments.append((filename, payload))
+ else:
+ self._add_body(mime_message.get_content_type(), payload)
+
+ def update_from_mime_message(self, mime_message):
+ """Copy information from a mime message.
+
+    Set information of instance to values of mime message. This method
+    will only copy values that it finds. Missing values are not copied,
+    nor do they overwrite existing values with blank values.
+
+ This object is not guaranteed to be initialized after this call.
+
+ Args:
+ mime_message: email.Message instance to copy information from.
+ """
+ mime_message = _parse_mime_message(mime_message)
+
+ sender = mime_message['from']
+ if sender:
+ self.sender = sender
+
+ reply_to = mime_message['reply-to']
+ if reply_to:
+ self.reply_to = reply_to
+
+ subject = mime_message['subject']
+ if subject:
+ self.subject = subject
+
+ self._update_payload(mime_message)
+
+ def bodies(self, content_type=None):
+ """Iterate over all bodies.
+
+ Yields:
+ Tuple (content_type, payload) for html and body in that order.
+ """
+ if (not content_type or
+ content_type == 'text' or
+ content_type == 'text/html'):
+ try:
+ yield 'text/html', self.html
+ except AttributeError:
+ pass
+
+ if (not content_type or
+ content_type == 'text' or
+ content_type == 'text/plain'):
+ try:
+ yield 'text/plain', self.body
+ except AttributeError:
+ pass
+
+
+class EmailMessage(_EmailMessageBase):
+ """Main interface to email API service.
+
+ This class is used to programmatically build an email message to send via
+ the Mail API. The usage is to construct an instance, populate its fields
+ and call Send().
+
+ Example Usage:
+ An EmailMessage can be built completely by the constructor.
+
+ EmailMessage(sender='sender@nowhere.com',
+ to='recipient@nowhere.com',
+ subject='a subject',
+ body='This is an email to you').Send()
+
+ It might be desirable for an application to build an email in different
+ places throughout the code. For this, EmailMessage is mutable.
+
+ message = EmailMessage()
+ message.sender = 'sender@nowhere.com'
+ message.to = ['recipient1@nowhere.com', 'recipient2@nowhere.com']
+ message.subject = 'a subject'
+    message.body = 'This is an email to you'
+ message.check_initialized()
+ message.send()
+ """
+
+ _API_CALL = 'Send'
+ PROPERTIES = set(_EmailMessageBase.PROPERTIES)
+
+ def check_initialized(self):
+ """Provide additional checks to ensure recipients have been specified.
+
+ Raises:
+      MissingRecipientsError: No recipients specified in to, cc or bcc.
+ """
+ if (not hasattr(self, 'to') and
+ not hasattr(self, 'cc') and
+ not hasattr(self, 'bcc')):
+ raise MissingRecipientsError()
+ super(EmailMessage, self).check_initialized()
+
+ def CheckInitialized(self):
+ self.check_initialized()
+
+ def ToProto(self):
+ """Does addition conversion of recipient fields to protocol buffer.
+
+ Returns:
+      MailMessage protocol version of mail message including recipient fields.
+ """
+ message = super(EmailMessage, self).ToProto()
+
+ for attribute, adder in (('to', message.add_to),
+ ('cc', message.add_cc),
+ ('bcc', message.add_bcc)):
+ if hasattr(self, attribute):
+ for address in _email_sequence(getattr(self, attribute)):
+ adder(_to_str(address))
+ return message
+
+ def __setattr__(self, attr, value):
+ """Provides additional checks on recipient fields."""
+ if attr in ['to', 'cc', 'bcc']:
+ if isinstance(value, basestring):
+ check_email_valid(value, attr)
+ else:
+ for address in value:
+ check_email_valid(address, attr)
+
+ super(EmailMessage, self).__setattr__(attr, value)
+
+ def update_from_mime_message(self, mime_message):
+ """Copy information from a mime message.
+
+ Update fields for recipients.
+
+ Args:
+ mime_message: email.Message instance to copy information from.
+ """
+ mime_message = _parse_mime_message(mime_message)
+ super(EmailMessage, self).update_from_mime_message(mime_message)
+
+ to = mime_message.get_all('to')
+ if to:
+ if len(to) == 1:
+ self.to = to[0]
+ else:
+ self.to = to
+
+ cc = mime_message.get_all('cc')
+ if cc:
+ if len(cc) == 1:
+ self.cc = cc[0]
+ else:
+ self.cc = cc
+
+ bcc = mime_message.get_all('bcc')
+ if bcc:
+ if len(bcc) == 1:
+ self.bcc = bcc[0]
+ else:
+ self.bcc = bcc
+
+
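+# Minimal usage sketch (addresses are placeholders): parse RFC 822 text with
+# the standard email module, then copy sender, recipients, subject and body
+# into a mutable EmailMessage via update_from_mime_message.
+def _example_update_from_mime_message():
+  import email
+  parsed = email.message_from_string('From: sender@nowhere.com\r\n'
+                                     'To: recipient@nowhere.com\r\n'
+                                     'Subject: a subject\r\n'
+                                     '\r\n'
+                                     'This is an email to you')
+  message = EmailMessage()
+  message.update_from_mime_message(parsed)
+  assert message.is_initialized()
+  return message
+
+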
+class AdminEmailMessage(_EmailMessageBase):
+ """Interface to sending email messages to all admins via the amil API.
+
+ This class is used to programmatically build an admin email message to send
+ via the Mail API. The usage is to construct an instance, populate its fields
+ and call Send().
+
+ Unlike the normal email message, addresses in the recipient fields are
+ ignored and not used for sending.
+
+ Example Usage:
+ An AdminEmailMessage can be built completely by the constructor.
+
+ AdminEmailMessage(sender='sender@nowhere.com',
+ subject='a subject',
+ body='This is an email to you').Send()
+
+ It might be desirable for an application to build an admin email in
+ different places throughout the code. For this, AdminEmailMessage is
+ mutable.
+
+ message = AdminEmailMessage()
+ message.sender = 'sender@nowhere.com'
+ message.subject = 'a subject'
+    message.body = 'This is an email to you'
+ message.check_initialized()
+ message.send()
+ """
+
+ _API_CALL = 'SendToAdmins'
+ __UNUSED_PROPERTIES = set(('to', 'cc', 'bcc'))
+
+ def __setattr__(self, attr, value):
+ if attr in self.__UNUSED_PROPERTIES:
+ logging.warning('\'%s\' is not a valid property to set '
+ 'for AdminEmailMessage. It is unused.', attr)
+ super(AdminEmailMessage, self).__setattr__(attr, value)
+
+
+class InboundEmailMessage(EmailMessage):
+ """Parsed email object as recevied from external source.
+
+ Has a date field and can store any number of additional bodies. These
+ additional attributes make the email more flexible as required for
+ incoming mail, where the developer has less control over the content.
+
+ Example Usage:
+
+ # Read mail message from CGI input.
+ message = InboundEmailMessage(sys.stdin.read())
+ logging.info('Received email message from %s at %s',
+ message.sender,
+ message.date)
+ enriched_body = list(message.bodies('text/enriched'))[0]
+ ... Do something with body ...
+ """
+
+ __HEADER_PROPERTIES = {'date': 'date',
+ 'message_id': 'message-id',
+ }
+
+ PROPERTIES = frozenset(_EmailMessageBase.PROPERTIES |
+ set(('alternate_bodies',)) |
+ set(__HEADER_PROPERTIES.iterkeys()))
+
+ def update_from_mime_message(self, mime_message):
+ """Update values from MIME message.
+
+ Copies over date values.
+
+ Args:
+ mime_message: email.Message instance to copy information from.
+ """
+ mime_message = _parse_mime_message(mime_message)
+ super(InboundEmailMessage, self).update_from_mime_message(mime_message)
+
+ for property, header in InboundEmailMessage.__HEADER_PROPERTIES.iteritems():
+ value = mime_message[header]
+ if value:
+ setattr(self, property, value)
+
+ def _add_body(self, content_type, payload):
+ """Add body to inbound message.
+
+    Method is overridden to handle incoming messages that have more than
+    one plain or html body, or that have unidentified bodies.
+
+ This method will not overwrite existing html and body values. This means
+ that when updating, the text and html bodies that are first in the MIME
+ document order are assigned to the body and html properties.
+
+ Args:
+ content_type: Content-type of additional body.
+ payload: Content of additional body.
+ """
+ if (content_type == 'text/plain' and not hasattr(self, 'body') or
+ content_type == 'text/html' and not hasattr(self, 'html')):
+ super(InboundEmailMessage, self)._add_body(content_type, payload)
+ else:
+ try:
+ alternate_bodies = self.alternate_bodies
+ except AttributeError:
+ alternate_bodies = self.alternate_bodies = [(content_type, payload)]
+ else:
+ alternate_bodies.append((content_type, payload))
+
+ def bodies(self, content_type=None):
+ """Iterate over all bodies.
+
+ Args:
+ content_type: Content type to filter on. Allows selection of only
+ specific types of content. Can be just the base type of the content
+ type. For example:
+ content_type = 'text/html' # Matches only HTML content.
+ content_type = 'text' # Matches text of any kind.
+
+ Yields:
+ Tuple (content_type, payload) for all bodies of message, including body,
+ html and all alternate_bodies in that order.
+ """
+ main_bodies = super(InboundEmailMessage, self).bodies(content_type)
+ for payload_type, payload in main_bodies:
+ yield payload_type, payload
+
+ partial_type = bool(content_type and content_type.find('/') < 0)
+
+ try:
+ for payload_type, payload in self.alternate_bodies:
+ if content_type:
+ if partial_type:
+ match_type = payload_type.split('/')[0]
+ else:
+ match_type = payload_type
+ match = match_type == content_type
+ else:
+ match = True
+
+ if match:
+ yield payload_type, payload
+ except AttributeError:
+ pass
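+
+
+# Minimal filtering sketch using the internal _add_body helper (body strings
+# are placeholders): a bare base type such as 'text' matches any 'text/*'
+# body, while a full type such as 'text/enriched' matches exactly.
+def _example_bodies_filter():
+  message = InboundEmailMessage()
+  message._add_body('text/plain', 'first plain body')
+  message._add_body('text/plain', 'second body, stored in alternate_bodies')
+  message._add_body('text/enriched', 'enriched body')
+  assert len(list(message.bodies('text'))) == 3
+  assert len(list(message.bodies('text/enriched'))) == 1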
diff --git a/google_appengine/google/appengine/api/mail.pyc b/google_appengine/google/appengine/api/mail.pyc
new file mode 100644
index 0000000..2a8a69c
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/mail_errors.py b/google_appengine/google/appengine/api/mail_errors.py
new file mode 100755
index 0000000..6d2b9c3
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_errors.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Exceptions raised my mail API."""
+
+
+class Error(Exception):
+ """Base Mail error type."""
+
+class BadRequestError(Error):
+ """Email is not valid."""
+
+class InvalidSenderError(Error):
+ """Sender is not a permitted to send mail for this application."""
+
+class InvalidEmailError(Error):
+ """Bad email set on an email field."""
+
+class InvalidAttachmentTypeError(Error):
+ """Invalid file type for attachments. We don't send viruses!"""
+
+class MissingRecipientsError(Error):
+ """No recipients specified in message."""
+
+class MissingSenderError(Error):
+ """No sender specified in message."""
+
+class MissingSubjectError(Error):
+ """Subject not specified in message."""
+
+class MissingBodyError(Error):
+ """No body specified in message."""
+
+class PayloadEncodingError(Error):
+ """Unknown payload encoding."""
+
+class UnknownEncodingError(PayloadEncodingError):
+ """Raised when encoding is not known."""
+
+class UnknownCharsetError(PayloadEncodingError):
+ """Raised when charset is not known."""
diff --git a/google_appengine/google/appengine/api/mail_errors.pyc b/google_appengine/google/appengine/api/mail_errors.pyc
new file mode 100644
index 0000000..78f8b20
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/mail_service_pb.py b/google_appengine/google/appengine/api/mail_service_pb.py
new file mode 100644
index 0000000..1b608ea
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_service_pb.py
@@ -0,0 +1,584 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.appengine.api.api_base_pb import *
+class MailServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ INTERNAL_ERROR = 1
+ BAD_REQUEST = 2
+ UNAUTHORIZED_SENDER = 3
+ INVALID_ATTACHMENT_TYPE = 4
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "INTERNAL_ERROR",
+ 2: "BAD_REQUEST",
+ 3: "UNAUTHORIZED_SENDER",
+ 4: "INVALID_ATTACHMENT_TYPE",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
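+
+# Usage sketch: ErrorCode_Name maps a numeric error code from the service to
+# its symbolic name, e.g. MailServiceError.ErrorCode_Name(3) returns
+# 'UNAUTHORIZED_SENDER', and unknown codes return the empty string.
+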
+class MailAttachment(ProtocolBuffer.ProtocolMessage):
+ has_filename_ = 0
+ filename_ = ""
+ has_data_ = 0
+ data_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def filename(self): return self.filename_
+
+ def set_filename(self, x):
+ self.has_filename_ = 1
+ self.filename_ = x
+
+ def clear_filename(self):
+ if self.has_filename_:
+ self.has_filename_ = 0
+ self.filename_ = ""
+
+ def has_filename(self): return self.has_filename_
+
+ def data(self): return self.data_
+
+ def set_data(self, x):
+ self.has_data_ = 1
+ self.data_ = x
+
+ def clear_data(self):
+ if self.has_data_:
+ self.has_data_ = 0
+ self.data_ = ""
+
+ def has_data(self): return self.has_data_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_filename()): self.set_filename(x.filename())
+ if (x.has_data()): self.set_data(x.data())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_filename_ != x.has_filename_: return 0
+ if self.has_filename_ and self.filename_ != x.filename_: return 0
+ if self.has_data_ != x.has_data_: return 0
+ if self.has_data_ and self.data_ != x.data_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_filename_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: filename not set.')
+ if (not self.has_data_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: data not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.filename_))
+ n += self.lengthString(len(self.data_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_filename()
+ self.clear_data()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.filename_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.data_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_filename(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_data(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_filename_: res+=prefix+("FileName: %s\n" % self.DebugFormatString(self.filename_))
+ if self.has_data_: res+=prefix+("Data: %s\n" % self.DebugFormatString(self.data_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kFileName = 1
+ kData = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "FileName",
+ 2: "Data",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
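+
+
+# Minimal round-trip sketch of the generated accessor pattern (field values
+# are placeholders): setters flip the corresponding has_* flag, MergeFrom
+# copies set fields, and Equals compares field-by-field.
+def _example_attachment_copy():
+  original = MailAttachment()
+  original.set_filename('report.txt')
+  original.set_data('attachment bytes')
+  copy = MailAttachment()
+  copy.MergeFrom(original)
+  assert copy.Equals(original)
+  assert copy.IsInitialized()
+
+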
+class MailMessage(ProtocolBuffer.ProtocolMessage):
+ has_sender_ = 0
+ sender_ = ""
+ has_replyto_ = 0
+ replyto_ = ""
+ has_subject_ = 0
+ subject_ = ""
+ has_textbody_ = 0
+ textbody_ = ""
+ has_htmlbody_ = 0
+ htmlbody_ = ""
+
+ def __init__(self, contents=None):
+ self.to_ = []
+ self.cc_ = []
+ self.bcc_ = []
+ self.attachment_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def sender(self): return self.sender_
+
+ def set_sender(self, x):
+ self.has_sender_ = 1
+ self.sender_ = x
+
+ def clear_sender(self):
+ if self.has_sender_:
+ self.has_sender_ = 0
+ self.sender_ = ""
+
+ def has_sender(self): return self.has_sender_
+
+ def replyto(self): return self.replyto_
+
+ def set_replyto(self, x):
+ self.has_replyto_ = 1
+ self.replyto_ = x
+
+ def clear_replyto(self):
+ if self.has_replyto_:
+ self.has_replyto_ = 0
+ self.replyto_ = ""
+
+ def has_replyto(self): return self.has_replyto_
+
+ def to_size(self): return len(self.to_)
+ def to_list(self): return self.to_
+
+ def to(self, i):
+ return self.to_[i]
+
+ def set_to(self, i, x):
+ self.to_[i] = x
+
+ def add_to(self, x):
+ self.to_.append(x)
+
+ def clear_to(self):
+ self.to_ = []
+
+ def cc_size(self): return len(self.cc_)
+ def cc_list(self): return self.cc_
+
+ def cc(self, i):
+ return self.cc_[i]
+
+ def set_cc(self, i, x):
+ self.cc_[i] = x
+
+ def add_cc(self, x):
+ self.cc_.append(x)
+
+ def clear_cc(self):
+ self.cc_ = []
+
+ def bcc_size(self): return len(self.bcc_)
+ def bcc_list(self): return self.bcc_
+
+ def bcc(self, i):
+ return self.bcc_[i]
+
+ def set_bcc(self, i, x):
+ self.bcc_[i] = x
+
+ def add_bcc(self, x):
+ self.bcc_.append(x)
+
+ def clear_bcc(self):
+ self.bcc_ = []
+
+ def subject(self): return self.subject_
+
+ def set_subject(self, x):
+ self.has_subject_ = 1
+ self.subject_ = x
+
+ def clear_subject(self):
+ if self.has_subject_:
+ self.has_subject_ = 0
+ self.subject_ = ""
+
+ def has_subject(self): return self.has_subject_
+
+ def textbody(self): return self.textbody_
+
+ def set_textbody(self, x):
+ self.has_textbody_ = 1
+ self.textbody_ = x
+
+ def clear_textbody(self):
+ if self.has_textbody_:
+ self.has_textbody_ = 0
+ self.textbody_ = ""
+
+ def has_textbody(self): return self.has_textbody_
+
+ def htmlbody(self): return self.htmlbody_
+
+ def set_htmlbody(self, x):
+ self.has_htmlbody_ = 1
+ self.htmlbody_ = x
+
+ def clear_htmlbody(self):
+ if self.has_htmlbody_:
+ self.has_htmlbody_ = 0
+ self.htmlbody_ = ""
+
+ def has_htmlbody(self): return self.has_htmlbody_
+
+ def attachment_size(self): return len(self.attachment_)
+ def attachment_list(self): return self.attachment_
+
+ def attachment(self, i):
+ return self.attachment_[i]
+
+ def mutable_attachment(self, i):
+ return self.attachment_[i]
+
+ def add_attachment(self):
+ x = MailAttachment()
+ self.attachment_.append(x)
+ return x
+
+ def clear_attachment(self):
+ self.attachment_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_sender()): self.set_sender(x.sender())
+ if (x.has_replyto()): self.set_replyto(x.replyto())
+ for i in xrange(x.to_size()): self.add_to(x.to(i))
+ for i in xrange(x.cc_size()): self.add_cc(x.cc(i))
+ for i in xrange(x.bcc_size()): self.add_bcc(x.bcc(i))
+ if (x.has_subject()): self.set_subject(x.subject())
+ if (x.has_textbody()): self.set_textbody(x.textbody())
+ if (x.has_htmlbody()): self.set_htmlbody(x.htmlbody())
+ for i in xrange(x.attachment_size()): self.add_attachment().CopyFrom(x.attachment(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_sender_ != x.has_sender_: return 0
+ if self.has_sender_ and self.sender_ != x.sender_: return 0
+ if self.has_replyto_ != x.has_replyto_: return 0
+ if self.has_replyto_ and self.replyto_ != x.replyto_: return 0
+ if len(self.to_) != len(x.to_): return 0
+ for e1, e2 in zip(self.to_, x.to_):
+ if e1 != e2: return 0
+ if len(self.cc_) != len(x.cc_): return 0
+ for e1, e2 in zip(self.cc_, x.cc_):
+ if e1 != e2: return 0
+ if len(self.bcc_) != len(x.bcc_): return 0
+ for e1, e2 in zip(self.bcc_, x.bcc_):
+ if e1 != e2: return 0
+ if self.has_subject_ != x.has_subject_: return 0
+ if self.has_subject_ and self.subject_ != x.subject_: return 0
+ if self.has_textbody_ != x.has_textbody_: return 0
+ if self.has_textbody_ and self.textbody_ != x.textbody_: return 0
+ if self.has_htmlbody_ != x.has_htmlbody_: return 0
+ if self.has_htmlbody_ and self.htmlbody_ != x.htmlbody_: return 0
+ if len(self.attachment_) != len(x.attachment_): return 0
+ for e1, e2 in zip(self.attachment_, x.attachment_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_sender_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: sender not set.')
+ if (not self.has_subject_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: subject not set.')
+ for p in self.attachment_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.sender_))
+ if (self.has_replyto_): n += 1 + self.lengthString(len(self.replyto_))
+ n += 1 * len(self.to_)
+ for i in xrange(len(self.to_)): n += self.lengthString(len(self.to_[i]))
+ n += 1 * len(self.cc_)
+ for i in xrange(len(self.cc_)): n += self.lengthString(len(self.cc_[i]))
+ n += 1 * len(self.bcc_)
+ for i in xrange(len(self.bcc_)): n += self.lengthString(len(self.bcc_[i]))
+ n += self.lengthString(len(self.subject_))
+ if (self.has_textbody_): n += 1 + self.lengthString(len(self.textbody_))
+ if (self.has_htmlbody_): n += 1 + self.lengthString(len(self.htmlbody_))
+ n += 1 * len(self.attachment_)
+ for i in xrange(len(self.attachment_)): n += self.lengthString(self.attachment_[i].ByteSize())
+ return n + 2
+
+ def Clear(self):
+ self.clear_sender()
+ self.clear_replyto()
+ self.clear_to()
+ self.clear_cc()
+ self.clear_bcc()
+ self.clear_subject()
+ self.clear_textbody()
+ self.clear_htmlbody()
+ self.clear_attachment()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.sender_)
+ if (self.has_replyto_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.replyto_)
+ for i in xrange(len(self.to_)):
+ out.putVarInt32(26)
+ out.putPrefixedString(self.to_[i])
+ for i in xrange(len(self.cc_)):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.cc_[i])
+ for i in xrange(len(self.bcc_)):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.bcc_[i])
+ out.putVarInt32(50)
+ out.putPrefixedString(self.subject_)
+ if (self.has_textbody_):
+ out.putVarInt32(58)
+ out.putPrefixedString(self.textbody_)
+ if (self.has_htmlbody_):
+ out.putVarInt32(66)
+ out.putPrefixedString(self.htmlbody_)
+ for i in xrange(len(self.attachment_)):
+ out.putVarInt32(74)
+ out.putVarInt32(self.attachment_[i].ByteSize())
+ self.attachment_[i].OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_sender(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_replyto(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.add_to(d.getPrefixedString())
+ continue
+ if tt == 34:
+ self.add_cc(d.getPrefixedString())
+ continue
+ if tt == 42:
+ self.add_bcc(d.getPrefixedString())
+ continue
+ if tt == 50:
+ self.set_subject(d.getPrefixedString())
+ continue
+ if tt == 58:
+ self.set_textbody(d.getPrefixedString())
+ continue
+ if tt == 66:
+ self.set_htmlbody(d.getPrefixedString())
+ continue
+ if tt == 74:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.add_attachment().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_sender_: res+=prefix+("Sender: %s\n" % self.DebugFormatString(self.sender_))
+ if self.has_replyto_: res+=prefix+("ReplyTo: %s\n" % self.DebugFormatString(self.replyto_))
+ cnt=0
+ for e in self.to_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("To%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ cnt=0
+ for e in self.cc_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Cc%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ cnt=0
+ for e in self.bcc_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Bcc%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_subject_: res+=prefix+("Subject: %s\n" % self.DebugFormatString(self.subject_))
+ if self.has_textbody_: res+=prefix+("TextBody: %s\n" % self.DebugFormatString(self.textbody_))
+ if self.has_htmlbody_: res+=prefix+("HtmlBody: %s\n" % self.DebugFormatString(self.htmlbody_))
+ cnt=0
+ for e in self.attachment_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Attachment%s <\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kSender = 1
+ kReplyTo = 2
+ kTo = 3
+ kCc = 4
+ kBcc = 5
+ kSubject = 6
+ kTextBody = 7
+ kHtmlBody = 8
+ kAttachment = 9
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Sender",
+ 2: "ReplyTo",
+ 3: "To",
+ 4: "Cc",
+ 5: "Bcc",
+ 6: "Subject",
+ 7: "TextBody",
+ 8: "HtmlBody",
+ 9: "Attachment",
+ }, 9)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 6: ProtocolBuffer.Encoder.STRING,
+ 7: ProtocolBuffer.Encoder.STRING,
+ 8: ProtocolBuffer.Encoder.STRING,
+ 9: ProtocolBuffer.Encoder.STRING,
+ }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['MailServiceError','MailAttachment','MailMessage']
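+
+
+# Minimal construction sketch (addresses are placeholders): sender and
+# subject are required fields, so IsInitialized reports failure until both
+# are set, and debug_strs explains what is missing.
+def _example_required_fields():
+  message = MailMessage()
+  message.add_to('recipient@nowhere.com')
+  problems = []
+  assert not message.IsInitialized(problems)
+  assert problems == ['Required field: sender not set.',
+                      'Required field: subject not set.']
+  message.set_sender('sender@nowhere.com')
+  message.set_subject('a subject')
+  assert message.IsInitialized()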
diff --git a/google_appengine/google/appengine/api/mail_service_pb.pyc b/google_appengine/google/appengine/api/mail_service_pb.pyc
new file mode 100644
index 0000000..c60ce72
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/mail_stub.py b/google_appengine/google/appengine/api/mail_stub.py
new file mode 100755
index 0000000..151ea76
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_stub.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the mail API, writes email to logs and can optionally
+send real email via SMTP or sendmail."""
+
+
+
+
+
+from email import MIMEBase
+from email import MIMEMultipart
+from email import MIMEText
+import logging
+import mail
+import mimetypes
+import subprocess
+import smtplib
+
+from google.appengine.api import apiproxy_stub
+
+
+class MailServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only mail service stub.
+
+  This stub logs a description of each email to the developer console
+  and, when configured, can also relay it via SMTP or sendmail.
+
+ Args:
+ host: Host of SMTP server to use. Blank disables sending SMTP.
+ port: Port of SMTP server to use.
+ user: User to log in to SMTP server as.
+ password: Password for SMTP server user.
+ """
+
+ def __init__(self,
+ host=None,
+ port=25,
+ user='',
+ password='',
+ enable_sendmail=False,
+ show_mail_body=False,
+ service_name='mail'):
+ """Constructor.
+
+ Args:
+ host: Host of SMTP mail server.
+      port: Port of SMTP mail server.
+ user: Sending user of SMTP mail.
+ password: SMTP password.
+ enable_sendmail: Whether sendmail enabled or not.
+ show_mail_body: Whether to show mail body in log.
+ service_name: Service name expected for all calls.
+ """
+ super(MailServiceStub, self).__init__(service_name)
+ self._smtp_host = host
+ self._smtp_port = port
+ self._smtp_user = user
+ self._smtp_password = password
+ self._enable_sendmail = enable_sendmail
+ self._show_mail_body = show_mail_body
+
+ def _GenerateLog(self, method, message, log):
+ """Generate a list of log messages representing sent mail.
+
+    Args:
+      method: Name of the mail API method being logged, e.g. 'Send'.
+      message: Message to write to log.
+      log: Log function of type string -> None.
+ """
+ log('MailService.%s' % method)
+ log(' From: %s' % message.sender())
+
+ for address in message.to_list():
+ log(' To: %s' % address)
+ for address in message.cc_list():
+ log(' Cc: %s' % address)
+ for address in message.bcc_list():
+ log(' Bcc: %s' % address)
+
+ if message.replyto():
+ log(' Reply-to: %s' % message.replyto())
+
+ log(' Subject: %s' % message.subject())
+
+ if message.has_textbody():
+ log(' Body:')
+ log(' Content-type: text/plain')
+ log(' Data length: %d' % len(message.textbody()))
+ if self._show_mail_body:
+ log('-----\n' + message.textbody() + '\n-----')
+
+ if message.has_htmlbody():
+ log(' Body:')
+ log(' Content-type: text/html')
+ log(' Data length: %d' % len(message.htmlbody()))
+ if self._show_mail_body:
+ log('-----\n' + message.htmlbody() + '\n-----')
+
+ for attachment in message.attachment_list():
+ log(' Attachment:')
+ log(' File name: %s' % attachment.filename())
+ log(' Data length: %s' % len(attachment.data()))
+
+ def _SendSMTP(self, mime_message, smtp_lib=smtplib.SMTP):
+ """Send MIME message via SMTP.
+
+ Connects to SMTP server and sends MIME message. If user is supplied
+ will try to login to that server to send as authenticated. Does not
+ currently support encryption.
+
+ Args:
+ mime_message: MimeMessage to send. Create using ToMIMEMessage.
+ smtp_lib: Class of SMTP library. Used for dependency injection.
+ """
+ smtp = smtp_lib()
+ try:
+ smtp.connect(self._smtp_host, self._smtp_port)
+ if self._smtp_user:
+ smtp.login(self._smtp_user, self._smtp_password)
+
+ tos = ', '.join([mime_message[to] for to in ['To', 'Cc', 'Bcc']
+ if mime_message[to]])
+ smtp.sendmail(mime_message['From'], tos, str(mime_message))
+ finally:
+ smtp.quit()
+
+ def _SendSendmail(self, mime_message,
+ popen=subprocess.Popen,
+ sendmail_command='sendmail'):
+ """Send MIME message via sendmail, if exists on computer.
+
+ Attempts to send email via sendmail. Any IO failure, including
+ the program not being found is ignored.
+
+ Args:
+ mime_message: MimeMessage to send. Create using ToMIMEMessage.
+      popen: popen function to create a new sub-process.
+      sendmail_command: Name of the sendmail executable to invoke.
+ """
+ try:
+ tos = [mime_message[to] for to in ['To', 'Cc', 'Bcc'] if mime_message[to]]
+ sendmail_command = '%s %s' % (sendmail_command, ' '.join(tos))
+
+ try:
+ child = popen(sendmail_command,
+ shell=True,
+ stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE)
+ except (IOError, OSError), e:
+ logging.error('Unable to open pipe to sendmail')
+ raise
+ try:
+ child.stdin.write(str(mime_message))
+ child.stdin.close()
+ finally:
+ while child.poll() is None:
+ child.stdout.read(100)
+ child.stdout.close()
+ except (IOError, OSError), e:
+ logging.error('Error sending mail using sendmail: ' + str(e))
+
+ def _Send(self, request, response, log=logging.info,
+ smtp_lib=smtplib.SMTP,
+ popen=subprocess.Popen,
+ sendmail_command='sendmail'):
+ """Implementation of MailServer::Send().
+
+ Logs email message. Contents of attachments are not shown, only
+ their sizes. If SMTP is configured, will send via SMTP, else
+ will use Sendmail if it is installed.
+
+ Args:
+      request: The message to send, a MailMessage protocol buffer.
+      response: The send response, a VoidProto.
+      log: Log function to send log information. Used for dependency
+        injection.
+      smtp_lib: Class of SMTP library. Used for dependency injection.
+      popen: popen function to use for opening a pipe to another process.
+        Used for dependency injection.
+      sendmail_command: Name of the sendmail executable to invoke.
+ """
+ self._GenerateLog('Send', request, log)
+
+ if self._smtp_host and self._enable_sendmail:
+ log('Both SMTP and sendmail are enabled. Ignoring sendmail.')
+
+ import email
+
+ mime_message = mail.MailMessageToMIMEMessage(request)
+ if self._smtp_host:
+ self._SendSMTP(mime_message, smtp_lib)
+ elif self._enable_sendmail:
+ self._SendSendmail(mime_message, popen, sendmail_command)
+ else:
+ logging.info('You are not currently sending out real email. '
+ 'If you have sendmail installed you can use it '
+ 'by using the server with --enable_sendmail')
+
+ _Dynamic_Send = _Send
+
+ def _SendToAdmins(self, request, response, log=logging.info):
+ """Implementation of MailServer::SendToAdmins().
+
+ Logs email message. Contents of attachments are not shown, only
+ their sizes.
+
+ Given the difficulty of determining who the actual sender
+ is, Sendmail and SMTP are disabled for this action.
+
+ Args:
+      request: The message to send, a MailMessage protocol buffer.
+      response: The send response, a VoidProto.
+ log: Log function to send log information. Used for dependency
+ injection.
+ """
+ self._GenerateLog('SendToAdmins', request, log)
+
+ if self._smtp_host and self._enable_sendmail:
+ log('Both SMTP and sendmail are enabled. Ignoring sendmail.')
+
+ _Dynamic_SendToAdmins = _SendToAdmins
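+
+
+# Wiring sketch (assumes the standard apiproxy_stub_map registration pattern;
+# host/port values are placeholders): once registered, EmailMessage.send()
+# calls on the 'mail' service route to MailServiceStub._Dynamic_Send.
+#
+#   from google.appengine.api import apiproxy_stub_map
+#   from google.appengine.api import mail_stub
+#
+#   stub = mail_stub.MailServiceStub(host='localhost', port=25,
+#                                    show_mail_body=True)
+#   apiproxy_stub_map.apiproxy.RegisterStub('mail', stub)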
diff --git a/google_appengine/google/appengine/api/mail_stub.pyc b/google_appengine/google/appengine/api/mail_stub.pyc
new file mode 100644
index 0000000..1f7e646
--- /dev/null
+++ b/google_appengine/google/appengine/api/mail_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/memcache/__init__.py b/google_appengine/google/appengine/api/memcache/__init__.py
new file mode 100755
index 0000000..1f23cb6
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/__init__.py
@@ -0,0 +1,931 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Memcache API.
+
+Provides a memcached-like API for application developers to store
+data in memory when reliable storage via the Datastore API isn't
+required and higher performance is desired.
+"""
+
+
+
+import cStringIO
+import math
+import pickle
+import types
+import sha
+
+from google.appengine.api import api_base_pb
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import namespace_manager
+from google.appengine.api.memcache import memcache_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+MemcacheSetResponse = memcache_service_pb.MemcacheSetResponse
+MemcacheSetRequest = memcache_service_pb.MemcacheSetRequest
+
+MemcacheGetResponse = memcache_service_pb.MemcacheGetResponse
+MemcacheGetRequest = memcache_service_pb.MemcacheGetRequest
+
+MemcacheDeleteResponse = memcache_service_pb.MemcacheDeleteResponse
+MemcacheDeleteRequest = memcache_service_pb.MemcacheDeleteRequest
+
+MemcacheIncrementResponse = memcache_service_pb.MemcacheIncrementResponse
+MemcacheIncrementRequest = memcache_service_pb.MemcacheIncrementRequest
+
+MemcacheFlushResponse = memcache_service_pb.MemcacheFlushResponse
+MemcacheFlushRequest = memcache_service_pb.MemcacheFlushRequest
+
+MemcacheStatsRequest = memcache_service_pb.MemcacheStatsRequest
+MemcacheStatsResponse = memcache_service_pb.MemcacheStatsResponse
+
+DELETE_NETWORK_FAILURE = 0
+DELETE_ITEM_MISSING = 1
+DELETE_SUCCESSFUL = 2
+
+MAX_KEY_SIZE = 250
+MAX_VALUE_SIZE = 10 ** 6
+
+STAT_HITS = 'hits'
+STAT_MISSES = 'misses'
+STAT_BYTE_HITS = 'byte_hits'
+STAT_ITEMS = 'items'
+STAT_BYTES = 'bytes'
+STAT_OLDEST_ITEM_AGES = 'oldest_item_age'
+
+FLAG_TYPE_MASK = 7
+FLAG_COMPRESSED = 1 << 3
+
+TYPE_STR = 0
+TYPE_UNICODE = 1
+TYPE_PICKLED = 2
+TYPE_INT = 3
+TYPE_LONG = 4
+TYPE_BOOL = 5
+
+
+def _key_string(key, key_prefix='', server_to_user_dict=None):
+ """Utility function to handle different ways of requesting keys.
+
+ Args:
+ key: Either a string or tuple of (shard_number, string). In Google App
+ Engine the sharding is automatic so the shard number is ignored.
+ To memcache, the key is just bytes (no defined encoding).
+ key_prefix: Optional string prefix to prepend to key.
+ server_to_user_dict: Optional dictionary to populate with a mapping of
+ server-side key (which includes the key_prefix) to user-supplied key
+ (which does not have the prefix).
+
+ Returns:
+ The key as a non-unicode string prepended with key_prefix. This is
+    the key sent to and stored by the server. If the resulting key is
+    longer than MAX_KEY_SIZE, it is hashed with SHA-1 and replaced with
+    the hex representation of that hash.
+
+ Raises:
+ TypeError: If provided key isn't a string or tuple of (int, string)
+ or key_prefix or server_to_user_dict are of the wrong type.
+ """
+ if type(key) is types.TupleType:
+ key = key[1]
+ if not isinstance(key, basestring):
+ raise TypeError('Key must be a string instance, received %r' % key)
+ if not isinstance(key_prefix, basestring):
+ raise TypeError('key_prefix must be a string instance, received %r' %
+ key_prefix)
+
+ server_key = key_prefix + key
+ if isinstance(server_key, unicode):
+ server_key = server_key.encode('utf-8')
+
+ if len(server_key) > MAX_KEY_SIZE:
+ server_key = sha.new(server_key).hexdigest()
+
+ if server_to_user_dict is not None:
+ if not isinstance(server_to_user_dict, dict):
+      raise TypeError('server_to_user_dict must be a dict instance, '
+                      'received %r' % server_to_user_dict)
+ server_to_user_dict[server_key] = key
+
+ return server_key
+
+
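+# Minimal usage sketch of server-side key derivation ('ns:' is a placeholder
+# prefix): short keys pass through with the prefix prepended, while keys
+# longer than MAX_KEY_SIZE are replaced by the 40-character hex SHA-1 digest.
+def _example_key_string():
+  mapping = {}
+  server_key = _key_string('spam', key_prefix='ns:',
+                           server_to_user_dict=mapping)
+  assert server_key == 'ns:spam'
+  assert mapping[server_key] == 'spam'
+  assert len(_key_string('k' * 300)) == 40
+
+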
+def _validate_encode_value(value, do_pickle):
+ """Utility function to validate and encode server keys and values.
+
+ Args:
+ value: Value to store in memcache. If it's a string, it will get passed
+ along as-is. If it's a unicode string, it will be marked appropriately,
+ such that retrievals will yield a unicode value. If it's any other data
+ type, this function will attempt to pickle the data and then store the
+ serialized result, unpickling it upon retrieval.
+ do_pickle: Callable that takes an object and returns a non-unicode
+ string containing the pickled object.
+
+ Returns:
+ Tuple (stored_value, flags) where:
+ stored_value: The value as a non-unicode string that should be stored
+ in memcache.
+ flags: An integer with bits set from the FLAG_* constants in this file
+ to indicate the encoding of the key and value.
+
+ Raises:
+ ValueError: If the encoded value is too large.
+ pickle.PicklingError: If the value is not a string and could not be pickled.
+ RuntimeError: If a complicated data structure could not be pickled due to
+ too many levels of recursion in its composition.
+ """
+ flags = 0
+ stored_value = value
+
+ if isinstance(value, str):
+ pass
+ elif isinstance(value, unicode):
+ stored_value = value.encode('utf-8')
+ flags |= TYPE_UNICODE
+ elif isinstance(value, bool):
+ stored_value = str(int(value))
+ flags |= TYPE_BOOL
+ elif isinstance(value, int):
+ stored_value = str(value)
+ flags |= TYPE_INT
+ elif isinstance(value, long):
+ stored_value = str(value)
+ flags |= TYPE_LONG
+ else:
+ stored_value = do_pickle(value)
+ flags |= TYPE_PICKLED
+
+
+ if len(stored_value) > MAX_VALUE_SIZE:
+ raise ValueError('Values may not be more than %d bytes in length; '
+ 'received %d bytes' % (MAX_VALUE_SIZE, len(stored_value)))
+
+ return (stored_value, flags)
+
+
+def _decode_value(stored_value, flags, do_unpickle):
+ """Utility function for decoding values retrieved from memcache.
+
+ Args:
+ stored_value: The value as a non-unicode string that was stored.
+ flags: An integer with bits set from the FLAG_* constants in this file
+ that indicate the encoding of the key and value.
+ do_unpickle: Callable that takes a non-unicode string object that contains
+ a pickled object and returns the pickled object.
+
+ Returns:
+ The original object that was stored, be it a normal string, a unicode
+ string, int, long, or a Python object that was pickled.
+
+ Raises:
+ pickle.UnpicklingError: If the value could not be unpickled.
+ """
+ assert isinstance(stored_value, str)
+ assert isinstance(flags, (int, long))
+
+ type_number = flags & FLAG_TYPE_MASK
+ value = stored_value
+
+
+ if type_number == TYPE_STR:
+ return value
+ elif type_number == TYPE_UNICODE:
+ return value.decode('utf-8')
+ elif type_number == TYPE_PICKLED:
+ return do_unpickle(value)
+ elif type_number == TYPE_BOOL:
+ return bool(int(value))
+ elif type_number == TYPE_INT:
+ return int(value)
+ elif type_number == TYPE_LONG:
+ return long(value)
+ else:
+ assert False, "Unknown stored type"
+ assert False, "Shouldn't get here."
+
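+def _example_value_round_trip():
+  """Minimal sketch: each supported type survives an encode/decode cycle.
+
+  pickle.dumps and pickle.loads stand in for the configurable
+  pickler/unpickler callables that Client normally supplies.
+  """
+  for value in ('bytes', u'unicode', 42, 10 ** 20, True,
+                {'nested': ['structure']}):
+    stored_value, flags = _validate_encode_value(value, pickle.dumps)
+    assert isinstance(stored_value, str)
+    decoded = _decode_value(stored_value, flags, pickle.loads)
+    assert decoded == value and type(decoded) == type(value)
+
+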
+class Client(object):
+ """Memcache client object, through which one invokes all memcache operations.
+
+ Several methods are no-ops to retain source-level compatibility
+ with the existing popular Python memcache library.
+
+ Any method that takes a 'key' argument will accept that key as a string
+ (unicode or not) or a tuple of (hash_value, string) where the hash_value,
+ normally used for sharding onto a memcache instance, is instead ignored, as
+ Google App Engine deals with the sharding transparently. Keys in memcache are
+ just bytes, without a specified encoding. All such methods may raise TypeError
+ if provided a bogus key value and a ValueError if the key is too large.
+
+ Any method that takes a 'value' argument will accept as that value any
+ string (unicode or not), int, long, or pickle-able Python object, including
+ all native types. You'll get back from the cache the same type that you
+ originally put in.
+ """
+
+ def __init__(self, servers=None, debug=0,
+ pickleProtocol=pickle.HIGHEST_PROTOCOL,
+ pickler=pickle.Pickler,
+ unpickler=pickle.Unpickler,
+ pload=None,
+ pid=None,
+ make_sync_call=apiproxy_stub_map.MakeSyncCall):
+ """Create a new Client object.
+
+ No parameters are required.
+
+ Arguments:
+ servers: Ignored; only for compatibility.
+ debug: Ignored; only for compatibility.
+ pickleProtocol: Pickle protocol to use for pickling the object.
+ pickler: pickle.Pickler sub-class to use for pickling.
+ unpickler: pickle.Unpickler sub-class to use for unpickling.
+ pload: Callable to use for retrieving objects by persistent id.
+      pid: Callable to use for determining the persistent id for objects,
+        if any.
+ make_sync_call: Function to use to make an App Engine service call.
+ Used for testing.
+ """
+ self._pickle_data = cStringIO.StringIO()
+ self._pickler_instance = pickler(self._pickle_data,
+ protocol=pickleProtocol)
+ self._unpickler_instance = unpickler(self._pickle_data)
+ if pid is not None:
+ self._pickler_instance.persistent_id = pid
+ if pload is not None:
+ self._unpickler_instance.persistent_load = pload
+
+ def DoPickle(value):
+ self._pickle_data.truncate(0)
+ self._pickler_instance.clear_memo()
+ self._pickler_instance.dump(value)
+ return self._pickle_data.getvalue()
+ self._do_pickle = DoPickle
+
+ def DoUnpickle(value):
+ self._pickle_data.truncate(0)
+ self._pickle_data.write(value)
+ self._pickle_data.seek(0)
+ self._unpickler_instance.memo.clear()
+ return self._unpickler_instance.load()
+ self._do_unpickle = DoUnpickle
+
+ self._make_sync_call = make_sync_call
+
+ def set_servers(self, servers):
+ """Sets the pool of memcache servers used by the client.
+
+ This is purely a compatibility method. In Google App Engine, it's a no-op.
+ """
+ pass
+
+ def disconnect_all(self):
+ """Closes all connections to memcache servers.
+
+ This is purely a compatibility method. In Google App Engine, it's a no-op.
+ """
+ pass
+
+ def forget_dead_hosts(self):
+ """Resets all servers to the alive status.
+
+ This is purely a compatibility method. In Google App Engine, it's a no-op.
+ """
+ pass
+
+ def debuglog(self):
+ """Logging function for debugging information.
+
+ This is purely a compatibility method. In Google App Engine, it's a no-op.
+ """
+ pass
+
+ def get_stats(self):
+ """Gets memcache statistics for this application.
+
+ All of these statistics may reset due to various transient conditions. They
+ provide the best information available at the time of being called.
+
+ Returns:
+ Dictionary mapping statistic names to associated values. Statistics and
+ their associated meanings:
+
+ hits: Number of cache get requests resulting in a cache hit.
+ misses: Number of cache get requests resulting in a cache miss.
+ byte_hits: Sum of bytes transferred on get requests. Rolls over to
+ zero on overflow.
+ items: Number of key/value pairs in the cache.
+ bytes: Total size of all items in the cache.
+ oldest_item_age: How long in seconds since the oldest item in the
+ cache was accessed. Effectively, this indicates how long a new
+ item will survive in the cache without being accessed. This is
+ _not_ the amount of time that has elapsed since the item was
+ created.
+
+ On error, returns None.
+ """
+ request = MemcacheStatsRequest()
+ response = MemcacheStatsResponse()
+ try:
+ self._make_sync_call('memcache', 'Stats', request, response)
+ except apiproxy_errors.Error:
+ return None
+
+ if not response.has_stats():
+ return {
+ STAT_HITS: 0,
+ STAT_MISSES: 0,
+ STAT_BYTE_HITS: 0,
+ STAT_ITEMS: 0,
+ STAT_BYTES: 0,
+ STAT_OLDEST_ITEM_AGES: 0,
+ }
+
+ stats = response.stats()
+ return {
+ STAT_HITS: stats.hits(),
+ STAT_MISSES: stats.misses(),
+ STAT_BYTE_HITS: stats.byte_hits(),
+ STAT_ITEMS: stats.items(),
+ STAT_BYTES: stats.bytes(),
+ STAT_OLDEST_ITEM_AGES: stats.oldest_item_age(),
+ }
+
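+  # Usage sketch (values illustrative): compute a hit rate from the stats
+  # dictionary returned above, remembering that it may be None on error.
+  #
+  #   client = Client()
+  #   stats = client.get_stats()
+  #   if stats is not None:
+  #     total = stats[STAT_HITS] + stats[STAT_MISSES]
+  #     if total:
+  #       hit_rate = stats[STAT_HITS] / float(total)
+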
+ def flush_all(self):
+ """Deletes everything in memcache.
+
+ Returns:
+ True on success, False on RPC or server error.
+ """
+ request = MemcacheFlushRequest()
+ response = MemcacheFlushResponse()
+ try:
+ self._make_sync_call('memcache', 'FlushAll', request, response)
+ except apiproxy_errors.Error:
+ return False
+ return True
+
+ def get(self, key, namespace=None):
+ """Looks up a single key in memcache.
+
+ If you have multiple items to load, though, it's much more efficient
+ to use get_multi() instead, which loads them in one bulk operation,
+ reducing the networking latency that'd otherwise be required to do
+ many serialized get() operations.
+
+ Args:
+ key: The key in memcache to look up. See docs on Client
+ for details of format.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ The value of the key, if found in memcache, else None.
+ """
+ request = MemcacheGetRequest()
+ request.add_key(_key_string(key))
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheGetResponse()
+ try:
+ self._make_sync_call('memcache', 'Get', request, response)
+ except apiproxy_errors.Error:
+ return None
+
+ if not response.item_size():
+ return None
+
+ return _decode_value(response.item(0).value(),
+ response.item(0).flags(),
+ self._do_unpickle)
+
+ def get_multi(self, keys, key_prefix='', namespace=None):
+ """Looks up multiple keys from memcache in one operation.
+
+ This is the recommended way to do bulk loads.
+
+ Args:
+ keys: List of keys to look up. Keys may be strings or
+ tuples of (hash_value, string). Google App Engine
+ does the sharding and hashing automatically, though, so the hash
+ value is ignored. To memcache, keys are just series of bytes,
+ and not in any particular encoding.
+ key_prefix: Prefix to prepend to all keys when talking to the server;
+ not included in the returned dictionary.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ A dictionary of the keys and values that were present in memcache.
+ Even if the key_prefix was specified, that key_prefix won't be on
+ the keys in the returned dictionary.
+ """
+ request = MemcacheGetRequest()
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheGetResponse()
+ user_key = {}
+ for key in keys:
+ request.add_key(_key_string(key, key_prefix, user_key))
+ try:
+ self._make_sync_call('memcache', 'Get', request, response)
+ except apiproxy_errors.Error:
+ return {}
+
+ return_value = {}
+ for returned_item in response.item_list():
+ value = _decode_value(returned_item.value(), returned_item.flags(),
+ self._do_unpickle)
+ return_value[user_key[returned_item.key()]] = value
+ return return_value
+
+ def delete(self, key, seconds=0, namespace=None):
+ """Deletes a key from memcache.
+
+ Args:
+      key: Key to delete. See docs on Client for details.
+ seconds: Optional number of seconds to make deleted items 'locked'
+ for 'add' operations. Value can be a delta from current time (up to
+ 1 month), or an absolute Unix epoch time. Defaults to 0, which means
+ items can be immediately added. With or without this option,
+ a 'set' operation will always work. Float values will be rounded up to
+ the nearest whole second.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ DELETE_NETWORK_FAILURE (0) on network failure,
+ DELETE_ITEM_MISSING (1) if the server tried to delete the item but
+ didn't have it, or
+ DELETE_SUCCESSFUL (2) if the item was actually deleted.
+ This can be used as a boolean value, where a network failure is the
+ only bad condition.
+ """
+ if not isinstance(seconds, (int, long, float)):
+ raise TypeError('Delete timeout must be a number.')
+ if seconds < 0:
+ raise ValueError('Delete timeout must be non-negative.')
+
+ request = MemcacheDeleteRequest()
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheDeleteResponse()
+
+ delete_item = request.add_item()
+ delete_item.set_key(_key_string(key))
+ delete_item.set_delete_time(int(math.ceil(seconds)))
+ try:
+ self._make_sync_call('memcache', 'Delete', request, response)
+ except apiproxy_errors.Error:
+ return DELETE_NETWORK_FAILURE
+ assert response.delete_status_size() == 1, 'Unexpected status size.'
+
+ if response.delete_status(0) == MemcacheDeleteResponse.DELETED:
+ return DELETE_SUCCESSFUL
+ elif response.delete_status(0) == MemcacheDeleteResponse.NOT_FOUND:
+ return DELETE_ITEM_MISSING
+ assert False, 'Unexpected deletion status code.'
+
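+  # Usage sketch of the 'seconds' lock (key and value are placeholders):
+  # after client.delete('spam', seconds=5), client.add('spam', 'eggs') is
+  # expected to fail for roughly five seconds, while client.set('spam',
+  # 'eggs') always succeeds regardless of the lock.
+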
+ def delete_multi(self, keys, seconds=0, key_prefix='', namespace=None):
+ """Delete multiple keys at once.
+
+ Args:
+ keys: List of keys to delete.
+ seconds: Optional number of seconds to make deleted items 'locked'
+ for 'add' operations. Value can be a delta from current time (up to
+ 1 month), or an absolute Unix epoch time. Defaults to 0, which means
+ items can be immediately added. With or without this option,
+ a 'set' operation will always work. Float values will be rounded up to
+ the nearest whole second.
+ key_prefix: Prefix to put on all keys when sending specified
+ keys to memcache. See docs for get_multi() and set_multi().
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ True if all operations completed successfully. False if one
+ or more failed to complete.
+ """
+ if not isinstance(seconds, (int, long, float)):
+ raise TypeError('Delete timeout must be a number.')
+ if seconds < 0:
+ raise ValueError('Delete timeout must not be negative.')
+
+ request = MemcacheDeleteRequest()
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheDeleteResponse()
+
+ for key in keys:
+ delete_item = request.add_item()
+ delete_item.set_key(_key_string(key, key_prefix=key_prefix))
+ delete_item.set_delete_time(int(math.ceil(seconds)))
+ try:
+ self._make_sync_call('memcache', 'Delete', request, response)
+ except apiproxy_errors.Error:
+ return False
+ return True
+
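+  # A minimal batch sketch under the same assumptions: delete_multi() folds
+  # N deletions into one RPC and reports only an aggregate boolean, so
+  # per-key outcomes are not distinguished.
+  #
+  #   ok = Client().delete_multi(['a', 'b', 'c'], key_prefix='session:')
+  #   # ok is False only when the single underlying RPC failed
+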
+ def set(self, key, value, time=0, min_compress_len=0, namespace=None):
+ """Sets a key's value, regardless of previous contents in cache.
+
+ Unlike add() and replace(), this method always sets (or
+ overwrites) the value in memcache, regardless of previous
+ contents.
+
+ Args:
+ key: Key to set. See docs on Client for details.
+ value: Value to set. Any type. If complex, will be pickled.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+ min_compress_len: Ignored option for compatibility.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ True if set. False on error.
+ """
+ return self._set_with_policy(MemcacheSetRequest.SET, key, value, time=time,
+ namespace=namespace)
+
+ def add(self, key, value, time=0, min_compress_len=0, namespace=None):
+ """Sets a key's value, iff item is not already in memcache.
+
+ Args:
+ key: Key to set. See docs on Client for details.
+ value: Value to set. Any type. If complex, will be pickled.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+ min_compress_len: Ignored option for compatibility.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ True if added. False on error.
+ """
+ return self._set_with_policy(MemcacheSetRequest.ADD, key, value, time=time,
+ namespace=namespace)
+
+ def replace(self, key, value, time=0, min_compress_len=0, namespace=None):
+ """Replaces a key's value, failing if item isn't already in memcache.
+
+ Args:
+ key: Key to set. See docs on Client for details.
+ value: Value to set. Any type. If complex, will be pickled.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+ min_compress_len: Ignored option for compatibility.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ True if replaced. False on RPC error or cache miss.
+ """
+ return self._set_with_policy(MemcacheSetRequest.REPLACE,
+ key, value, time=time, namespace=namespace)
+
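+  # A small sketch contrasting the three storage policies, assuming the key
+  # is initially absent from the cache:
+  #
+  #   c = Client()
+  #   c.add('counter', '0')      # True: key absent, so ADD stores it
+  #   c.add('counter', '1')      # False: key now present, so ADD refuses
+  #   c.replace('counter', '2')  # True: key present, so REPLACE overwrites
+  #   c.set('counter', '3')      # True: SET stores unconditionally
+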
+ def _set_with_policy(self, policy, key, value, time=0, namespace=None):
+ """Sets a single key with a specified policy.
+
+ Helper function for set(), add(), and replace().
+
+ Args:
+ policy: One of MemcacheSetRequest.SET, .ADD, or .REPLACE.
+ key: Key to add, set, or replace. See docs on Client for details.
+ value: Value to set.
+ time: Expiration time, defaulting to 0 (never expiring).
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+      True if stored, False on RPC error or policy error, e.g. a replace
+      that failed because the item does not already exist, or an add
+      that failed because the item already exists.
+ """
+ if not isinstance(time, (int, long, float)):
+ raise TypeError('Expiration must be a number.')
+ if time < 0:
+ raise ValueError('Expiration must not be negative.')
+
+ request = MemcacheSetRequest()
+ item = request.add_item()
+ item.set_key(_key_string(key))
+ stored_value, flags = _validate_encode_value(value, self._do_pickle)
+ item.set_value(stored_value)
+ item.set_flags(flags)
+ item.set_set_policy(policy)
+ item.set_expiration_time(int(math.ceil(time)))
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheSetResponse()
+ try:
+ self._make_sync_call('memcache', 'Set', request, response)
+ except apiproxy_errors.Error:
+ return False
+ if response.set_status_size() != 1:
+ return False
+ return response.set_status(0) == MemcacheSetResponse.STORED
+
+ def _set_multi_with_policy(self, policy, mapping, time=0, key_prefix='',
+ namespace=None):
+ """Set multiple keys with a specified policy.
+
+    Helper function for set_multi(), add_multi(), and replace_multi(). This
+    reduces network latency compared with issuing many requests serially.
+
+ Args:
+ policy: One of MemcacheSetRequest.SET, ADD, or REPLACE.
+ mapping: Dictionary of keys to values.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+      key_prefix: Prefix to prepend to all keys.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ A list of keys whose values were NOT set. On total success,
+ this list should be empty. On network/RPC/server errors,
+ a list of all input keys is returned; in this case the keys
+ may or may not have been updated.
+ """
+ if not isinstance(time, (int, long, float)):
+ raise TypeError('Expiration must be a number.')
+ if time < 0.0:
+ raise ValueError('Expiration must not be negative.')
+
+ request = MemcacheSetRequest()
+ user_key = {}
+ server_keys = []
+ for key, value in mapping.iteritems():
+ server_key = _key_string(key, key_prefix, user_key)
+ stored_value, flags = _validate_encode_value(value, self._do_pickle)
+ server_keys.append(server_key)
+
+ item = request.add_item()
+ item.set_key(server_key)
+ item.set_value(stored_value)
+ item.set_flags(flags)
+ item.set_set_policy(policy)
+ item.set_expiration_time(int(math.ceil(time)))
+ namespace_manager._add_name_space(request, namespace)
+
+ response = MemcacheSetResponse()
+ try:
+ self._make_sync_call('memcache', 'Set', request, response)
+ except apiproxy_errors.Error:
+ return user_key.values()
+
+ assert response.set_status_size() == len(server_keys)
+
+ unset_list = []
+ for server_key, set_status in zip(server_keys, response.set_status_list()):
+ if set_status != MemcacheSetResponse.STORED:
+ unset_list.append(user_key[server_key])
+
+ return unset_list
+
+ def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0,
+ namespace=None):
+ """Set multiple keys' values at once, regardless of previous contents.
+
+ Args:
+ mapping: Dictionary of keys to values.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+      key_prefix: Prefix to prepend to all keys.
+ min_compress_len: Unimplemented compatibility option.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+ A list of keys whose values were NOT set. On total success,
+ this list should be empty.
+ """
+ return self._set_multi_with_policy(MemcacheSetRequest.SET, mapping,
+ time=time, key_prefix=key_prefix,
+ namespace=namespace)
+
+ def add_multi(self, mapping, time=0, key_prefix='', min_compress_len=0,
+ namespace=None):
+ """Set multiple keys' values iff items are not already in memcache.
+
+ Args:
+ mapping: Dictionary of keys to values.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+      key_prefix: Prefix to prepend to all keys.
+ min_compress_len: Unimplemented compatibility option.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+      A list of keys whose values were NOT set because they already
+      existed in memcache. On total success, this list should be empty.
+ """
+ return self._set_multi_with_policy(MemcacheSetRequest.ADD, mapping,
+ time=time, key_prefix=key_prefix,
+ namespace=namespace)
+
+ def replace_multi(self, mapping, time=0, key_prefix='', min_compress_len=0,
+ namespace=None):
+ """Replace multiple keys' values, failing if the items aren't in memcache.
+
+ Args:
+ mapping: Dictionary of keys to values.
+ time: Optional expiration time, either relative number of seconds
+ from current time (up to 1 month), or an absolute Unix epoch time.
+ By default, items never expire, though items may be evicted due to
+ memory pressure. Float values will be rounded up to the nearest
+ whole second.
+      key_prefix: Prefix to prepend to all keys.
+ min_compress_len: Unimplemented compatibility option.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+
+ Returns:
+      A list of keys whose values were NOT set because they did not already
+      exist in memcache. On total success, this list should be empty.
+ """
+ return self._set_multi_with_policy(MemcacheSetRequest.REPLACE, mapping,
+ time=time, key_prefix=key_prefix,
+ namespace=namespace)
+
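+  # A sketch of the batch return convention shared by set_multi(), add_multi()
+  # and replace_multi(), assuming 'cfg:a' is already cached: the returned list
+  # holds the caller's keys with key_prefix stripped, so an empty list means
+  # total success.
+  #
+  #   failed = Client().add_multi({'a': 1, 'b': 2}, key_prefix='cfg:')
+  #   # failed == ['a'] because 'cfg:a' already existed; 'cfg:b' was stored
+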
+ def incr(self, key, delta=1, namespace=None, initial_value=None):
+ """Atomically increments a key's value.
+
+    Internally, the value is an unsigned 64-bit integer. Memcache
+ doesn't check 64-bit overflows. The value, if too large, will
+ wrap around.
+
+ Unless an initial_value is specified, the key must already exist
+ in the cache to be incremented. To initialize a counter, either
+ specify initial_value or set() it to the initial value, as an
+ ASCII decimal integer. Future get()s of the key, post-increment,
+    will still return an ASCII decimal value.
+
+ Args:
+ key: Key to increment. See Client's docstring for details.
+ delta: Non-negative integer value (int or long) to increment key by,
+ defaulting to 1.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+      initial_value: Initial value to store in the cache if the key doesn't
+        already exist. The default, None, means no cache entry is created
+        for a missing key.
+
+ Returns:
+      New long integer value, or None if the key was not in the cache,
+      the value could not be incremented for any other reason, or a
+      network/RPC/server error occurred.
+
+ Raises:
+      ValueError: If delta is negative.
+ TypeError: If delta isn't an int or long.
+ """
+ return self._incrdecr(key, False, delta, namespace=namespace,
+ initial_value=initial_value)
+
+ def decr(self, key, delta=1, namespace=None, initial_value=None):
+ """Atomically decrements a key's value.
+
+    Internally, the value is an unsigned 64-bit integer. Memcache
+ caps decrementing below zero to zero.
+
+ The key must already exist in the cache to be decremented. See
+ docs on incr() for details.
+
+ Args:
+ key: Key to decrement. See Client's docstring for details.
+ delta: Non-negative integer value (int or long) to decrement key by,
+ defaulting to 1.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+      initial_value: Initial value to store in the cache if the key doesn't
+        already exist. The default, None, means no cache entry is created
+        for a missing key.
+
+ Returns:
+      New long integer value, or None if the key wasn't in the cache and
+      couldn't be decremented, or a network/RPC/server error occurred.
+
+ Raises:
+      ValueError: If delta is negative.
+ TypeError: If delta isn't an int or long.
+ """
+ return self._incrdecr(key, True, delta, namespace=namespace,
+ initial_value=initial_value)
+
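+  # A counter sketch, assuming the key 'hits' is initially absent: incr() and
+  # decr() are atomic on the server, and initial_value makes the first call
+  # self-seeding instead of returning None.
+  #
+  #   c = Client()
+  #   c.incr('hits')                   # None: key absent, no initial_value
+  #   c.incr('hits', initial_value=0)  # seeds the missing key, then increments
+  #   c.decr('hits', delta=100)        # capped at zero rather than wrapping
+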
+ def _incrdecr(self, key, is_negative, delta, namespace=None,
+ initial_value=None):
+ """Increment or decrement a key by a provided delta.
+
+ Args:
+ key: Key to increment or decrement.
+ is_negative: Boolean, if this is a decrement.
+ delta: Non-negative integer amount (int or long) to increment
+ or decrement by.
+ namespace: a string specifying an optional namespace to use in
+ the request.
+      initial_value: Initial value to store in the cache if the key doesn't
+        already exist. The default, None, means no cache entry is created
+        for a missing key.
+
+ Returns:
+ New long integer value, or None on cache miss or network/RPC/server
+ error.
+
+ Raises:
+ ValueError: If delta is negative.
+ TypeError: If delta isn't an int or long.
+ """
+ if not isinstance(delta, (int, long)):
+ raise TypeError('Delta must be an integer or long, received %r' % delta)
+ if delta < 0:
+ raise ValueError('Delta must not be negative.')
+
+ request = MemcacheIncrementRequest()
+ namespace_manager._add_name_space(request, namespace)
+ response = MemcacheIncrementResponse()
+ request.set_key(_key_string(key))
+ request.set_delta(delta)
+ if is_negative:
+ request.set_direction(MemcacheIncrementRequest.DECREMENT)
+ else:
+ request.set_direction(MemcacheIncrementRequest.INCREMENT)
+ if initial_value is not None:
+ request.set_initial_value(long(initial_value))
+
+ try:
+ self._make_sync_call('memcache', 'Increment', request, response)
+ except apiproxy_errors.Error:
+ return None
+
+ if response.has_new_value():
+ return response.new_value()
+ return None
+
+
+_CLIENT = None
+
+
+def setup_client(client_obj):
+ """Sets the Client object instance to use for all module-level methods.
+
+  Use this method if you want to have custom persistent_id() or
+  persistent_load() functions associated with your client.
+
+ Args:
+ client_obj: Instance of the memcache.Client object.
+ """
+ global _CLIENT
+ var_dict = globals()
+
+ _CLIENT = client_obj
+ var_dict['set_servers'] = _CLIENT.set_servers
+ var_dict['disconnect_all'] = _CLIENT.disconnect_all
+ var_dict['forget_dead_hosts'] = _CLIENT.forget_dead_hosts
+ var_dict['debuglog'] = _CLIENT.debuglog
+ var_dict['get'] = _CLIENT.get
+ var_dict['get_multi'] = _CLIENT.get_multi
+ var_dict['set'] = _CLIENT.set
+ var_dict['set_multi'] = _CLIENT.set_multi
+ var_dict['add'] = _CLIENT.add
+ var_dict['add_multi'] = _CLIENT.add_multi
+ var_dict['replace'] = _CLIENT.replace
+ var_dict['replace_multi'] = _CLIENT.replace_multi
+ var_dict['delete'] = _CLIENT.delete
+ var_dict['delete_multi'] = _CLIENT.delete_multi
+ var_dict['incr'] = _CLIENT.incr
+ var_dict['decr'] = _CLIENT.decr
+ var_dict['flush_all'] = _CLIENT.flush_all
+ var_dict['get_stats'] = _CLIENT.get_stats
+
+
+setup_client(Client())
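+
+# A short sketch of the module-level surface wired up above, assuming this
+# module is importable as google.appengine.api.memcache: the default
+# Client's bound methods become plain module functions, and setup_client()
+# can rebind them to a Client configured with custom persistence hooks.
+#
+#   from google.appengine.api import memcache
+#   memcache.set('k', 'v')   # dispatches to _CLIENT.set
+#   memcache.get('k')        # returns 'v'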
diff --git a/google_appengine/google/appengine/api/memcache/__init__.pyc b/google_appengine/google/appengine/api/memcache/__init__.pyc
new file mode 100644
index 0000000..2e2cfef
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/memcache/memcache_service_pb.py b/google_appengine/google/appengine/api/memcache/memcache_service_pb.py
new file mode 100644
index 0000000..8d499b2
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/memcache_service_pb.py
@@ -0,0 +1,2002 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class MemcacheServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ UNSPECIFIED_ERROR = 1
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "UNSPECIFIED_ERROR",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheGetRequest(ProtocolBuffer.ProtocolMessage):
+ has_name_space_ = 0
+ name_space_ = ""
+
+ def __init__(self, contents=None):
+ self.key_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def key_size(self): return len(self.key_)
+ def key_list(self): return self.key_
+
+ def key(self, i):
+ return self.key_[i]
+
+ def set_key(self, i, x):
+ self.key_[i] = x
+
+ def add_key(self, x):
+ self.key_.append(x)
+
+ def clear_key(self):
+ self.key_ = []
+
+ def name_space(self): return self.name_space_
+
+ def set_name_space(self, x):
+ self.has_name_space_ = 1
+ self.name_space_ = x
+
+ def clear_name_space(self):
+ if self.has_name_space_:
+ self.has_name_space_ = 0
+ self.name_space_ = ""
+
+ def has_name_space(self): return self.has_name_space_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.key_size()): self.add_key(x.key(i))
+ if (x.has_name_space()): self.set_name_space(x.name_space())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.key_) != len(x.key_): return 0
+ for e1, e2 in zip(self.key_, x.key_):
+ if e1 != e2: return 0
+ if self.has_name_space_ != x.has_name_space_: return 0
+ if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.key_)
+ for i in xrange(len(self.key_)): n += self.lengthString(len(self.key_[i]))
+ if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_name_space()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.key_)):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.key_[i])
+ if (self.has_name_space_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.name_space_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.add_key(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_name_space(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.key_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("key%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kkey = 1
+ kname_space = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "key",
+ 2: "name_space",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheGetResponse_Item(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+ has_flags_ = 0
+ flags_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+ def flags(self): return self.flags_
+
+ def set_flags(self, x):
+ self.has_flags_ = 1
+ self.flags_ = x
+
+ def clear_flags(self):
+ if self.has_flags_:
+ self.has_flags_ = 0
+ self.flags_ = 0
+
+ def has_flags(self): return self.has_flags_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+ if (x.has_flags()): self.set_flags(x.flags())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ if self.has_flags_ != x.has_flags_: return 0
+ if self.has_flags_ and self.flags_ != x.flags_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ if (self.has_flags_): n += 5
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+ self.clear_flags()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(26)
+ out.putPrefixedString(self.value_)
+ if (self.has_flags_):
+ out.putVarInt32(37)
+ out.put32(self.flags_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_value(d.getPrefixedString())
+ continue
+ if tt == 37:
+ self.set_flags(d.get32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatFixed32(self.flags_))
+ return res
+
+class MemcacheGetResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ self.item_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def item_size(self): return len(self.item_)
+ def item_list(self): return self.item_
+
+ def item(self, i):
+ return self.item_[i]
+
+ def mutable_item(self, i):
+ return self.item_[i]
+
+ def add_item(self):
+ x = MemcacheGetResponse_Item()
+ self.item_.append(x)
+ return x
+
+ def clear_item(self):
+ self.item_ = []
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.item_) != len(x.item_): return 0
+ for e1, e2 in zip(self.item_, x.item_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.item_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.item_)
+ for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
+ return n + 0
+
+ def Clear(self):
+ self.clear_item()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.item_)):
+ out.putVarInt32(11)
+ self.item_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_item().TryMerge(d)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.item_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Item%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kItemGroup = 1
+ kItemkey = 2
+ kItemvalue = 3
+ kItemflags = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Item",
+ 2: "key",
+ 3: "value",
+ 4: "flags",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.FLOAT,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheSetRequest_Item(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+ has_flags_ = 0
+ flags_ = 0
+ has_set_policy_ = 0
+ set_policy_ = 1
+ has_expiration_time_ = 0
+ expiration_time_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+ def flags(self): return self.flags_
+
+ def set_flags(self, x):
+ self.has_flags_ = 1
+ self.flags_ = x
+
+ def clear_flags(self):
+ if self.has_flags_:
+ self.has_flags_ = 0
+ self.flags_ = 0
+
+ def has_flags(self): return self.has_flags_
+
+ def set_policy(self): return self.set_policy_
+
+ def set_set_policy(self, x):
+ self.has_set_policy_ = 1
+ self.set_policy_ = x
+
+ def clear_set_policy(self):
+ if self.has_set_policy_:
+ self.has_set_policy_ = 0
+ self.set_policy_ = 1
+
+ def has_set_policy(self): return self.has_set_policy_
+
+ def expiration_time(self): return self.expiration_time_
+
+ def set_expiration_time(self, x):
+ self.has_expiration_time_ = 1
+ self.expiration_time_ = x
+
+ def clear_expiration_time(self):
+ if self.has_expiration_time_:
+ self.has_expiration_time_ = 0
+ self.expiration_time_ = 0
+
+ def has_expiration_time(self): return self.has_expiration_time_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+ if (x.has_flags()): self.set_flags(x.flags())
+ if (x.has_set_policy()): self.set_set_policy(x.set_policy())
+ if (x.has_expiration_time()): self.set_expiration_time(x.expiration_time())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ if self.has_flags_ != x.has_flags_: return 0
+ if self.has_flags_ and self.flags_ != x.flags_: return 0
+ if self.has_set_policy_ != x.has_set_policy_: return 0
+ if self.has_set_policy_ and self.set_policy_ != x.set_policy_: return 0
+ if self.has_expiration_time_ != x.has_expiration_time_: return 0
+ if self.has_expiration_time_ and self.expiration_time_ != x.expiration_time_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ if (self.has_flags_): n += 5
+ if (self.has_set_policy_): n += 1 + self.lengthVarInt64(self.set_policy_)
+ if (self.has_expiration_time_): n += 5
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+ self.clear_flags()
+ self.clear_set_policy()
+ self.clear_expiration_time()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(26)
+ out.putPrefixedString(self.value_)
+ if (self.has_flags_):
+ out.putVarInt32(37)
+ out.put32(self.flags_)
+ if (self.has_set_policy_):
+ out.putVarInt32(40)
+ out.putVarInt32(self.set_policy_)
+ if (self.has_expiration_time_):
+ out.putVarInt32(53)
+ out.put32(self.expiration_time_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 26:
+ self.set_value(d.getPrefixedString())
+ continue
+ if tt == 37:
+ self.set_flags(d.get32())
+ continue
+ if tt == 40:
+ self.set_set_policy(d.getVarInt32())
+ continue
+ if tt == 53:
+ self.set_expiration_time(d.get32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
+ if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatFixed32(self.flags_))
+ if self.has_set_policy_: res+=prefix+("set_policy: %s\n" % self.DebugFormatInt32(self.set_policy_))
+ if self.has_expiration_time_: res+=prefix+("expiration_time: %s\n" % self.DebugFormatFixed32(self.expiration_time_))
+ return res
+
+class MemcacheSetRequest(ProtocolBuffer.ProtocolMessage):
+
+ SET = 1
+ ADD = 2
+ REPLACE = 3
+
+ _SetPolicy_NAMES = {
+ 1: "SET",
+ 2: "ADD",
+ 3: "REPLACE",
+ }
+
+ def SetPolicy_Name(cls, x): return cls._SetPolicy_NAMES.get(x, "")
+ SetPolicy_Name = classmethod(SetPolicy_Name)
+
+ has_name_space_ = 0
+ name_space_ = ""
+
+ def __init__(self, contents=None):
+ self.item_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def item_size(self): return len(self.item_)
+ def item_list(self): return self.item_
+
+ def item(self, i):
+ return self.item_[i]
+
+ def mutable_item(self, i):
+ return self.item_[i]
+
+ def add_item(self):
+ x = MemcacheSetRequest_Item()
+ self.item_.append(x)
+ return x
+
+ def clear_item(self):
+ self.item_ = []
+ def name_space(self): return self.name_space_
+
+ def set_name_space(self, x):
+ self.has_name_space_ = 1
+ self.name_space_ = x
+
+ def clear_name_space(self):
+ if self.has_name_space_:
+ self.has_name_space_ = 0
+ self.name_space_ = ""
+
+ def has_name_space(self): return self.has_name_space_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
+ if (x.has_name_space()): self.set_name_space(x.name_space())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.item_) != len(x.item_): return 0
+ for e1, e2 in zip(self.item_, x.item_):
+ if e1 != e2: return 0
+ if self.has_name_space_ != x.has_name_space_: return 0
+ if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.item_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.item_)
+ for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
+ if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_item()
+ self.clear_name_space()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.item_)):
+ out.putVarInt32(11)
+ self.item_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+ if (self.has_name_space_):
+ out.putVarInt32(58)
+ out.putPrefixedString(self.name_space_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_item().TryMerge(d)
+ continue
+ if tt == 58:
+ self.set_name_space(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.item_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Item%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kItemGroup = 1
+ kItemkey = 2
+ kItemvalue = 3
+ kItemflags = 4
+ kItemset_policy = 5
+ kItemexpiration_time = 6
+ kname_space = 7
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Item",
+ 2: "key",
+ 3: "value",
+ 4: "flags",
+ 5: "set_policy",
+ 6: "expiration_time",
+ 7: "name_space",
+ }, 7)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STRING,
+ 4: ProtocolBuffer.Encoder.FLOAT,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ 6: ProtocolBuffer.Encoder.FLOAT,
+ 7: ProtocolBuffer.Encoder.STRING,
+ }, 7, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheSetResponse(ProtocolBuffer.ProtocolMessage):
+
+ STORED = 1
+ NOT_STORED = 2
+ ERROR = 3
+
+ _SetStatusCode_NAMES = {
+ 1: "STORED",
+ 2: "NOT_STORED",
+ 3: "ERROR",
+ }
+
+ def SetStatusCode_Name(cls, x): return cls._SetStatusCode_NAMES.get(x, "")
+ SetStatusCode_Name = classmethod(SetStatusCode_Name)
+
+
+ def __init__(self, contents=None):
+ self.set_status_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def set_status_size(self): return len(self.set_status_)
+ def set_status_list(self): return self.set_status_
+
+ def set_status(self, i):
+ return self.set_status_[i]
+
+ def set_set_status(self, i, x):
+ self.set_status_[i] = x
+
+ def add_set_status(self, x):
+ self.set_status_.append(x)
+
+ def clear_set_status(self):
+ self.set_status_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.set_status_size()): self.add_set_status(x.set_status(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.set_status_) != len(x.set_status_): return 0
+ for e1, e2 in zip(self.set_status_, x.set_status_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.set_status_)
+ for i in xrange(len(self.set_status_)): n += self.lengthVarInt64(self.set_status_[i])
+ return n + 0
+
+ def Clear(self):
+ self.clear_set_status()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.set_status_)):
+ out.putVarInt32(8)
+ out.putVarInt32(self.set_status_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.add_set_status(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.set_status_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("set_status%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kset_status = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "set_status",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheDeleteRequest_Item(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_delete_time_ = 0
+ delete_time_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def delete_time(self): return self.delete_time_
+
+ def set_delete_time(self, x):
+ self.has_delete_time_ = 1
+ self.delete_time_ = x
+
+ def clear_delete_time(self):
+ if self.has_delete_time_:
+ self.has_delete_time_ = 0
+ self.delete_time_ = 0
+
+ def has_delete_time(self): return self.has_delete_time_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_delete_time()): self.set_delete_time(x.delete_time())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_delete_time_ != x.has_delete_time_: return 0
+ if self.has_delete_time_ and self.delete_time_ != x.delete_time_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ if (self.has_delete_time_): n += 5
+ return n + 1
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_delete_time()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.key_)
+ if (self.has_delete_time_):
+ out.putVarInt32(29)
+ out.put32(self.delete_time_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 12: break
+ if tt == 18:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 29:
+ self.set_delete_time(d.get32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_delete_time_: res+=prefix+("delete_time: %s\n" % self.DebugFormatFixed32(self.delete_time_))
+ return res
+
+class MemcacheDeleteRequest(ProtocolBuffer.ProtocolMessage):
+ has_name_space_ = 0
+ name_space_ = ""
+
+ def __init__(self, contents=None):
+ self.item_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def item_size(self): return len(self.item_)
+ def item_list(self): return self.item_
+
+ def item(self, i):
+ return self.item_[i]
+
+ def mutable_item(self, i):
+ return self.item_[i]
+
+ def add_item(self):
+ x = MemcacheDeleteRequest_Item()
+ self.item_.append(x)
+ return x
+
+ def clear_item(self):
+ self.item_ = []
+ def name_space(self): return self.name_space_
+
+ def set_name_space(self, x):
+ self.has_name_space_ = 1
+ self.name_space_ = x
+
+ def clear_name_space(self):
+ if self.has_name_space_:
+ self.has_name_space_ = 0
+ self.name_space_ = ""
+
+ def has_name_space(self): return self.has_name_space_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
+ if (x.has_name_space()): self.set_name_space(x.name_space())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.item_) != len(x.item_): return 0
+ for e1, e2 in zip(self.item_, x.item_):
+ if e1 != e2: return 0
+ if self.has_name_space_ != x.has_name_space_: return 0
+ if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ for p in self.item_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 2 * len(self.item_)
+ for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
+ if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
+ return n + 0
+
+ def Clear(self):
+ self.clear_item()
+ self.clear_name_space()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.item_)):
+ out.putVarInt32(11)
+ self.item_[i].OutputUnchecked(out)
+ out.putVarInt32(12)
+ if (self.has_name_space_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.name_space_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 11:
+ self.add_item().TryMerge(d)
+ continue
+ if tt == 34:
+ self.set_name_space(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.item_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Item%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kItemGroup = 1
+ kItemkey = 2
+ kItemdelete_time = 3
+ kname_space = 4
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Item",
+ 2: "key",
+ 3: "delete_time",
+ 4: "name_space",
+ }, 4)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STARTGROUP,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.FLOAT,
+ 4: ProtocolBuffer.Encoder.STRING,
+ }, 4, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheDeleteResponse(ProtocolBuffer.ProtocolMessage):
+
+ DELETED = 1
+ NOT_FOUND = 2
+
+ _DeleteStatusCode_NAMES = {
+ 1: "DELETED",
+ 2: "NOT_FOUND",
+ }
+
+ def DeleteStatusCode_Name(cls, x): return cls._DeleteStatusCode_NAMES.get(x, "")
+ DeleteStatusCode_Name = classmethod(DeleteStatusCode_Name)
+
+
+ def __init__(self, contents=None):
+ self.delete_status_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def delete_status_size(self): return len(self.delete_status_)
+ def delete_status_list(self): return self.delete_status_
+
+ def delete_status(self, i):
+ return self.delete_status_[i]
+
+ def set_delete_status(self, i, x):
+ self.delete_status_[i] = x
+
+ def add_delete_status(self, x):
+ self.delete_status_.append(x)
+
+ def clear_delete_status(self):
+ self.delete_status_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.delete_status_size()): self.add_delete_status(x.delete_status(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.delete_status_) != len(x.delete_status_): return 0
+ for e1, e2 in zip(self.delete_status_, x.delete_status_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.delete_status_)
+ for i in xrange(len(self.delete_status_)): n += self.lengthVarInt64(self.delete_status_[i])
+ return n + 0
+
+ def Clear(self):
+ self.clear_delete_status()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.delete_status_)):
+ out.putVarInt32(8)
+ out.putVarInt32(self.delete_status_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.add_delete_status(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.delete_status_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("delete_status%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kdelete_status = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "delete_status",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheIncrementRequest(ProtocolBuffer.ProtocolMessage):
+
+ INCREMENT = 1
+ DECREMENT = 2
+
+ _Direction_NAMES = {
+ 1: "INCREMENT",
+ 2: "DECREMENT",
+ }
+
+ def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
+ Direction_Name = classmethod(Direction_Name)
+
+ has_key_ = 0
+ key_ = ""
+ has_name_space_ = 0
+ name_space_ = ""
+ has_delta_ = 0
+ delta_ = 1
+ has_direction_ = 0
+ direction_ = 1
+ has_initial_value_ = 0
+ initial_value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def name_space(self): return self.name_space_
+
+ def set_name_space(self, x):
+ self.has_name_space_ = 1
+ self.name_space_ = x
+
+ def clear_name_space(self):
+ if self.has_name_space_:
+ self.has_name_space_ = 0
+ self.name_space_ = ""
+
+ def has_name_space(self): return self.has_name_space_
+
+ def delta(self): return self.delta_
+
+ def set_delta(self, x):
+ self.has_delta_ = 1
+ self.delta_ = x
+
+ def clear_delta(self):
+ if self.has_delta_:
+ self.has_delta_ = 0
+ self.delta_ = 1
+
+ def has_delta(self): return self.has_delta_
+
+ def direction(self): return self.direction_
+
+ def set_direction(self, x):
+ self.has_direction_ = 1
+ self.direction_ = x
+
+ def clear_direction(self):
+ if self.has_direction_:
+ self.has_direction_ = 0
+ self.direction_ = 1
+
+ def has_direction(self): return self.has_direction_
+
+ def initial_value(self): return self.initial_value_
+
+ def set_initial_value(self, x):
+ self.has_initial_value_ = 1
+ self.initial_value_ = x
+
+ def clear_initial_value(self):
+ if self.has_initial_value_:
+ self.has_initial_value_ = 0
+ self.initial_value_ = 0
+
+ def has_initial_value(self): return self.has_initial_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_name_space()): self.set_name_space(x.name_space())
+ if (x.has_delta()): self.set_delta(x.delta())
+ if (x.has_direction()): self.set_direction(x.direction())
+ if (x.has_initial_value()): self.set_initial_value(x.initial_value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_name_space_ != x.has_name_space_: return 0
+ if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
+ if self.has_delta_ != x.has_delta_: return 0
+ if self.has_delta_ and self.delta_ != x.delta_: return 0
+ if self.has_direction_ != x.has_direction_: return 0
+ if self.has_direction_ and self.direction_ != x.direction_: return 0
+ if self.has_initial_value_ != x.has_initial_value_: return 0
+ if self.has_initial_value_ and self.initial_value_ != x.initial_value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ if (self.has_name_space_): n += 1 + self.lengthString(len(self.name_space_))
+ if (self.has_delta_): n += 1 + self.lengthVarInt64(self.delta_)
+ if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
+ if (self.has_initial_value_): n += 1 + self.lengthVarInt64(self.initial_value_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_name_space()
+ self.clear_delta()
+ self.clear_direction()
+ self.clear_initial_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.key_)
+ if (self.has_delta_):
+ out.putVarInt32(16)
+ out.putVarUint64(self.delta_)
+ if (self.has_direction_):
+ out.putVarInt32(24)
+ out.putVarInt32(self.direction_)
+ if (self.has_name_space_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.name_space_)
+ if (self.has_initial_value_):
+ out.putVarInt32(40)
+ out.putVarUint64(self.initial_value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 16:
+ self.set_delta(d.getVarUint64())
+ continue
+ if tt == 24:
+ self.set_direction(d.getVarInt32())
+ continue
+ if tt == 34:
+ self.set_name_space(d.getPrefixedString())
+ continue
+ if tt == 40:
+ self.set_initial_value(d.getVarUint64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
+ if self.has_delta_: res+=prefix+("delta: %s\n" % self.DebugFormatInt64(self.delta_))
+ if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
+ if self.has_initial_value_: res+=prefix+("initial_value: %s\n" % self.DebugFormatInt64(self.initial_value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kkey = 1
+ kname_space = 4
+ kdelta = 2
+ kdirection = 3
+ kinitial_value = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "key",
+ 2: "delta",
+ 3: "direction",
+ 4: "name_space",
+ 5: "initial_value",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheIncrementResponse(ProtocolBuffer.ProtocolMessage):
+ has_new_value_ = 0
+ new_value_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def new_value(self): return self.new_value_
+
+ def set_new_value(self, x):
+ self.has_new_value_ = 1
+ self.new_value_ = x
+
+ def clear_new_value(self):
+ if self.has_new_value_:
+ self.has_new_value_ = 0
+ self.new_value_ = 0
+
+ def has_new_value(self): return self.has_new_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_new_value()): self.set_new_value(x.new_value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_new_value_ != x.has_new_value_: return 0
+ if self.has_new_value_ and self.new_value_ != x.new_value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_new_value_): n += 1 + self.lengthVarInt64(self.new_value_)
+ return n + 0
+
+ def Clear(self):
+ self.clear_new_value()
+
+ def OutputUnchecked(self, out):
+ if (self.has_new_value_):
+ out.putVarInt32(8)
+ out.putVarUint64(self.new_value_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_new_value(d.getVarUint64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_new_value_: res+=prefix+("new_value: %s\n" % self.DebugFormatInt64(self.new_value_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ knew_value = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "new_value",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheFlushRequest(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheFlushResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheStatsRequest(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MergedNamespaceStats(ProtocolBuffer.ProtocolMessage):
+ has_hits_ = 0
+ hits_ = 0
+ has_misses_ = 0
+ misses_ = 0
+ has_byte_hits_ = 0
+ byte_hits_ = 0
+ has_items_ = 0
+ items_ = 0
+ has_bytes_ = 0
+ bytes_ = 0
+ has_oldest_item_age_ = 0
+ oldest_item_age_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def hits(self): return self.hits_
+
+ def set_hits(self, x):
+ self.has_hits_ = 1
+ self.hits_ = x
+
+ def clear_hits(self):
+ if self.has_hits_:
+ self.has_hits_ = 0
+ self.hits_ = 0
+
+ def has_hits(self): return self.has_hits_
+
+ def misses(self): return self.misses_
+
+ def set_misses(self, x):
+ self.has_misses_ = 1
+ self.misses_ = x
+
+ def clear_misses(self):
+ if self.has_misses_:
+ self.has_misses_ = 0
+ self.misses_ = 0
+
+ def has_misses(self): return self.has_misses_
+
+ def byte_hits(self): return self.byte_hits_
+
+ def set_byte_hits(self, x):
+ self.has_byte_hits_ = 1
+ self.byte_hits_ = x
+
+ def clear_byte_hits(self):
+ if self.has_byte_hits_:
+ self.has_byte_hits_ = 0
+ self.byte_hits_ = 0
+
+ def has_byte_hits(self): return self.has_byte_hits_
+
+ def items(self): return self.items_
+
+ def set_items(self, x):
+ self.has_items_ = 1
+ self.items_ = x
+
+ def clear_items(self):
+ if self.has_items_:
+ self.has_items_ = 0
+ self.items_ = 0
+
+ def has_items(self): return self.has_items_
+
+ def bytes(self): return self.bytes_
+
+ def set_bytes(self, x):
+ self.has_bytes_ = 1
+ self.bytes_ = x
+
+ def clear_bytes(self):
+ if self.has_bytes_:
+ self.has_bytes_ = 0
+ self.bytes_ = 0
+
+ def has_bytes(self): return self.has_bytes_
+
+ def oldest_item_age(self): return self.oldest_item_age_
+
+ def set_oldest_item_age(self, x):
+ self.has_oldest_item_age_ = 1
+ self.oldest_item_age_ = x
+
+ def clear_oldest_item_age(self):
+ if self.has_oldest_item_age_:
+ self.has_oldest_item_age_ = 0
+ self.oldest_item_age_ = 0
+
+ def has_oldest_item_age(self): return self.has_oldest_item_age_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_hits()): self.set_hits(x.hits())
+ if (x.has_misses()): self.set_misses(x.misses())
+ if (x.has_byte_hits()): self.set_byte_hits(x.byte_hits())
+ if (x.has_items()): self.set_items(x.items())
+ if (x.has_bytes()): self.set_bytes(x.bytes())
+ if (x.has_oldest_item_age()): self.set_oldest_item_age(x.oldest_item_age())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_hits_ != x.has_hits_: return 0
+ if self.has_hits_ and self.hits_ != x.hits_: return 0
+ if self.has_misses_ != x.has_misses_: return 0
+ if self.has_misses_ and self.misses_ != x.misses_: return 0
+ if self.has_byte_hits_ != x.has_byte_hits_: return 0
+ if self.has_byte_hits_ and self.byte_hits_ != x.byte_hits_: return 0
+ if self.has_items_ != x.has_items_: return 0
+ if self.has_items_ and self.items_ != x.items_: return 0
+ if self.has_bytes_ != x.has_bytes_: return 0
+ if self.has_bytes_ and self.bytes_ != x.bytes_: return 0
+ if self.has_oldest_item_age_ != x.has_oldest_item_age_: return 0
+ if self.has_oldest_item_age_ and self.oldest_item_age_ != x.oldest_item_age_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_hits_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: hits not set.')
+ if (not self.has_misses_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: misses not set.')
+ if (not self.has_byte_hits_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: byte_hits not set.')
+ if (not self.has_items_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: items not set.')
+ if (not self.has_bytes_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: bytes not set.')
+ if (not self.has_oldest_item_age_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: oldest_item_age not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.hits_)
+ n += self.lengthVarInt64(self.misses_)
+ n += self.lengthVarInt64(self.byte_hits_)
+ n += self.lengthVarInt64(self.items_)
+ n += self.lengthVarInt64(self.bytes_)
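+ # The constant 10 covers five 1-byte varint tags (fields 1-5) plus the
+ # 1-byte tag and 4-byte payload of the fixed32 oldest_item_age field.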
+ return n + 10
+
+ def Clear(self):
+ self.clear_hits()
+ self.clear_misses()
+ self.clear_byte_hits()
+ self.clear_items()
+ self.clear_bytes()
+ self.clear_oldest_item_age()
+
+ def OutputUnchecked(self, out):
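+ # Tag bytes follow the proto wire format, (field_number << 3) | wire_type:
+ # 8, 16, 24, 32 and 40 are varint tags for fields 1-5; 53 is the fixed32
+ # tag for field 6.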
+ out.putVarInt32(8)
+ out.putVarUint64(self.hits_)
+ out.putVarInt32(16)
+ out.putVarUint64(self.misses_)
+ out.putVarInt32(24)
+ out.putVarUint64(self.byte_hits_)
+ out.putVarInt32(32)
+ out.putVarUint64(self.items_)
+ out.putVarInt32(40)
+ out.putVarUint64(self.bytes_)
+ out.putVarInt32(53)
+ out.put32(self.oldest_item_age_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_hits(d.getVarUint64())
+ continue
+ if tt == 16:
+ self.set_misses(d.getVarUint64())
+ continue
+ if tt == 24:
+ self.set_byte_hits(d.getVarUint64())
+ continue
+ if tt == 32:
+ self.set_items(d.getVarUint64())
+ continue
+ if tt == 40:
+ self.set_bytes(d.getVarUint64())
+ continue
+ if tt == 53:
+ self.set_oldest_item_age(d.get32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_hits_: res+=prefix+("hits: %s\n" % self.DebugFormatInt64(self.hits_))
+ if self.has_misses_: res+=prefix+("misses: %s\n" % self.DebugFormatInt64(self.misses_))
+ if self.has_byte_hits_: res+=prefix+("byte_hits: %s\n" % self.DebugFormatInt64(self.byte_hits_))
+ if self.has_items_: res+=prefix+("items: %s\n" % self.DebugFormatInt64(self.items_))
+ if self.has_bytes_: res+=prefix+("bytes: %s\n" % self.DebugFormatInt64(self.bytes_))
+ if self.has_oldest_item_age_: res+=prefix+("oldest_item_age: %s\n" % self.DebugFormatFixed32(self.oldest_item_age_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ khits = 1
+ kmisses = 2
+ kbyte_hits = 3
+ kitems = 4
+ kbytes = 5
+ koldest_item_age = 6
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "hits",
+ 2: "misses",
+ 3: "byte_hits",
+ 4: "items",
+ 5: "bytes",
+ 6: "oldest_item_age",
+ }, 6)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.NUMERIC,
+ 5: ProtocolBuffer.Encoder.NUMERIC,
+ 6: ProtocolBuffer.Encoder.FLOAT,
+ }, 6, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class MemcacheStatsResponse(ProtocolBuffer.ProtocolMessage):
+ has_stats_ = 0
+ stats_ = None
+
+ def __init__(self, contents=None):
+ self.lazy_init_lock_ = thread.allocate_lock()
+ if contents is not None: self.MergeFromString(contents)
+
+ def stats(self):
+ if self.stats_ is None:
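+ # Double-checked locking: re-test under the lock so that only one
+ # thread constructs the MergedNamespaceStats instance.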
+ self.lazy_init_lock_.acquire()
+ try:
+ if self.stats_ is None: self.stats_ = MergedNamespaceStats()
+ finally:
+ self.lazy_init_lock_.release()
+ return self.stats_
+
+ def mutable_stats(self): self.has_stats_ = 1; return self.stats()
+
+ def clear_stats(self):
+ if self.has_stats_:
+ self.has_stats_ = 0
+ if self.stats_ is not None: self.stats_.Clear()
+
+ def has_stats(self): return self.has_stats_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_stats()): self.mutable_stats().MergeFrom(x.stats())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_stats_ != x.has_stats_: return 0
+ if self.has_stats_ and self.stats_ != x.stats_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (self.has_stats_ and not self.stats_.IsInitialized(debug_strs)): initialized = 0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_stats_): n += 1 + self.lengthString(self.stats_.ByteSize())
+ return n + 0
+
+ def Clear(self):
+ self.clear_stats()
+
+ def OutputUnchecked(self, out):
+ if (self.has_stats_):
+ out.putVarInt32(10)
+ out.putVarInt32(self.stats_.ByteSize())
+ self.stats_.OutputUnchecked(out)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ length = d.getVarInt32()
+ tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
+ d.skip(length)
+ self.mutable_stats().TryMerge(tmp)
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_stats_:
+ res+=prefix+"stats <\n"
+ res+=self.stats_.__str__(prefix + " ", printElemNumber)
+ res+=prefix+">\n"
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kstats = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "stats",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['MemcacheServiceError','MemcacheGetRequest','MemcacheGetResponse','MemcacheGetResponse_Item','MemcacheSetRequest','MemcacheSetRequest_Item','MemcacheSetResponse','MemcacheDeleteRequest','MemcacheDeleteRequest_Item','MemcacheDeleteResponse','MemcacheIncrementRequest','MemcacheIncrementResponse','MemcacheFlushRequest','MemcacheFlushResponse','MemcacheStatsRequest','MergedNamespaceStats','MemcacheStatsResponse']
diff --git a/google_appengine/google/appengine/api/memcache/memcache_service_pb.pyc b/google_appengine/google/appengine/api/memcache/memcache_service_pb.pyc
new file mode 100644
index 0000000..e7f4872
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/memcache_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/memcache/memcache_stub.py b/google_appengine/google/appengine/api/memcache/memcache_stub.py
new file mode 100755
index 0000000..8d03bf2
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/memcache_stub.py
@@ -0,0 +1,293 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the memcache API, keeping all data in process memory."""
+
+
+
+import logging
+import time
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import memcache
+from google.appengine.api.memcache import memcache_service_pb
+
+MemcacheSetResponse = memcache_service_pb.MemcacheSetResponse
+MemcacheSetRequest = memcache_service_pb.MemcacheSetRequest
+MemcacheIncrementRequest = memcache_service_pb.MemcacheIncrementRequest
+MemcacheDeleteResponse = memcache_service_pb.MemcacheDeleteResponse
+
+
+class CacheEntry(object):
+ """An entry in the cache."""
+
+ def __init__(self, value, expiration, flags, gettime):
+ """Initializer.
+
+ Args:
+ value: String containing the data for this entry.
+ expiration: Number containing the expiration time or offset in seconds
+ for this entry.
+ flags: Opaque flags used by the memcache implementation.
+ gettime: Used for testing. Function that works like time.time().
+ """
+ assert isinstance(value, basestring)
+ assert len(value) <= memcache.MAX_VALUE_SIZE
+ assert isinstance(expiration, (int, long))
+
+ self._gettime = gettime
+ self.value = value
+ self.flags = flags
+ self.created_time = self._gettime()
+ self.will_expire = expiration != 0
+ self.locked = False
+ self._SetExpiration(expiration)
+
+ def _SetExpiration(self, expiration):
+ """Sets the expiration for this entry.
+
+ Args:
+ expiration: Number containing the expiration time or offset in seconds
+ for this entry. If expiration is above one month, then it's considered
+ an absolute time since the UNIX epoch.
+ """
+ if expiration > (86400 * 30):
+ self.expiration_time = expiration
+ else:
+ self.expiration_time = self._gettime() + expiration
+
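+ # For example, with the cutoff above, _SetExpiration(3600) expires one
+ # hour from now, while _SetExpiration(1300000000) is treated as an
+ # absolute UNIX timestamp, since it exceeds 86400 * 30 == 2592000.
+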
+ def CheckExpired(self):
+ """Returns True if this entry has expired; False otherwise."""
+ return self.will_expire and self._gettime() >= self.expiration_time
+
+ def ExpireAndLock(self, timeout):
+ """Marks this entry as deleted and locks it for the expiration time.
+
+ Used to implement memcache's delete timeout behavior.
+
+ Args:
+ timeout: Parameter originally passed to memcache.delete or
+ memcache.delete_multi to control deletion timeout.
+ """
+ self.will_expire = True
+ self.locked = True
+ self._SetExpiration(timeout)
+
+ def CheckLocked(self):
+ """Returns True if this entry was deleted but has not yet timed out."""
+ return self.locked and not self.CheckExpired()
+
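+# A minimal CacheEntry sketch (illustrative values; time.time is the same
+# default clock MemcacheServiceStub uses below):
+#
+#   entry = CacheEntry('payload', expiration=60, flags=0, gettime=time.time)
+#   entry.CheckExpired()  # False until 60 seconds have elapsed
+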
+
+class MemcacheServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only memcache service stub.
+
+ This stub keeps all data in the local process' memory, not in any
+ external servers.
+ """
+
+ def __init__(self, gettime=time.time, service_name='memcache'):
+ """Initializer.
+
+ Args:
+ gettime: time.time()-like function used for testing.
+ service_name: Service name expected for all calls.
+ """
+ super(MemcacheServiceStub, self).__init__(service_name)
+ self._gettime = gettime
+ self._ResetStats()
+
+ self._the_cache = {}
+
+ def _ResetStats(self):
+ """Resets statistics information."""
+ self._hits = 0
+ self._misses = 0
+ self._byte_hits = 0
+ self._cache_creation_time = self._gettime()
+
+ def _GetKey(self, namespace, key):
+ """Retrieves a CacheEntry from the cache if it hasn't expired.
+
+ Does not take deletion timeout into account.
+
+ Args:
+ namespace: The namespace that keys are stored under.
+ key: The key to retrieve from the cache.
+
+ Returns:
+ The corresponding CacheEntry instance, or None if it was not found or
+ has already expired.
+ """
+ namespace_dict = self._the_cache.get(namespace, None)
+ if namespace_dict is None:
+ return None
+ entry = namespace_dict.get(key, None)
+ if entry is None:
+ return None
+ elif entry.CheckExpired():
+ del namespace_dict[key]
+ return None
+ else:
+ return entry
+
+ def _Dynamic_Get(self, request, response):
+ """Implementation of MemcacheService::Get().
+
+ Args:
+ request: A MemcacheGetRequest.
+ response: A MemcacheGetResponse.
+ """
+ namespace = request.name_space()
+ keys = set(request.key_list())
+ for key in keys:
+ entry = self._GetKey(namespace, key)
+ if entry is None or entry.CheckLocked():
+ self._misses += 1
+ continue
+ self._hits += 1
+ self._byte_hits += len(entry.value)
+ item = response.add_item()
+ item.set_key(key)
+ item.set_value(entry.value)
+ item.set_flags(entry.flags)
+
+ def _Dynamic_Set(self, request, response):
+ """Implementation of MemcacheService::Set().
+
+ Args:
+ request: A MemcacheSetRequest.
+ response: A MemcacheSetResponse.
+ """
+ namespace = request.name_space()
+ for item in request.item_list():
+ key = item.key()
+ set_policy = item.set_policy()
+ old_entry = self._GetKey(namespace, key)
+
+ set_status = MemcacheSetResponse.NOT_STORED
+ if ((set_policy == MemcacheSetRequest.SET) or
+ (set_policy == MemcacheSetRequest.ADD and old_entry is None) or
+ (set_policy == MemcacheSetRequest.REPLACE and old_entry is not None)):
+
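+ # A delete-locked entry only accepts SET; ADD and REPLACE must wait
+ # out the delete timeout (see CacheEntry.ExpireAndLock).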
+ if (old_entry is None or
+ set_policy == MemcacheSetRequest.SET
+ or not old_entry.CheckLocked()):
+ if namespace not in self._the_cache:
+ self._the_cache[namespace] = {}
+ self._the_cache[namespace][key] = CacheEntry(item.value(),
+ item.expiration_time(),
+ item.flags(),
+ gettime=self._gettime)
+ set_status = MemcacheSetResponse.STORED
+
+ response.add_set_status(set_status)
+
+ def _Dynamic_Delete(self, request, response):
+ """Implementation of MemcacheService::Delete().
+
+ Args:
+ request: A MemcacheDeleteRequest.
+ response: A MemcacheDeleteResponse.
+ """
+ namespace = request.name_space()
+ for item in request.item_list():
+ key = item.key()
+ entry = self._GetKey(namespace, key)
+
+ delete_status = MemcacheDeleteResponse.DELETED
+ if entry is None:
+ delete_status = MemcacheDeleteResponse.NOT_FOUND
+ elif item.delete_time() == 0:
+ del self._the_cache[namespace][key]
+ else:
+ entry.ExpireAndLock(item.delete_time())
+
+ response.add_delete_status(delete_status)
+
+ def _Dynamic_Increment(self, request, response):
+ """Implementation of MemcacheService::Increment().
+
+ Args:
+ request: A MemcacheIncrementRequest.
+ response: A MemcacheIncrementResponse.
+ """
+ namespace = request.name_space()
+ key = request.key()
+ entry = self._GetKey(namespace, key)
+ if entry is None:
+ if not request.has_initial_value():
+ return
+ if namespace not in self._the_cache:
+ self._the_cache[namespace] = {}
+ self._the_cache[namespace][key] = CacheEntry(str(request.initial_value()),
+ expiration=0,
+ flags=0,
+ gettime=self._gettime)
+ entry = self._GetKey(namespace, key)
+ assert entry is not None
+
+ try:
+ old_value = long(entry.value)
+ if old_value < 0:
+ raise ValueError
+ except ValueError:
+ logging.error('Increment/decrement failed: Could not interpret '
+ 'value for key = "%s" as an unsigned integer.', key)
+ return
+
+ delta = request.delta()
+ if request.direction() == MemcacheIncrementRequest.DECREMENT:
+ delta = -delta
+
+ new_value = old_value + delta
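+ # Keep the counter in the unsigned 64-bit range; this stub resets
+ # out-of-range results to zero.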
+ if not (0 <= new_value < 2**64):
+ new_value = 0
+
+ entry.value = str(new_value)
+ response.set_new_value(new_value)
+
+ def _Dynamic_FlushAll(self, request, response):
+ """Implementation of MemcacheService::FlushAll().
+
+ Args:
+ request: A MemcacheFlushRequest.
+ response: A MemcacheFlushResponse.
+ """
+ self._the_cache.clear()
+ self._ResetStats()
+
+ def _Dynamic_Stats(self, request, response):
+ """Implementation of MemcacheService::Stats().
+
+ Args:
+ request: A MemcacheStatsRequest.
+ response: A MemcacheStatsResponse.
+ """
+ stats = response.mutable_stats()
+ stats.set_hits(self._hits)
+ stats.set_misses(self._misses)
+ stats.set_byte_hits(self._byte_hits)
+ items = 0
+ total_bytes = 0
+ for namespace in self._the_cache.itervalues():
+ items += len(namespace)
+ for entry in namespace.itervalues():
+ total_bytes += len(entry.value)
+ stats.set_items(items)
+ stats.set_bytes(total_bytes)
+
+ stats.set_oldest_item_age(self._gettime() - self._cache_creation_time)
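+
+# A minimal registration sketch (assumes the caller configures the global
+# apiproxy_stub_map, as dev_appserver does; illustrative, not executed here):
+#
+#   from google.appengine.api import apiproxy_stub_map
+#   apiproxy_stub_map.apiproxy.RegisterStub('memcache', MemcacheServiceStub())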
diff --git a/google_appengine/google/appengine/api/memcache/memcache_stub.pyc b/google_appengine/google/appengine/api/memcache/memcache_stub.pyc
new file mode 100644
index 0000000..d16bb1c
--- /dev/null
+++ b/google_appengine/google/appengine/api/memcache/memcache_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/namespace_manager/__init__.py b/google_appengine/google/appengine/api/namespace_manager/__init__.py
new file mode 100755
index 0000000..43e68af
--- /dev/null
+++ b/google_appengine/google/appengine/api/namespace_manager/__init__.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Control the namespacing system used by various APIs.
+
+Each API call can specify an alternate namespace, but the functions
+here can be used to change the default namespace. The default is set
+before user code begins executing.
+"""
+
+
+
+import os
+
+ENV_DEFAULT_NAMESPACE = 'HTTP_X_APPENGINE_DEFAULT_NAMESPACE'
+ENV_CURRENT_NAMESPACE = '__INTERNAL_CURRENT_NAMESPACE'
+
+
+def set_request_namespace(namespace):
+ """Set the default namespace to use for future calls, for this request only.
+
+ Args:
+ namespace: A string naming the new namespace to use. The empty
+ string specifies the root namespace for this app.
+ """
+ os.environ[ENV_CURRENT_NAMESPACE] = namespace
+
+
+def get_request_namespace():
+ """Get the name of the current default namespace.
+
+ The empty string indicates that the root namespace is the default.
+ """
+ return os.getenv(ENV_CURRENT_NAMESPACE, '')
+
+
+def _enable_request_namespace():
+ """Automatically enable namespace to default for domain.
+
+ Calling this function will automatically default the namespace to the
+ chosen Google Apps domain for the current request.
+ """
+ if ENV_CURRENT_NAMESPACE not in os.environ:
+ if ENV_DEFAULT_NAMESPACE in os.environ:
+ os.environ[ENV_CURRENT_NAMESPACE] = os.environ[ENV_DEFAULT_NAMESPACE]
+ else:
+ os.environ[ENV_CURRENT_NAMESPACE] = ''
+
+
+def _add_name_space(request, namespace=None):
+ """Add a name_space field to a request.
+
+ Args:
+ request: A protocol buffer supporting the set_name_space() operation.
+ namespace: The name of the namespace part. If None, use the
+ default namespace.
+ """
+ if namespace is None:
+ request.set_name_space(get_request_namespace())
+ else:
+ request.set_name_space(namespace)
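+
+
+# A minimal usage sketch ('example.com' is an illustrative namespace):
+#
+#   set_request_namespace('example.com')
+#   assert get_request_namespace() == 'example.com'
+#   set_request_namespace('')  # back to the root namespace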
diff --git a/google_appengine/google/appengine/api/namespace_manager/__init__.pyc b/google_appengine/google/appengine/api/namespace_manager/__init__.pyc
new file mode 100644
index 0000000..5bb0673
--- /dev/null
+++ b/google_appengine/google/appengine/api/namespace_manager/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/queueinfo.py b/google_appengine/google/appengine/api/queueinfo.py
new file mode 100755
index 0000000..bdaa358
--- /dev/null
+++ b/google_appengine/google/appengine/api/queueinfo.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""QueueInfo tools.
+
+A library for working with QueueInfo records, describing task queue entries
+for an application. Supports loading the records from queue.yaml.
+
+A queue has two required parameters and one optional one. The required
+parameters are 'name' (must be unique for an appid) and 'rate' (the rate
+at which jobs in the queue are run). There is an optional parameter
+'bucket_size' that will allow tokens to be 'saved up' (for more on the
+algorithm, see http://en.wikipedia.org/wiki/Token_Bucket). rate is expressed
+as number/unit, with number being an int or a float, and unit being one of
+'s' (seconds), 'm' (minutes), 'h' (hours) or 'd' (days). bucket_size is
+an integer.
+
+An example of the use of bucket_size with rate: the free email quota is 2000/d,
+and the maximum you can send in a single minute is 11. So we can define a
+queue for sending email like this:
+
+queue:
+- name: mail_queue
+ rate: 2000/d
+ bucket_size: 10
+
+If this queue had been idle for a while before some jobs were submitted to it,
+the first 10 jobs submitted would be run immediately, then subsequent ones
+would be run once every 40s or so. The limit of 2000 per day would still apply.
+"""
+
+
+
+from google.appengine.api import validation
+from google.appengine.api import yaml_builder
+from google.appengine.api import yaml_listener
+from google.appengine.api import yaml_object
+
+_NAME_REGEX = r'^[A-Za-z0-9-]{0,499}$'
+_RATE_REGEX = r'^(0|[0-9]+(\.[0-9]*)?/[smhd])'
+
+QUEUE = 'queue'
+
+NAME = 'name'
+RATE = 'rate'
+BUCKET_SIZE = 'bucket_size'
+
+
+class MalformedQueueConfiguration(Exception):
+ """Configuration file for Task Queue is malformed."""
+
+
+class QueueEntry(validation.Validated):
+ """A queue entry describes a single task queue."""
+ ATTRIBUTES = {
+ NAME: _NAME_REGEX,
+ RATE: _RATE_REGEX,
+ BUCKET_SIZE: validation.Optional(validation.TYPE_INT),
+ }
+
+
+class QueueInfoExternal(validation.Validated):
+ """QueueInfoExternal describes all queue entries for an application."""
+ ATTRIBUTES = {
+ QUEUE: validation.Optional(validation.Repeated(QueueEntry))
+ }
+
+
+def LoadSingleQueue(queue_info):
+ """Load a queue.yaml file or string and return a QueueInfoExternal object.
+
+ Args:
+ queue_info: the contents of a queue.yaml file, as a string.
+
+ Returns:
+ A QueueInfoExternal object.
+ """
+ builder = yaml_object.ObjectBuilder(QueueInfoExternal)
+ handler = yaml_builder.BuilderHandler(builder)
+ listener = yaml_listener.EventListener(handler)
+ listener.Parse(queue_info)
+
+ queue_info = handler.GetResults()
+ if len(queue_info) < 1:
+ raise MalformedQueueConfiguration('Empty queue configuration.')
+ if len(queue_info) > 1:
+ raise MalformedQueueConfiguration('Multiple queue: sections '
+ 'in configuration.')
+ return queue_info[0]
+
+
+def ParseRate(rate):
+ """Parses a rate string in the form number/unit, or the literal 0.
+
+ The unit is one of s (seconds), m (minutes), h (hours) or d (days).
+
+ Args:
+ rate: the rate string.
+
+ Returns:
+ a floating point number representing the rate/second.
+
+ Raises:
+ MalformedQueueConfiguration: if the rate is invalid
+ """
+ if rate == "0":
+ return 0.0
+ elements = rate.split('/')
+ if len(elements) != 2:
+ raise MalformedQueueConfiguration('Rate "%s" is invalid.' % rate)
+ number, unit = elements
+ try:
+ number = float(number)
+ except ValueError:
+ raise MalformedQueueConfiguration('Rate "%s" is invalid:'
+ ' "%s" is not a number.' %
+ (rate, number))
+ if unit not in 'smhd':
+ raise MalformedQueueConfiguration('Rate "%s" is invalid:'
+ ' "%s" is not one of s, m, h, d.' %
+ (rate, unit))
+ if unit == 's':
+ return number
+ if unit == 'm':
+ return number/60
+ if unit == 'h':
+ return number/(60 * 60)
+ if unit == 'd':
+ return number/(24 * 60 * 60)
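+
+
+# Illustrative ParseRate results ('2000/d' matches the module docstring
+# example; the others are hypothetical inputs):
+#
+#   ParseRate('2000/d')  # ~0.0231 tasks/second
+#   ParseRate('10/m')    # ~0.167 tasks/second
+#   ParseRate('0')       # 0.0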
diff --git a/google_appengine/google/appengine/api/queueinfo.pyc b/google_appengine/google/appengine/api/queueinfo.pyc
new file mode 100644
index 0000000..74dd348
--- /dev/null
+++ b/google_appengine/google/appengine/api/queueinfo.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/quota.py b/google_appengine/google/appengine/api/quota.py
new file mode 100755
index 0000000..3168eb2
--- /dev/null
+++ b/google_appengine/google/appengine/api/quota.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Access to quota usage for this application."""
+
+
+
+
+try:
+ from google3.apphosting.runtime import _apphosting_runtime___python__apiproxy
+except ImportError:
+ _apphosting_runtime___python__apiproxy = None
+
+def get_request_cpu_usage():
+ """Get the amount of CPU used so far for the current request.
+
+ Returns the number of megacycles used so far for the current
+ request. Does not include CPU used by API calls.
+
+ Returns 0 when used in the dev_appserver.
+ """
+
+ if _apphosting_runtime___python__apiproxy:
+ return _apphosting_runtime___python__apiproxy.get_request_cpu_usage()
+ return 0
+
+def get_request_api_cpu_usage():
+ """Get the amount of CPU used so far by API calls during the current request.
+
+ Returns the number of megacycles used so far by API calls for the current
+ request. Does not include CPU used by code in the request itself.
+
+ Returns 0 when used in the dev_appserver.
+ """
+
+ if _apphosting_runtime___python__apiproxy:
+ return _apphosting_runtime___python__apiproxy.get_request_api_cpu_usage()
+ return 0
+
+MCYCLES_PER_SECOND = 1200.0
+"""Megacycles to CPU seconds. Convert by using a 1.2 GHz 64-bit x86 CPU."""
+
+def megacycles_to_cpu_seconds(mcycles):
+ """Convert an input value in megacycles to CPU-seconds.
+
+ Returns a float representing the CPU-seconds that the input megacycle
+ value converts to.
+ """
+ return mcycles / MCYCLES_PER_SECOND
+
+def cpu_seconds_to_megacycles(cpu_secs):
+ """Convert an input value in CPU-seconds to megacycles.
+
+ Returns an integer representing the megacycles the input CPU-seconds value
+ converts to.
+ """
+ return int(cpu_secs * MCYCLES_PER_SECOND)
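+
+# Example conversions at 1200 megacycles per CPU-second:
+#
+#   megacycles_to_cpu_seconds(600)   # == 0.5
+#   cpu_seconds_to_megacycles(0.25)  # == 300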
diff --git a/google_appengine/google/appengine/api/urlfetch.py b/google_appengine/google/appengine/api/urlfetch.py
new file mode 100755
index 0000000..8d9e836
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch.py
@@ -0,0 +1,361 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""URL downloading API.
+
+Methods defined in this module:
+ Fetch(): fetches a given URL using an HTTP GET or POST
+"""
+
+
+
+
+
+import os
+import UserDict
+import urllib2
+import urlparse
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import urlfetch_service_pb
+from google.appengine.api.urlfetch_errors import *
+from google.appengine.runtime import apiproxy_errors
+
+MAX_REDIRECTS = 5
+
+GET = 1
+POST = 2
+HEAD = 3
+PUT = 4
+DELETE = 5
+
+
+_URL_STRING_MAP = {
+ 'GET': GET,
+ 'POST': POST,
+ 'HEAD': HEAD,
+ 'PUT': PUT,
+ 'DELETE': DELETE,
+}
+
+
+_VALID_METHODS = frozenset(_URL_STRING_MAP.values())
+
+
+class _CaselessDict(UserDict.IterableUserDict):
+ """Case insensitive dictionary.
+
+ This class was lifted from os.py and slightly modified.
+ """
+
+ def __init__(self):
+ UserDict.IterableUserDict.__init__(self)
+ self.caseless_keys = {}
+
+ def __setitem__(self, key, item):
+ """Set dictionary item.
+
+ Args:
+ key: Key of new item. Key is case insensitive, so "d['Key'] = value"
+ will replace previous values set by "d['key'] = old_value".
+ item: Item to store.
+ """
+ caseless_key = key.lower()
+ if caseless_key in self.caseless_keys:
+ del self.data[self.caseless_keys[caseless_key]]
+ self.caseless_keys[caseless_key] = key
+ self.data[key] = item
+
+ def __getitem__(self, key):
+ """Get dictionary item.
+
+ Args:
+ key: Key of item to get. Key is case insensitive, so "d['Key']" is the
+ same as "d['key']".
+
+ Returns:
+ Item associated with key.
+ """
+ return self.data[self.caseless_keys[key.lower()]]
+
+ def __delitem__(self, key):
+ """Remove item from dictionary.
+
+ Args:
+ key: Key of item to remove. Key is case insensitive, so "del d['Key']" is
+ the same as "del d['key']"
+ """
+ caseless_key = key.lower()
+ del self.data[self.caseless_keys[caseless_key]]
+ del self.caseless_keys[caseless_key]
+
+ def has_key(self, key):
+ """Determine if dictionary has item with specific key.
+
+ Args:
+ key: Key to check for presence. Key is case insensitive, so
+ "d.has_key('Key')" evaluates to the same value as "d.has_key('key')".
+
+ Returns:
+ True if dictionary contains key, else False.
+ """
+ return key.lower() in self.caseless_keys
+
+ def __contains__(self, key):
+ """Same as 'has_key', but used for 'in' operator.'"""
+ return self.has_key(key)
+
+ def get(self, key, failobj=None):
+ """Get dictionary item, defaulting to another value if it does not exist.
+
+ Args:
+ key: Key of item to get. Key is case insensitive, so "d['Key']" is the
+ same as "d['key']".
+ failobj: Value to return if key not in dictionary.
+ """
+ try:
+ cased_key = self.caseless_keys[key.lower()]
+ except KeyError:
+ return failobj
+ return self.data[cased_key]
+
+ def update(self, dict=None, **kwargs):
+ """Update dictionary using values from another dictionary and keywords.
+
+ Args:
+ dict: Dictionary to update from.
+ kwargs: Keyword arguments to update from.
+ """
+ if dict:
+ try:
+ keys = dict.keys()
+ except AttributeError:
+ for k, v in dict:
+ self[k] = v
+ else:
+ for k in keys:
+ self[k] = dict[k]
+ if kwargs:
+ self.update(kwargs)
+
+ def copy(self):
+ """Make a shallow, case sensitive copy of self."""
+ return dict(self)
+
+
+def _is_fetching_self(url, method):
+ """Checks if the fetch is for the same URL from which it originated.
+
+ Args:
+ url: str, The URL being fetched.
+ method: value from _VALID_METHODS.
+
+ Returns:
+ boolean indicating whether or not it seems that the app is trying to fetch
+ itself.
+ """
+ if (method != GET or
+ "HTTP_HOST" not in os.environ or
+ "PATH_INFO" not in os.environ):
+ return False
+
+ scheme, host_port, path, query, fragment = urlparse.urlsplit(url)
+
+ if host_port == os.environ['HTTP_HOST']:
+ current_path = urllib2.unquote(os.environ['PATH_INFO'])
+ desired_path = urllib2.unquote(path)
+
+ if (current_path == desired_path or
+ (current_path in ('', '/') and desired_path in ('', '/'))):
+ return True
+
+ return False
+
+
+def create_rpc(deadline=None, callback=None):
+ """Creates an RPC object for use with the urlfetch API.
+
+ Args:
+ deadline: Optional deadline in seconds for the operation; the default
+ is a system-specific deadline (typically 5 seconds).
+ callback: Optional callable to invoke on completion.
+
+ Returns:
+ An apiproxy_stub_map.UserRPC object specialized for this service.
+ """
+ return apiproxy_stub_map.UserRPC('urlfetch', deadline, callback)
+
+
+def fetch(url, payload=None, method=GET, headers={},
+ allow_truncated=False, follow_redirects=True,
+ deadline=None):
+ """Fetches the given HTTP URL, blocking until the result is returned.
+
+ Other optional parameters are:
+ method: GET, POST, HEAD, PUT, or DELETE
+ payload: POST or PUT payload (implies method is not GET, HEAD, or DELETE).
+ This is ignored if the method is not POST or PUT.
+ headers: dictionary of HTTP headers to send with the request
+ allow_truncated: if true, truncate large responses and return them without
+ error. Otherwise, ResponseTooLargeError is raised when a response is
+ truncated.
+ follow_redirects: if true (the default), redirects are transparently
+ followed, and the response (if fewer than 5 redirects occurred)
+ contains the final destination's payload with a status of 200; the
+ redirect chain information is lost, however. If false, you see the
+ HTTP response yourself, including the 'Location' header, and
+ redirects are not followed.
+ deadline: deadline in seconds for the operation.
+
+ We use an HTTP/1.1-compliant proxy to fetch the result.
+
+ The returned data structure has the following fields:
+ content: string containing the response from the server
+ status_code: HTTP status code returned by the server
+ headers: dictionary of headers returned by the server
+
+ If the URL is an empty string or obviously invalid, we throw a
+ urlfetch.InvalidURLError. If the server cannot be contacted, we throw a
+ urlfetch.DownloadError. Note that HTTP errors are returned as part of
+ the returned structure, so HTTP errors like 404 do not result in an
+ exception.
+ """
+ rpc = create_rpc(deadline=deadline)
+ make_fetch_call(rpc, url, payload, method, headers,
+ allow_truncated, follow_redirects)
+ return rpc.get_result()
+
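+# A minimal fetch() sketch ('http://example.com/' is an illustrative URL and
+# 'process' a hypothetical handler, not part of this module):
+#
+#   result = fetch('http://example.com/')
+#   if result.status_code == 200:
+#     process(result.content)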
+
+def make_fetch_call(rpc, url, payload=None, method=GET, headers={},
+ allow_truncated=False, follow_redirects=True):
+ """Executes the RPC call to fetch a given HTTP URL.
+
+ The first argument is a UserRPC instance. See urlfetch.fetch for a
+ thorough description of remaining arguments.
+ """
+ assert rpc.service == 'urlfetch', repr(rpc.service)
+ if isinstance(method, basestring):
+ method = method.upper()
+ method = _URL_STRING_MAP.get(method, method)
+ if method not in _VALID_METHODS:
+ raise InvalidMethodError('Invalid method %s.' % str(method))
+
+ if _is_fetching_self(url, method):
+ raise InvalidURLError("App cannot fetch the same URL as the one used for "
+ "the request.")
+
+ request = urlfetch_service_pb.URLFetchRequest()
+ response = urlfetch_service_pb.URLFetchResponse()
+ request.set_url(url)
+
+ if method == GET:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
+ elif method == POST:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.POST)
+ elif method == HEAD:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.HEAD)
+ elif method == PUT:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.PUT)
+ elif method == DELETE:
+ request.set_method(urlfetch_service_pb.URLFetchRequest.DELETE)
+
+ if payload and (method == POST or method == PUT):
+ request.set_payload(payload)
+
+ for key, value in headers.iteritems():
+ header_proto = request.add_header()
+ header_proto.set_key(key)
+ header_proto.set_value(str(value))
+
+ request.set_followredirects(follow_redirects)
+
+ if rpc.deadline is not None:
+ request.set_deadline(rpc.deadline)
+
+ rpc.make_call('Fetch', request, response, _get_fetch_result, allow_truncated)
+
+
+def _get_fetch_result(rpc):
+ """Check success, handle exceptions, and return converted RPC result.
+
+ This method waits for the RPC if it has not yet finished, and calls the
+ post-call hooks on the first invocation.
+
+ Args:
+ rpc: A UserRPC object.
+
+ Raises:
+ InvalidURLError if the url was invalid.
+ DownloadError if there was a problem fetching the url.
+ ResponseTooLargeError if the response was either truncated (and
+ allow_truncated=False was passed to make_fetch_call()), or if it
+ was too big for us to download.
+
+ Returns:
+ A _URLFetchResult object.
+ """
+ assert rpc.service == 'urlfetch', repr(rpc.service)
+ assert rpc.method == 'Fetch', repr(rpc.method)
+ try:
+ rpc.check_success()
+ except apiproxy_errors.ApplicationError, err:
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.INVALID_URL):
+ raise InvalidURLError(str(err))
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR):
+ raise DownloadError(str(err))
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR):
+ raise DownloadError(str(err))
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.RESPONSE_TOO_LARGE):
+ raise ResponseTooLargeError(None)
+ if (err.application_error ==
+ urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED):
+ raise DownloadError(str(err))
+ raise err
+
+ response = rpc.response
+ allow_truncated = rpc.user_data
+ result = _URLFetchResult(response)
+ if response.contentwastruncated() and not allow_truncated:
+ raise ResponseTooLargeError(result)
+ return result
+
+
+Fetch = fetch
+
+
+class _URLFetchResult(object):
+ """A Pythonic representation of our fetch response protocol buffer.
+ """
+
+ def __init__(self, response_proto):
+ """Constructor.
+
+ Args:
+ response_proto: the URLFetchResponse proto buffer to wrap.
+ """
+ self.__pb = response_proto
+ self.content = response_proto.content()
+ self.status_code = response_proto.statuscode()
+ self.content_was_truncated = response_proto.contentwastruncated()
+ self.headers = _CaselessDict()
+ for header_proto in response_proto.header_list():
+ self.headers[header_proto.key()] = header_proto.value()
diff --git a/google_appengine/google/appengine/api/urlfetch.pyc b/google_appengine/google/appengine/api/urlfetch.pyc
new file mode 100644
index 0000000..3b53f6e
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/urlfetch_errors.py b/google_appengine/google/appengine/api/urlfetch_errors.py
new file mode 100755
index 0000000..e71ca5d
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_errors.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""Errors used in the urlfetch API
+developers.
+"""
+
+
+
+
+
+
+class Error(Exception):
+ """Base URL fetcher error type."""
+
+
+class InvalidURLError(Error):
+ """Raised when the URL given is empty or invalid.
+
+ Only http: and https: URLs are allowed. The maximum URL length
+ allowed is 2048 characters. The login/pass portion is not
+ allowed. In deployed applications, only ports 80 and 443 for http
+ and https respectively are allowed.
+ """
+
+
+class DownloadError(Error):
+ """Raised when we could not fetch the URL for any reason.
+
+ Note that this exception is only raised when we could not contact the
+ server. HTTP errors (e.g., 404) are returned as the status_code field
+ in the return value of Fetch, and no exception is raised.
+ """
+
+
+class ResponseTooLargeError(Error):
+ """Raised when the response was too large and was truncated."""
+ def __init__(self, response):
+ self.response = response
+
+
+class InvalidMethodError(Error):
+ """Raised when an invalid value for 'method' is provided"""
diff --git a/google_appengine/google/appengine/api/urlfetch_errors.pyc b/google_appengine/google/appengine/api/urlfetch_errors.pyc
new file mode 100644
index 0000000..1d41770
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_errors.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/urlfetch_service_pb.py b/google_appengine/google/appengine/api/urlfetch_service_pb.py
new file mode 100644
index 0000000..bf513a3
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_service_pb.py
@@ -0,0 +1,823 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class URLFetchServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ INVALID_URL = 1
+ FETCH_ERROR = 2
+ UNSPECIFIED_ERROR = 3
+ RESPONSE_TOO_LARGE = 4
+ DEADLINE_EXCEEDED = 5
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "INVALID_URL",
+ 2: "FETCH_ERROR",
+ 3: "UNSPECIFIED_ERROR",
+ 4: "RESPONSE_TOO_LARGE",
+ 5: "DEADLINE_EXCEEDED",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class URLFetchRequest_Header(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(42)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 28: break
+ if tt == 34:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 42:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("Key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("Value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+class URLFetchRequest(ProtocolBuffer.ProtocolMessage):
+
+ GET = 1
+ POST = 2
+ HEAD = 3
+ PUT = 4
+ DELETE = 5
+
+ _RequestMethod_NAMES = {
+ 1: "GET",
+ 2: "POST",
+ 3: "HEAD",
+ 4: "PUT",
+ 5: "DELETE",
+ }
+
+ def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
+ RequestMethod_Name = classmethod(RequestMethod_Name)
+
+ has_method_ = 0
+ method_ = 0
+ has_url_ = 0
+ url_ = ""
+ has_payload_ = 0
+ payload_ = ""
+ has_followredirects_ = 0
+ followredirects_ = 1
+ has_deadline_ = 0
+ deadline_ = 0.0
+
+ def __init__(self, contents=None):
+ self.header_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def method(self): return self.method_
+
+ def set_method(self, x):
+ self.has_method_ = 1
+ self.method_ = x
+
+ def clear_method(self):
+ if self.has_method_:
+ self.has_method_ = 0
+ self.method_ = 0
+
+ def has_method(self): return self.has_method_
+
+ def url(self): return self.url_
+
+ def set_url(self, x):
+ self.has_url_ = 1
+ self.url_ = x
+
+ def clear_url(self):
+ if self.has_url_:
+ self.has_url_ = 0
+ self.url_ = ""
+
+ def has_url(self): return self.has_url_
+
+ def header_size(self): return len(self.header_)
+ def header_list(self): return self.header_
+
+ def header(self, i):
+ return self.header_[i]
+
+ def mutable_header(self, i):
+ return self.header_[i]
+
+ def add_header(self):
+ x = URLFetchRequest_Header()
+ self.header_.append(x)
+ return x
+
+ def clear_header(self):
+ self.header_ = []
+ def payload(self): return self.payload_
+
+ def set_payload(self, x):
+ self.has_payload_ = 1
+ self.payload_ = x
+
+ def clear_payload(self):
+ if self.has_payload_:
+ self.has_payload_ = 0
+ self.payload_ = ""
+
+ def has_payload(self): return self.has_payload_
+
+ def followredirects(self): return self.followredirects_
+
+ def set_followredirects(self, x):
+ self.has_followredirects_ = 1
+ self.followredirects_ = x
+
+ def clear_followredirects(self):
+ if self.has_followredirects_:
+ self.has_followredirects_ = 0
+ self.followredirects_ = 1
+
+ def has_followredirects(self): return self.has_followredirects_
+
+ def deadline(self): return self.deadline_
+
+ def set_deadline(self, x):
+ self.has_deadline_ = 1
+ self.deadline_ = x
+
+ def clear_deadline(self):
+ if self.has_deadline_:
+ self.has_deadline_ = 0
+ self.deadline_ = 0.0
+
+ def has_deadline(self): return self.has_deadline_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_method()): self.set_method(x.method())
+ if (x.has_url()): self.set_url(x.url())
+ for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
+ if (x.has_payload()): self.set_payload(x.payload())
+ if (x.has_followredirects()): self.set_followredirects(x.followredirects())
+ if (x.has_deadline()): self.set_deadline(x.deadline())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_method_ != x.has_method_: return 0
+ if self.has_method_ and self.method_ != x.method_: return 0
+ if self.has_url_ != x.has_url_: return 0
+ if self.has_url_ and self.url_ != x.url_: return 0
+ if len(self.header_) != len(x.header_): return 0
+ for e1, e2 in zip(self.header_, x.header_):
+ if e1 != e2: return 0
+ if self.has_payload_ != x.has_payload_: return 0
+ if self.has_payload_ and self.payload_ != x.payload_: return 0
+ if self.has_followredirects_ != x.has_followredirects_: return 0
+ if self.has_followredirects_ and self.followredirects_ != x.followredirects_: return 0
+ if self.has_deadline_ != x.has_deadline_: return 0
+ if self.has_deadline_ and self.deadline_ != x.deadline_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_method_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: method not set.')
+ if (not self.has_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: url not set.')
+ for p in self.header_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthVarInt64(self.method_)
+ n += self.lengthString(len(self.url_))
+ n += 2 * len(self.header_)
+ for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
+ if (self.has_payload_): n += 1 + self.lengthString(len(self.payload_))
+ if (self.has_followredirects_): n += 2
+ if (self.has_deadline_): n += 9
+ return n + 2
+
+ def Clear(self):
+ self.clear_method()
+ self.clear_url()
+ self.clear_header()
+ self.clear_payload()
+ self.clear_followredirects()
+ self.clear_deadline()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putVarInt32(self.method_)
+ out.putVarInt32(18)
+ out.putPrefixedString(self.url_)
+ for i in xrange(len(self.header_)):
+ out.putVarInt32(27)
+ self.header_[i].OutputUnchecked(out)
+ out.putVarInt32(28)
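+ # 27 and 28 are the STARTGROUP and ENDGROUP tags for the Header group,
+ # field 3: (3 << 3) | 3 == 27 and (3 << 3) | 4 == 28.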
+ if (self.has_payload_):
+ out.putVarInt32(50)
+ out.putPrefixedString(self.payload_)
+ if (self.has_followredirects_):
+ out.putVarInt32(56)
+ out.putBoolean(self.followredirects_)
+ if (self.has_deadline_):
+ out.putVarInt32(65)
+ out.putDouble(self.deadline_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_method(d.getVarInt32())
+ continue
+ if tt == 18:
+ self.set_url(d.getPrefixedString())
+ continue
+ if tt == 27:
+ self.add_header().TryMerge(d)
+ continue
+ if tt == 50:
+ self.set_payload(d.getPrefixedString())
+ continue
+ if tt == 56:
+ self.set_followredirects(d.getBoolean())
+ continue
+ if tt == 65:
+ self.set_deadline(d.getDouble())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_method_: res+=prefix+("Method: %s\n" % self.DebugFormatInt32(self.method_))
+ if self.has_url_: res+=prefix+("Url: %s\n" % self.DebugFormatString(self.url_))
+ cnt=0
+ for e in self.header_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Header%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_payload_: res+=prefix+("Payload: %s\n" % self.DebugFormatString(self.payload_))
+ if self.has_followredirects_: res+=prefix+("FollowRedirects: %s\n" % self.DebugFormatBool(self.followredirects_))
+ if self.has_deadline_: res+=prefix+("Deadline: %s\n" % self.DebugFormat(self.deadline_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kMethod = 1
+ kUrl = 2
+ kHeaderGroup = 3
+ kHeaderKey = 4
+ kHeaderValue = 5
+ kPayload = 6
+ kFollowRedirects = 7
+ kDeadline = 8
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Method",
+ 2: "Url",
+ 3: "Header",
+ 4: "Key",
+ 5: "Value",
+ 6: "Payload",
+ 7: "FollowRedirects",
+ 8: "Deadline",
+ }, 8)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.STARTGROUP,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 6: ProtocolBuffer.Encoder.STRING,
+ 7: ProtocolBuffer.Encoder.NUMERIC,
+ 8: ProtocolBuffer.Encoder.DOUBLE,
+ }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class URLFetchResponse_Header(ProtocolBuffer.ProtocolMessage):
+ has_key_ = 0
+ key_ = ""
+ has_value_ = 0
+ value_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def key(self): return self.key_
+
+ def set_key(self, x):
+ self.has_key_ = 1
+ self.key_ = x
+
+ def clear_key(self):
+ if self.has_key_:
+ self.has_key_ = 0
+ self.key_ = ""
+
+ def has_key(self): return self.has_key_
+
+ def value(self): return self.value_
+
+ def set_value(self, x):
+ self.has_value_ = 1
+ self.value_ = x
+
+ def clear_value(self):
+ if self.has_value_:
+ self.has_value_ = 0
+ self.value_ = ""
+
+ def has_value(self): return self.has_value_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_key()): self.set_key(x.key())
+ if (x.has_value()): self.set_value(x.value())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_key_ != x.has_key_: return 0
+ if self.has_key_ and self.key_ != x.key_: return 0
+ if self.has_value_ != x.has_value_: return 0
+ if self.has_value_ and self.value_ != x.value_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_key_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: key not set.')
+ if (not self.has_value_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: value not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.key_))
+ n += self.lengthString(len(self.value_))
+ return n + 2
+
+ def Clear(self):
+ self.clear_key()
+ self.clear_value()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.key_)
+ out.putVarInt32(42)
+ out.putPrefixedString(self.value_)
+
+ def TryMerge(self, d):
+ while 1:
+ tt = d.getVarInt32()
+ if tt == 28: break
+ if tt == 34:
+ self.set_key(d.getPrefixedString())
+ continue
+ if tt == 42:
+ self.set_value(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_key_: res+=prefix+("Key: %s\n" % self.DebugFormatString(self.key_))
+ if self.has_value_: res+=prefix+("Value: %s\n" % self.DebugFormatString(self.value_))
+ return res
+
+class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
+ has_content_ = 0
+ content_ = ""
+ has_statuscode_ = 0
+ statuscode_ = 0
+ has_contentwastruncated_ = 0
+ contentwastruncated_ = 0
+ has_externalbytessent_ = 0
+ externalbytessent_ = 0
+ has_externalbytesreceived_ = 0
+ externalbytesreceived_ = 0
+
+ def __init__(self, contents=None):
+ self.header_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def content(self): return self.content_
+
+ def set_content(self, x):
+ self.has_content_ = 1
+ self.content_ = x
+
+ def clear_content(self):
+ if self.has_content_:
+ self.has_content_ = 0
+ self.content_ = ""
+
+ def has_content(self): return self.has_content_
+
+ def statuscode(self): return self.statuscode_
+
+ def set_statuscode(self, x):
+ self.has_statuscode_ = 1
+ self.statuscode_ = x
+
+ def clear_statuscode(self):
+ if self.has_statuscode_:
+ self.has_statuscode_ = 0
+ self.statuscode_ = 0
+
+ def has_statuscode(self): return self.has_statuscode_
+
+ def header_size(self): return len(self.header_)
+ def header_list(self): return self.header_
+
+ def header(self, i):
+ return self.header_[i]
+
+ def mutable_header(self, i):
+ return self.header_[i]
+
+ def add_header(self):
+ x = URLFetchResponse_Header()
+ self.header_.append(x)
+ return x
+
+ def clear_header(self):
+ self.header_ = []
+ def contentwastruncated(self): return self.contentwastruncated_
+
+ def set_contentwastruncated(self, x):
+ self.has_contentwastruncated_ = 1
+ self.contentwastruncated_ = x
+
+ def clear_contentwastruncated(self):
+ if self.has_contentwastruncated_:
+ self.has_contentwastruncated_ = 0
+ self.contentwastruncated_ = 0
+
+ def has_contentwastruncated(self): return self.has_contentwastruncated_
+
+ def externalbytessent(self): return self.externalbytessent_
+
+ def set_externalbytessent(self, x):
+ self.has_externalbytessent_ = 1
+ self.externalbytessent_ = x
+
+ def clear_externalbytessent(self):
+ if self.has_externalbytessent_:
+ self.has_externalbytessent_ = 0
+ self.externalbytessent_ = 0
+
+ def has_externalbytessent(self): return self.has_externalbytessent_
+
+ def externalbytesreceived(self): return self.externalbytesreceived_
+
+ def set_externalbytesreceived(self, x):
+ self.has_externalbytesreceived_ = 1
+ self.externalbytesreceived_ = x
+
+ def clear_externalbytesreceived(self):
+ if self.has_externalbytesreceived_:
+ self.has_externalbytesreceived_ = 0
+ self.externalbytesreceived_ = 0
+
+ def has_externalbytesreceived(self): return self.has_externalbytesreceived_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_content()): self.set_content(x.content())
+ if (x.has_statuscode()): self.set_statuscode(x.statuscode())
+ for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
+ if (x.has_contentwastruncated()): self.set_contentwastruncated(x.contentwastruncated())
+ if (x.has_externalbytessent()): self.set_externalbytessent(x.externalbytessent())
+ if (x.has_externalbytesreceived()): self.set_externalbytesreceived(x.externalbytesreceived())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_content_ != x.has_content_: return 0
+ if self.has_content_ and self.content_ != x.content_: return 0
+ if self.has_statuscode_ != x.has_statuscode_: return 0
+ if self.has_statuscode_ and self.statuscode_ != x.statuscode_: return 0
+ if len(self.header_) != len(x.header_): return 0
+ for e1, e2 in zip(self.header_, x.header_):
+ if e1 != e2: return 0
+ if self.has_contentwastruncated_ != x.has_contentwastruncated_: return 0
+ if self.has_contentwastruncated_ and self.contentwastruncated_ != x.contentwastruncated_: return 0
+ if self.has_externalbytessent_ != x.has_externalbytessent_: return 0
+ if self.has_externalbytessent_ and self.externalbytessent_ != x.externalbytessent_: return 0
+ if self.has_externalbytesreceived_ != x.has_externalbytesreceived_: return 0
+ if self.has_externalbytesreceived_ and self.externalbytesreceived_ != x.externalbytesreceived_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_statuscode_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: statuscode not set.')
+ for p in self.header_:
+ if not p.IsInitialized(debug_strs): initialized=0
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_content_): n += 1 + self.lengthString(len(self.content_))
+ n += self.lengthVarInt64(self.statuscode_)
+ n += 2 * len(self.header_)
+ for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
+ if (self.has_contentwastruncated_): n += 2
+ if (self.has_externalbytessent_): n += 1 + self.lengthVarInt64(self.externalbytessent_)
+ if (self.has_externalbytesreceived_): n += 1 + self.lengthVarInt64(self.externalbytesreceived_)
+ return n + 1
+
+ def Clear(self):
+ self.clear_content()
+ self.clear_statuscode()
+ self.clear_header()
+ self.clear_contentwastruncated()
+ self.clear_externalbytessent()
+ self.clear_externalbytesreceived()
+
+ def OutputUnchecked(self, out):
+ if (self.has_content_):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.content_)
+ out.putVarInt32(16)
+ out.putVarInt32(self.statuscode_)
+ for i in xrange(len(self.header_)):
+ out.putVarInt32(27)
+ self.header_[i].OutputUnchecked(out)
+ out.putVarInt32(28)
+ if (self.has_contentwastruncated_):
+ out.putVarInt32(48)
+ out.putBoolean(self.contentwastruncated_)
+ if (self.has_externalbytessent_):
+ out.putVarInt32(56)
+ out.putVarInt64(self.externalbytessent_)
+ if (self.has_externalbytesreceived_):
+ out.putVarInt32(64)
+ out.putVarInt64(self.externalbytesreceived_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_content(d.getPrefixedString())
+ continue
+ if tt == 16:
+ self.set_statuscode(d.getVarInt32())
+ continue
+ if tt == 27:
+ self.add_header().TryMerge(d)
+ continue
+ if tt == 48:
+ self.set_contentwastruncated(d.getBoolean())
+ continue
+ if tt == 56:
+ self.set_externalbytessent(d.getVarInt64())
+ continue
+ if tt == 64:
+ self.set_externalbytesreceived(d.getVarInt64())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_content_: res+=prefix+("Content: %s\n" % self.DebugFormatString(self.content_))
+ if self.has_statuscode_: res+=prefix+("StatusCode: %s\n" % self.DebugFormatInt32(self.statuscode_))
+ cnt=0
+ for e in self.header_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("Header%s {\n" % elm)
+ res+=e.__str__(prefix + " ", printElemNumber)
+ res+=prefix+"}\n"
+ cnt+=1
+ if self.has_contentwastruncated_: res+=prefix+("ContentWasTruncated: %s\n" % self.DebugFormatBool(self.contentwastruncated_))
+ if self.has_externalbytessent_: res+=prefix+("ExternalBytesSent: %s\n" % self.DebugFormatInt64(self.externalbytessent_))
+ if self.has_externalbytesreceived_: res+=prefix+("ExternalBytesReceived: %s\n" % self.DebugFormatInt64(self.externalbytesreceived_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kContent = 1
+ kStatusCode = 2
+ kHeaderGroup = 3
+ kHeaderKey = 4
+ kHeaderValue = 5
+ kContentWasTruncated = 6
+ kExternalBytesSent = 7
+ kExternalBytesReceived = 8
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "Content",
+ 2: "StatusCode",
+ 3: "Header",
+ 4: "Key",
+ 5: "Value",
+ 6: "ContentWasTruncated",
+ 7: "ExternalBytesSent",
+ 8: "ExternalBytesReceived",
+ }, 8)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ 3: ProtocolBuffer.Encoder.STARTGROUP,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ 6: ProtocolBuffer.Encoder.NUMERIC,
+ 7: ProtocolBuffer.Encoder.NUMERIC,
+ 8: ProtocolBuffer.Encoder.NUMERIC,
+ }, 8, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['URLFetchServiceError','URLFetchRequest','URLFetchRequest_Header','URLFetchResponse','URLFetchResponse_Header']
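+
+# A minimal round-trip sketch of the generated messages above.  This demo
+# block is an illustrative addition (not part of the generated module) and
+# assumes the standard ProtocolMessage methods Encode() and CopyFrom() from
+# google.net.proto.ProtocolBuffer:
+if __name__ == '__main__':
+  header = URLFetchResponse_Header()
+  header.set_key('Content-Type')
+  header.set_value('text/plain')
+
+  response = URLFetchResponse()
+  response.set_statuscode(200)
+  response.set_content('hello')
+  response.add_header().CopyFrom(header)
+
+  assert response.IsInitialized()
+  decoded = URLFetchResponse(response.Encode())
+  assert decoded.Equals(response)
+  assert decoded.header(0).key() == 'Content-Type'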
diff --git a/google_appengine/google/appengine/api/urlfetch_service_pb.pyc b/google_appengine/google/appengine/api/urlfetch_service_pb.pyc
new file mode 100644
index 0000000..0c0d0e1
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/urlfetch_stub.py b/google_appengine/google/appengine/api/urlfetch_stub.py
new file mode 100755
index 0000000..d317401
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_stub.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the urlfetch API, based on httplib."""
+
+
+
+import gzip
+import httplib
+import logging
+import socket
+import StringIO
+import urllib
+import urlparse
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import urlfetch
+from google.appengine.api import urlfetch_errors
+from google.appengine.api import urlfetch_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+MAX_RESPONSE_SIZE = 2 ** 24
+
+MAX_REDIRECTS = urlfetch.MAX_REDIRECTS
+
+REDIRECT_STATUSES = frozenset([
+ httplib.MOVED_PERMANENTLY,
+ httplib.FOUND,
+ httplib.SEE_OTHER,
+ httplib.TEMPORARY_REDIRECT,
+])
+
+PORTS_ALLOWED_IN_PRODUCTION = (
+ None, '80', '443', '4443', '8080', '8081', '8082', '8083', '8084', '8085',
+ '8086', '8087', '8088', '8089', '8188', '8444', '8990')
+
+_API_CALL_DEADLINE = 5.0
+
+
+_UNTRUSTED_REQUEST_HEADERS = frozenset([
+ 'content-length',
+ 'host',
+ 'vary',
+ 'via',
+ 'x-forwarded-for',
+])
+
+class URLFetchServiceStub(apiproxy_stub.APIProxyStub):
+ """Stub version of the urlfetch API to be used with apiproxy_stub_map."""
+
+ def __init__(self, service_name='urlfetch'):
+ """Initializer.
+
+ Args:
+ service_name: Service name expected for all calls.
+ """
+ super(URLFetchServiceStub, self).__init__(service_name)
+
+ def _Dynamic_Fetch(self, request, response):
+ """Trivial implementation of URLFetchService::Fetch().
+
+ Args:
+ request: the fetch to perform, a URLFetchRequest
+ response: the fetch response, a URLFetchResponse
+ """
+ (protocol, host, path, parameters, query, fragment) = urlparse.urlparse(request.url())
+
+ payload = None
+ if request.method() == urlfetch_service_pb.URLFetchRequest.GET:
+ method = 'GET'
+ elif request.method() == urlfetch_service_pb.URLFetchRequest.POST:
+ method = 'POST'
+ payload = request.payload()
+ elif request.method() == urlfetch_service_pb.URLFetchRequest.HEAD:
+ method = 'HEAD'
+ elif request.method() == urlfetch_service_pb.URLFetchRequest.PUT:
+ method = 'PUT'
+ payload = request.payload()
+ elif request.method() == urlfetch_service_pb.URLFetchRequest.DELETE:
+ method = 'DELETE'
+ else:
+ logging.error('Invalid method: %s', request.method())
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR)
+
+ if not (protocol == 'http' or protocol == 'https'):
+ logging.error('Invalid protocol: %s', protocol)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
+
+ if not host:
+ logging.error('Missing host.')
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR)
+
+ sanitized_headers = self._SanitizeHttpHeaders(_UNTRUSTED_REQUEST_HEADERS,
+ request.header_list())
+ request.clear_header()
+ request.header_list().extend(sanitized_headers)
+ deadline = _API_CALL_DEADLINE
+ if request.has_deadline():
+ deadline = request.deadline()
+
+ self._RetrieveURL(request.url(), payload, method,
+ request.header_list(), response,
+ follow_redirects=request.followredirects(),
+ deadline=deadline)
+
+ def _RetrieveURL(self, url, payload, method, headers, response,
+ follow_redirects=True, deadline=_API_CALL_DEADLINE):
+ """Retrieves a URL.
+
+ Args:
+ url: String containing the URL to access.
+ payload: Request payload to send, if any; None if no payload.
+ method: HTTP method to use (e.g., 'GET')
+ headers: List of additional header objects to use for the request.
+ response: Response object
+ follow_redirects: optional setting (defaulting to True) for whether or not
+ we should transparently follow redirects (up to MAX_REDIRECTS)
+ deadline: Number of seconds to wait for the urlfetch to finish.
+
+ Raises:
+      apiproxy_errors.ApplicationError with FETCH_ERROR in cases where:
+ - MAX_REDIRECTS is exceeded
+ - The protocol of the redirected URL is bad or missing.
+ """
+ last_protocol = ''
+ last_host = ''
+
+ for redirect_number in xrange(MAX_REDIRECTS + 1):
+ parsed = urlparse.urlparse(url)
+ protocol, host, path, parameters, query, fragment = parsed
+
+ port = urllib.splitport(urllib.splituser(host)[1])[1]
+
+ if port not in PORTS_ALLOWED_IN_PRODUCTION:
+ logging.warning(
+ 'urlfetch received %s ; port %s is not allowed in production!' %
+ (url, port))
+
+ if protocol and not host:
+ logging.error('Missing host on redirect; target url is %s' % url)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR)
+
+ if not host and not protocol:
+ host = last_host
+ protocol = last_protocol
+
+ adjusted_headers = {
+ 'User-Agent':
+ 'AppEngine-Google; (+http://code.google.com/appengine)',
+ 'Host': host,
+ 'Accept-Encoding': 'gzip',
+ }
+ if payload is not None:
+ adjusted_headers['Content-Length'] = len(payload)
+ if method == 'POST' and payload:
+ adjusted_headers['Content-Type'] = 'application/x-www-form-urlencoded'
+
+ for header in headers:
+ if header.key().title().lower() == 'user-agent':
+ adjusted_headers['User-Agent'] = (
+ '%s %s' %
+ (header.value(), adjusted_headers['User-Agent']))
+ else:
+ adjusted_headers[header.key().title()] = header.value()
+
+ logging.debug('Making HTTP request: host = %s, '
+ 'url = %s, payload = %s, headers = %s',
+ host, url, payload, adjusted_headers)
+ try:
+ if protocol == 'http':
+ connection = httplib.HTTPConnection(host)
+ elif protocol == 'https':
+ connection = httplib.HTTPSConnection(host)
+ else:
+ error_msg = 'Redirect specified invalid protocol: "%s"' % protocol
+ logging.error(error_msg)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
+
+ last_protocol = protocol
+ last_host = host
+
+ if query != '':
+ full_path = path + '?' + query
+ else:
+ full_path = path
+
+ orig_timeout = socket.getdefaulttimeout()
+ try:
+ socket.setdefaulttimeout(deadline)
+ connection.request(method, full_path, payload, adjusted_headers)
+ http_response = connection.getresponse()
+ if method == 'HEAD':
+ http_response_data = ''
+ else:
+ http_response_data = http_response.read()
+ finally:
+ socket.setdefaulttimeout(orig_timeout)
+ connection.close()
+ except (httplib.error, socket.error, IOError), e:
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, str(e))
+
+ if http_response.status in REDIRECT_STATUSES and follow_redirects:
+ url = http_response.getheader('Location', None)
+ if url is None:
+ error_msg = 'Redirecting response was missing "Location" header'
+ logging.error(error_msg)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
+ else:
+ response.set_statuscode(http_response.status)
+ if http_response.getheader('content-encoding') == 'gzip':
+ gzip_stream = StringIO.StringIO(http_response_data)
+ gzip_file = gzip.GzipFile(fileobj=gzip_stream)
+ http_response_data = gzip_file.read()
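+          # The body is decompressed here so the Content-Length header
+          # rewritten below matches the decoded payload that is returned.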
+ response.set_content(http_response_data[:MAX_RESPONSE_SIZE])
+ for header_key, header_value in http_response.getheaders():
+ if (header_key.lower() == 'content-encoding' and
+ header_value == 'gzip'):
+ continue
+ if header_key.lower() == 'content-length':
+ header_value = str(len(response.content()))
+ header_proto = response.add_header()
+ header_proto.set_key(header_key)
+ header_proto.set_value(header_value)
+
+ if len(http_response_data) > MAX_RESPONSE_SIZE:
+ response.set_contentwastruncated(True)
+
+ break
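+    # This `else` belongs to the `for` loop above, not to an `if`: it runs
+    # only if all MAX_REDIRECTS + 1 iterations finish without `break`,
+    # i.e. when every response was itself a redirect.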
+ else:
+ error_msg = 'Too many repeated redirects'
+ logging.error(error_msg)
+ raise apiproxy_errors.ApplicationError(
+ urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
+
+ def _SanitizeHttpHeaders(self, untrusted_headers, headers):
+ """Cleans "unsafe" headers from the HTTP request/response.
+
+ Args:
+      untrusted_headers: Set of lowercase header names to strip.
+      headers: List of header objects exposing key() and value() accessors.
+
+    Returns:
+      A generator over the headers whose keys are not in untrusted_headers.
+ """
+ prohibited_headers = [h.key() for h in headers
+ if h.key().lower() in untrusted_headers]
+ if prohibited_headers:
+ logging.warn('Stripped prohibited headers from URLFetch request: %s',
+ prohibited_headers)
+ return (h for h in headers if h.key().lower() not in untrusted_headers)
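+
+
+# A minimal wiring sketch (an illustrative addition, not part of the stub
+# module): registers the stub the way the dev server does and issues one
+# fetch through the public urlfetch API.  Assumes network access and that
+# apiproxy_stub_map.APIProxyStubMap and urlfetch.fetch behave as in the
+# rest of this SDK.
+if __name__ == '__main__':
+  from google.appengine.api import apiproxy_stub_map
+  apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
+  apiproxy_stub_map.apiproxy.RegisterStub('urlfetch', URLFetchServiceStub())
+
+  result = urlfetch.fetch('http://example.com/')
+  print result.status_code, len(result.content)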
diff --git a/google_appengine/google/appengine/api/urlfetch_stub.pyc b/google_appengine/google/appengine/api/urlfetch_stub.pyc
new file mode 100644
index 0000000..136ea22
--- /dev/null
+++ b/google_appengine/google/appengine/api/urlfetch_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/user_service_pb.py b/google_appengine/google/appengine/api/user_service_pb.py
new file mode 100644
index 0000000..1fe799b
--- /dev/null
+++ b/google_appengine/google/appengine/api/user_service_pb.py
@@ -0,0 +1,491 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+from google.appengine.api.api_base_pb import *
+class UserServiceError(ProtocolBuffer.ProtocolMessage):
+
+ OK = 0
+ REDIRECT_URL_TOO_LONG = 1
+ NOT_ALLOWED = 2
+
+ _ErrorCode_NAMES = {
+ 0: "OK",
+ 1: "REDIRECT_URL_TOO_LONG",
+ 2: "NOT_ALLOWED",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CreateLoginURLRequest(ProtocolBuffer.ProtocolMessage):
+ has_destination_url_ = 0
+ destination_url_ = ""
+ has_auth_domain_ = 0
+ auth_domain_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def destination_url(self): return self.destination_url_
+
+ def set_destination_url(self, x):
+ self.has_destination_url_ = 1
+ self.destination_url_ = x
+
+ def clear_destination_url(self):
+ if self.has_destination_url_:
+ self.has_destination_url_ = 0
+ self.destination_url_ = ""
+
+ def has_destination_url(self): return self.has_destination_url_
+
+ def auth_domain(self): return self.auth_domain_
+
+ def set_auth_domain(self, x):
+ self.has_auth_domain_ = 1
+ self.auth_domain_ = x
+
+ def clear_auth_domain(self):
+ if self.has_auth_domain_:
+ self.has_auth_domain_ = 0
+ self.auth_domain_ = ""
+
+ def has_auth_domain(self): return self.has_auth_domain_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_destination_url()): self.set_destination_url(x.destination_url())
+ if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_destination_url_ != x.has_destination_url_: return 0
+ if self.has_destination_url_ and self.destination_url_ != x.destination_url_: return 0
+ if self.has_auth_domain_ != x.has_auth_domain_: return 0
+ if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_destination_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: destination_url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.destination_url_))
+ if (self.has_auth_domain_): n += 1 + self.lengthString(len(self.auth_domain_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_destination_url()
+ self.clear_auth_domain()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.destination_url_)
+ if (self.has_auth_domain_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.auth_domain_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_destination_url(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_auth_domain(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_destination_url_: res+=prefix+("destination_url: %s\n" % self.DebugFormatString(self.destination_url_))
+ if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kdestination_url = 1
+ kauth_domain = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "destination_url",
+ 2: "auth_domain",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CreateLoginURLResponse(ProtocolBuffer.ProtocolMessage):
+ has_login_url_ = 0
+ login_url_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def login_url(self): return self.login_url_
+
+ def set_login_url(self, x):
+ self.has_login_url_ = 1
+ self.login_url_ = x
+
+ def clear_login_url(self):
+ if self.has_login_url_:
+ self.has_login_url_ = 0
+ self.login_url_ = ""
+
+ def has_login_url(self): return self.has_login_url_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_login_url()): self.set_login_url(x.login_url())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_login_url_ != x.has_login_url_: return 0
+ if self.has_login_url_ and self.login_url_ != x.login_url_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_login_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: login_url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.login_url_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_login_url()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.login_url_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_login_url(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_login_url_: res+=prefix+("login_url: %s\n" % self.DebugFormatString(self.login_url_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ klogin_url = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "login_url",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CreateLogoutURLRequest(ProtocolBuffer.ProtocolMessage):
+ has_destination_url_ = 0
+ destination_url_ = ""
+ has_auth_domain_ = 0
+ auth_domain_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def destination_url(self): return self.destination_url_
+
+ def set_destination_url(self, x):
+ self.has_destination_url_ = 1
+ self.destination_url_ = x
+
+ def clear_destination_url(self):
+ if self.has_destination_url_:
+ self.has_destination_url_ = 0
+ self.destination_url_ = ""
+
+ def has_destination_url(self): return self.has_destination_url_
+
+ def auth_domain(self): return self.auth_domain_
+
+ def set_auth_domain(self, x):
+ self.has_auth_domain_ = 1
+ self.auth_domain_ = x
+
+ def clear_auth_domain(self):
+ if self.has_auth_domain_:
+ self.has_auth_domain_ = 0
+ self.auth_domain_ = ""
+
+ def has_auth_domain(self): return self.has_auth_domain_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_destination_url()): self.set_destination_url(x.destination_url())
+ if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_destination_url_ != x.has_destination_url_: return 0
+ if self.has_destination_url_ and self.destination_url_ != x.destination_url_: return 0
+ if self.has_auth_domain_ != x.has_auth_domain_: return 0
+ if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_destination_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: destination_url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.destination_url_))
+ if (self.has_auth_domain_): n += 1 + self.lengthString(len(self.auth_domain_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_destination_url()
+ self.clear_auth_domain()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.destination_url_)
+ if (self.has_auth_domain_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.auth_domain_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_destination_url(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_auth_domain(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_destination_url_: res+=prefix+("destination_url: %s\n" % self.DebugFormatString(self.destination_url_))
+ if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kdestination_url = 1
+ kauth_domain = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "destination_url",
+ 2: "auth_domain",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class CreateLogoutURLResponse(ProtocolBuffer.ProtocolMessage):
+ has_logout_url_ = 0
+ logout_url_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def logout_url(self): return self.logout_url_
+
+ def set_logout_url(self, x):
+ self.has_logout_url_ = 1
+ self.logout_url_ = x
+
+ def clear_logout_url(self):
+ if self.has_logout_url_:
+ self.has_logout_url_ = 0
+ self.logout_url_ = ""
+
+ def has_logout_url(self): return self.has_logout_url_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_logout_url()): self.set_logout_url(x.logout_url())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_logout_url_ != x.has_logout_url_: return 0
+ if self.has_logout_url_ and self.logout_url_ != x.logout_url_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_logout_url_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: logout_url not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.logout_url_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_logout_url()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.logout_url_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_logout_url(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_logout_url_: res+=prefix+("logout_url: %s\n" % self.DebugFormatString(self.logout_url_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ klogout_url = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "logout_url",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['UserServiceError','CreateLoginURLRequest','CreateLoginURLResponse','CreateLogoutURLRequest','CreateLogoutURLResponse']
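+
+# Sketch of the required-field bookkeeping above (an illustrative addition,
+# not part of the generated module):
+if __name__ == '__main__':
+  req = CreateLoginURLRequest()
+  errors = []
+  assert not req.IsInitialized(errors)
+  assert errors == ['Required field: destination_url not set.']
+
+  req.set_destination_url('/after_login')
+  assert req.IsInitialized()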
diff --git a/google_appengine/google/appengine/api/user_service_pb.pyc b/google_appengine/google/appengine/api/user_service_pb.pyc
new file mode 100644
index 0000000..2d478ee
--- /dev/null
+++ b/google_appengine/google/appengine/api/user_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/user_service_stub.py b/google_appengine/google/appengine/api/user_service_stub.py
new file mode 100755
index 0000000..d1542e1
--- /dev/null
+++ b/google_appengine/google/appengine/api/user_service_stub.py
@@ -0,0 +1,106 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Trivial implementation of the UserService."""
+
+
+import os
+import urllib
+import urlparse
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import user_service_pb
+
+
+_DEFAULT_LOGIN_URL = 'https://www.google.com/accounts/Login?continue=%s'
+_DEFAULT_LOGOUT_URL = 'https://www.google.com/accounts/Logout?continue=%s'
+
+
+class UserServiceStub(apiproxy_stub.APIProxyStub):
+ """Trivial implementation of the UserService."""
+
+ def __init__(self,
+ login_url=_DEFAULT_LOGIN_URL,
+ logout_url=_DEFAULT_LOGOUT_URL,
+ service_name='user'):
+ """Initializer.
+
+ Args:
+ login_url: String containing the URL to use for logging in.
+ logout_url: String containing the URL to use for logging out.
+ service_name: Service name expected for all calls.
+
+ Note: Both the login_url and logout_url arguments must contain one format
+ parameter, which will be replaced with the continuation URL where the user
+ should be redirected after log-in or log-out has been completed.
+ """
+ super(UserServiceStub, self).__init__(service_name)
+ self.__num_requests = 0
+ self._login_url = login_url
+ self._logout_url = logout_url
+
+ os.environ['AUTH_DOMAIN'] = 'gmail.com'
+
+ def num_requests(self):
+ return self.__num_requests
+
+ def _Dynamic_CreateLoginURL(self, request, response):
+ """Trivial implementation of UserService.CreateLoginURL().
+
+ Args:
+      request: a user_service_pb.CreateLoginURLRequest carrying the URL to
+        redirect to after login
+      response: a user_service_pb.CreateLoginURLResponse that receives the
+        login URL
+ """
+ self.__num_requests += 1
+ response.set_login_url(
+ self._login_url %
+ urllib.quote(self._AddHostToContinueURL(request.destination_url())))
+
+ def _Dynamic_CreateLogoutURL(self, request, response):
+ """Trivial implementation of UserService.CreateLogoutURL().
+
+ Args:
+      request: a user_service_pb.CreateLogoutURLRequest carrying the URL to
+        redirect to after logout
+      response: a user_service_pb.CreateLogoutURLResponse that receives the
+        logout URL
+ """
+ self.__num_requests += 1
+ response.set_logout_url(
+ self._logout_url %
+ urllib.quote(self._AddHostToContinueURL(request.destination_url())))
+
+ def _AddHostToContinueURL(self, continue_url):
+ """Adds the request host to the continue url if no host is specified.
+
+ Args:
+ continue_url: the URL which may or may not have a host specified
+
+ Returns:
+ string
+ """
+ (protocol, host, path, parameters, query, fragment) = urlparse.urlparse(continue_url, 'http')
+
+ if host:
+ return continue_url
+
+ host = os.environ['SERVER_NAME']
+ if os.environ['SERVER_PORT'] != '80':
+ host = host + ":" + os.environ['SERVER_PORT']
+
+ if path == '':
+ path = '/'
+
+ return urlparse.urlunparse(
+ (protocol, host, path, parameters, query, fragment))
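+
+# A minimal sketch of the stub in isolation (an illustrative addition, not
+# part of the module); it bypasses apiproxy dispatch and calls the handler
+# directly, with SERVER_NAME/SERVER_PORT faked for _AddHostToContinueURL:
+if __name__ == '__main__':
+  os.environ['SERVER_NAME'] = 'localhost'
+  os.environ['SERVER_PORT'] = '8080'
+
+  stub = UserServiceStub()
+  request = user_service_pb.CreateLoginURLRequest()
+  request.set_destination_url('/home')
+  response = user_service_pb.CreateLoginURLResponse()
+  stub._Dynamic_CreateLoginURL(request, response)
+
+  # prints the default login URL with the continue parameter
+  # http%3A//localhost%3A8080/home filled in
+  print response.login_url()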
diff --git a/google_appengine/google/appengine/api/user_service_stub.pyc b/google_appengine/google/appengine/api/user_service_stub.pyc
new file mode 100644
index 0000000..e5083cd
--- /dev/null
+++ b/google_appengine/google/appengine/api/user_service_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/users.py b/google_appengine/google/appengine/api/users.py
new file mode 100755
index 0000000..3577510
--- /dev/null
+++ b/google_appengine/google/appengine/api/users.py
@@ -0,0 +1,230 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Python datastore class User to be used as a datastore data type.
+
+Classes defined here:
+ User: object representing a user.
+ Error: base exception type
+ UserNotFoundError: UserService exception
+ RedirectTooLongError: UserService exception
+ NotAllowedError: UserService exception
+"""
+
+
+
+
+
+
+import os
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api import user_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+class Error(Exception):
+ """Base User error type."""
+
+
+class UserNotFoundError(Error):
+ """Raised by User.__init__() when there's no email argument and no user is
+ logged in."""
+
+
+class RedirectTooLongError(Error):
+ """Raised by UserService calls if the generated redirect URL was too long.
+ """
+
+
+class NotAllowedError(Error):
+ """Raised by UserService calls if the requested redirect URL is not allowed.
+ """
+
+
+class User(object):
+ """A user.
+
+ We provide the email address, nickname, auth domain, and id for a user.
+
+ A nickname is a human-readable string which uniquely identifies a Google
+ user, akin to a username. It will be an email address for some users, but
+ not all.
+ """
+
+
+ __user_id = None
+
+ def __init__(self, email=None, _auth_domain=None, _user_id=None):
+ """Constructor.
+
+ Args:
+ email: An optional string of the user's email address. It defaults to
+ the current user's email address.
+
+ Raises:
+ UserNotFoundError: Raised if the user is not logged in and the email
+ argument is empty.
+ """
+ if _auth_domain is None:
+ _auth_domain = os.environ.get('AUTH_DOMAIN')
+ else:
+ assert email is not None
+
+ assert _auth_domain
+
+ if email is None:
+ assert 'USER_EMAIL' in os.environ
+ email = os.environ['USER_EMAIL']
+ if _user_id is None and 'USER_ID' in os.environ:
+ _user_id = os.environ['USER_ID']
+
+ if not email:
+ raise UserNotFoundError
+
+ self.__email = email
+ self.__auth_domain = _auth_domain
+ self.__user_id = _user_id or None
+
+ def nickname(self):
+ """Return this user's nickname.
+
+ The nickname will be a unique, human readable identifier for this user
+ with respect to this application. It will be an email address for some
+ users, but not all.
+ """
+ if (self.__email and self.__auth_domain and
+ self.__email.endswith('@' + self.__auth_domain)):
+ suffix_len = len(self.__auth_domain) + 1
+ return self.__email[:-suffix_len]
+ else:
+ return self.__email
+
+ def email(self):
+ """Return this user's email address."""
+ return self.__email
+
+ def user_id(self):
+ """Return either a permanent unique identifying string or None.
+
+    If the email address was set explicitly, this will return None.
+ """
+ return self.__user_id
+
+ def auth_domain(self):
+ """Return this user's auth domain."""
+ return self.__auth_domain
+
+ def __unicode__(self):
+ return unicode(self.nickname())
+
+ def __str__(self):
+ return str(self.nickname())
+
+ def __repr__(self):
+ if self.__user_id:
+ return "users.User(email='%s',_user_id='%s')" % (self.email(),
+ self.user_id())
+ else:
+ return "users.User(email='%s')" % self.email()
+
+ def __hash__(self):
+ return hash((self.__email, self.__auth_domain))
+
+ def __cmp__(self, other):
+ if not isinstance(other, User):
+ return NotImplemented
+ return cmp((self.__email, self.__auth_domain),
+ (other.__email, other.__auth_domain))
+
+
+def create_login_url(dest_url):
+ """Computes the login URL for this request and specified destination URL.
+
+ Args:
+ dest_url: String that is the desired final destination URL for the user
+ once login is complete. If 'dest_url' does not have a host
+ specified, we will use the host from the current request.
+
+ Returns:
+ string
+ """
+ req = user_service_pb.CreateLoginURLRequest()
+ resp = user_service_pb.CreateLoginURLResponse()
+ req.set_destination_url(dest_url)
+ try:
+ apiproxy_stub_map.MakeSyncCall('user', 'CreateLoginURL', req, resp)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ user_service_pb.UserServiceError.REDIRECT_URL_TOO_LONG):
+ raise RedirectTooLongError
+ elif (e.application_error ==
+ user_service_pb.UserServiceError.NOT_ALLOWED):
+ raise NotAllowedError
+ else:
+ raise e
+ return resp.login_url()
+
+CreateLoginURL = create_login_url
+
+
+def create_logout_url(dest_url):
+ """Computes the logout URL for this request and specified destination URL.
+
+ Args:
+ dest_url: String that is the desired final destination URL for the user
+ once logout is complete. If 'dest_url' does not have a host
+ specified, we will use the host from the current request.
+
+ Returns:
+ string
+ """
+ req = user_service_pb.CreateLogoutURLRequest()
+ resp = user_service_pb.CreateLogoutURLResponse()
+ req.set_destination_url(dest_url)
+ try:
+ apiproxy_stub_map.MakeSyncCall('user', 'CreateLogoutURL', req, resp)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ user_service_pb.UserServiceError.REDIRECT_URL_TOO_LONG):
+ raise RedirectTooLongError
+ else:
+ raise e
+ return resp.logout_url()
+
+CreateLogoutURL = create_logout_url
+
+
+def get_current_user():
+ try:
+ return User()
+ except UserNotFoundError:
+ return None
+
+GetCurrentUser = get_current_user
+
+
+def is_current_user_admin():
+ """Return true if the user making this request is an admin for this
+ application, false otherwise.
+
+ We specifically make this a separate function, and not a member function of
+ the User class, because admin status is not persisted in the datastore. It
+ only exists for the user making this request right now.
+ """
+ return (os.environ.get('USER_IS_ADMIN', '0')) == '1'
+
+IsCurrentUserAdmin = is_current_user_admin
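+
+# A minimal sketch of the module-level helpers (an illustrative addition,
+# not part of the module); the environment variables stand in for what the
+# runtime sets on a logged-in request, and USER_IS_ADMIN is assumed unset:
+if __name__ == '__main__':
+  os.environ['AUTH_DOMAIN'] = 'gmail.com'
+  os.environ['USER_EMAIL'] = 'someone@gmail.com'
+
+  user = get_current_user()
+  assert user is not None
+  assert user.nickname() == 'someone'
+  assert not is_current_user_admin()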
diff --git a/google_appengine/google/appengine/api/users.pyc b/google_appengine/google/appengine/api/users.pyc
new file mode 100644
index 0000000..365ddb8
--- /dev/null
+++ b/google_appengine/google/appengine/api/users.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/validation.py b/google_appengine/google/appengine/api/validation.py
new file mode 100755
index 0000000..00833e6
--- /dev/null
+++ b/google_appengine/google/appengine/api/validation.py
@@ -0,0 +1,928 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Validation tools for generic object structures.
+
+This library is used for defining classes with constrained attributes.
+Attributes are defined on the class which contains them using validators.
+Although validators can be defined by any client of this library, a number
+of standard validators are provided here.
+
+Validators can be any callable that takes a single parameter which checks
+the new value before it is assigned to the attribute. Validators are
+permitted to modify a received value so that it is appropriate for the
+attribute definition. For example, using int as a validator will cast
+a correctly formatted string to a number, or raise an exception if it
+cannot. This is not recommended, however; the correct way to use a
+validator that ensures the correct type is to use the Type validator.
+
+This validation library is mainly intended for use with the YAML object
+builder. See yaml_object.py.
+"""
+
+
+
+
+
+import re
+
+import google
+import yaml
+
+
+class Error(Exception):
+ """Base class for all package errors."""
+
+
+class AttributeDefinitionError(Error):
+ """An error occurred in the definition of class attributes."""
+
+
+class ValidationError(Error):
+ """Base class for raising exceptions during validation."""
+
+ def __init__(self, message, cause=None):
+ """Initialize exception."""
+ if hasattr(cause, 'args') and cause.args:
+ Error.__init__(self, message, *cause.args)
+ else:
+ Error.__init__(self, message)
+ self.message = message
+ self.cause = cause
+
+ def __str__(self):
+ return str(self.message)
+
+
+class MissingAttribute(ValidationError):
+ """Raised when a required attribute is missing from object."""
+
+
+def AsValidator(validator):
+ """Wrap various types as instances of a validator.
+
+ Used to allow shorthand for common validator types. It
+ converts the following types to the following Validators.
+
+ strings -> Regex
+ type -> Type
+ collection -> Options
+    Validator -> Itself!
+
+ Args:
+ validator: Object to wrap in a validator.
+
+ Returns:
+ Validator instance that wraps the given value.
+
+ Raises:
+ AttributeDefinitionError if validator is not one of the above described
+ types.
+ """
+ if isinstance(validator, (str, unicode)):
+ return Regex(validator, type(validator))
+ if isinstance(validator, type):
+ return Type(validator)
+ if isinstance(validator, (list, tuple, set)):
+ return Options(*tuple(validator))
+ if isinstance(validator, Validator):
+ return validator
+ else:
+ raise AttributeDefinitionError('%s is not a valid validator' %
+ str(validator))
+
+
+class Validated(object):
+ """Base class for other classes that require validation.
+
+ A class which intends to use validated fields should sub-class itself from
+ this class. Each class should define an 'ATTRIBUTES' class variable which
+ should be a map from attribute name to its validator. For example:
+
+ class Story(Validated):
+ ATTRIBUTES = {'title': Type(str),
+ 'authors': Repeated(Type(str)),
+ 'isbn': Optional(Type(str)),
+ 'pages': Type(int),
+ }
+
+ Attributes that are not listed under ATTRIBUTES work like normal and are
+ not validated upon assignment.
+ """
+
+ ATTRIBUTES = None
+
+ def __init__(self, **attributes):
+ """Constructor for Validated classes.
+
+ This constructor can optionally assign values to the class via its
+ keyword arguments.
+
+ Raises:
+      AttributeDefinitionError when the class is missing its ATTRIBUTES
+      definition or when ATTRIBUTES is of the wrong type.
+ """
+ if not isinstance(self.ATTRIBUTES, dict):
+ raise AttributeDefinitionError(
+          'The class %s does not define an ATTRIBUTES variable.'
+ % self.__class__)
+
+ for key in self.ATTRIBUTES.keys():
+ object.__setattr__(self, key, self.GetAttribute(key).default)
+
+ self.Set(**attributes)
+
+ @classmethod
+  def GetAttribute(cls, key):
+ """Safely get the underlying attribute definition as a Validator.
+
+ Args:
+ key: Name of attribute to get.
+
+ Returns:
+ Validator associated with key or attribute value wrapped in a
+ validator.
+ """
+    return AsValidator(cls.ATTRIBUTES[key])
+
+ def Set(self, **attributes):
+ """Set multiple values on Validated instance.
+
+    This method can only be used to assign validated attributes.
+
+ Args:
+ attributes: Attributes to set on object.
+
+ Raises:
+ ValidationError when no validated attribute exists on class.
+ """
+ for key, value in attributes.iteritems():
+ if key not in self.ATTRIBUTES:
+ raise ValidationError('Class \'%s\' does not have attribute \'%s\''
+ % (self.__class__, key))
+ setattr(self, key, value)
+
+ def CheckInitialized(self):
+ """Checks that all required fields are initialized.
+
+ Since an instance of Validated starts off in an uninitialized state, it
+ is sometimes necessary to check that it has been fully initialized.
+ The main problem this solves is how to validate that an instance has
+ all of its required fields set. By default, Validator classes do not
+ allow None, but all attributes are initialized to None when instantiated.
+
+ Raises:
+ Exception relevant to the kind of validation. The type of the exception
+ is determined by the validator. Typically this will be ValueError or
+ TypeError.
+ """
+ for key in self.ATTRIBUTES.iterkeys():
+ try:
+ self.GetAttribute(key)(getattr(self, key))
+ except MissingAttribute, e:
+ e.message = "Missing required value '%s'." % key
+ raise e
+
+
+ def __setattr__(self, key, value):
+ """Set attribute.
+
+ Setting a value on an object of this type will only work for attributes
+ defined in ATTRIBUTES. To make other assignments possible it is necessary
+ to override this method in subclasses.
+
+ It is important that assignment is restricted in this way because
+ this validation is used as validation for parsing. Absent this restriction
+ it would be possible for method names to be overwritten.
+
+ Args:
+ key: Name of attribute to set.
+      value: The attribute's new value.
+
+    Raises:
+      ValidationError when trying to assign to an attribute that does not
+      exist.
+ """
+
+ if key in self.ATTRIBUTES:
+ value = self.GetAttribute(key)(value)
+ object.__setattr__(self, key, value)
+ else:
+ raise ValidationError('Class \'%s\' does not have attribute \'%s\''
+ % (self.__class__, key))
+
+ def __str__(self):
+ """Formatted view of validated object and nested values."""
+ return repr(self)
+
+ def __repr__(self):
+ """Formatted view of validated object and nested values."""
+ values = [(attr, getattr(self, attr)) for attr in self.ATTRIBUTES]
+ dent = ' '
+ value_list = []
+ for attr, value in values:
+ value_list.append('\n%s%s=%s' % (dent, attr, value))
+
+ return "<%s %s\n%s>" % (self.__class__.__name__, ' '.join(value_list), dent)
+
+ def __eq__(self, other):
+ """Equality operator.
+
+ Comparison is done by comparing all attribute values to those in the other
+ instance. Objects which are not of the same type are not equal.
+
+ Args:
+ other: Other object to compare against.
+
+ Returns:
+ True if validated objects are equal, else False.
+ """
+ if type(self) != type(other):
+ return False
+ for key in self.ATTRIBUTES.iterkeys():
+ if getattr(self, key) != getattr(other, key):
+ return False
+ return True
+
+ def __ne__(self, other):
+ """Inequality operator."""
+ return not self.__eq__(other)
+
+ def __hash__(self):
+ """Hash function for using Validated objects in sets and maps.
+
+ Hash is done by hashing all keys and values and xor'ing them together.
+
+ Returns:
+ Hash of validated object.
+ """
+ result = 0
+ for key in self.ATTRIBUTES.iterkeys():
+ value = getattr(self, key)
+ if isinstance(value, list):
+ value = tuple(value)
+ result = result ^ hash(key) ^ hash(value)
+ return result
+
+ @staticmethod
+ def _ToValue(validator, value):
+ """Convert any value to simplified collections and basic types.
+
+ Args:
+ validator: An instance of Validator that corresponds with 'value'.
+ May also be 'str' or 'int' if those were used instead of a full
+ Validator.
+ value: Value to convert to simplified collections.
+
+ Returns:
+ The value as a dictionary if it is a Validated object.
+ A list of items converted to simplified collections if value is a list
+ or a tuple.
+ Otherwise, just the value.
+ """
+ if isinstance(value, Validated):
+ return value.ToDict()
+ elif isinstance(value, (list, tuple)):
+ return [Validated._ToValue(validator, item) for item in value]
+ else:
+ if isinstance(validator, Validator):
+ return validator.ToValue(value)
+ return value
+
+ def ToDict(self):
+ """Convert Validated object to a dictionary.
+
+ Recursively traverses all of its elements and converts everything to
+ simplified collections.
+
+ Returns:
+      A dict of all attributes defined in this class's ATTRIBUTES mapped
+ to its value. This structure is recursive in that Validated objects
+ that are referenced by this object and in lists are also converted to
+ dicts.
+ """
+ result = {}
+ for name, validator in self.ATTRIBUTES.iteritems():
+ value = getattr(self, name)
+      if not (isinstance(validator, Validator) and value == validator.default):
+ result[name] = Validated._ToValue(validator, value)
+ return result
+
+ def ToYAML(self):
+ """Print validated object as simplified YAML.
+
+ Returns:
+ Object as a simplified YAML string compatible with parsing using the
+ SafeLoader.
+ """
+ return yaml.dump(self.ToDict(),
+ default_flow_style=False,
+ Dumper=yaml.SafeDumper)
+
+
+
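+# Worked example of the Validated lifecycle above, as comments (Type and
+# Optional are defined further down in this module):
+#
+#   class Story(Validated):
+#     ATTRIBUTES = {'title': Type(str), 'pages': Optional(Type(int))}
+#
+#   s = Story(title='Go')   # __setattr__ routes 'Go' through Type(str)
+#   s.pages = '12'          # Type(int) converts '12' to 12 on assignment
+#   s.CheckInitialized()    # passes: title is set and pages is Optional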
+class Validator(object):
+ """Validator base class.
+
+ Though any callable can be used as a validator, this class encapsulates the
+ case when a specific validator needs to hold a particular state or
+ configuration.
+
+ To implement Validator sub-class, override the validate method.
+
+ This class is permitted to change the ultimate value that is set to the
+ attribute if there is a reasonable way to perform the conversion.
+ """
+
+ expected_type = object
+
+ def __init__(self, default=None):
+ """Constructor.
+
+ Args:
+ default: Default assignment is made during initialization and will
+ not pass through validation.
+ """
+ self.default = default
+
+ def __call__(self, value):
+ """Main interface to validator is call mechanism."""
+ return self.Validate(value)
+
+ def Validate(self, value):
+ """Override this method to customize sub-class behavior.
+
+ Args:
+ value: Value to validate.
+
+ Returns:
+ Value if value is valid, or a valid representation of value.
+ """
+ return value
+
+ def ToValue(self, value):
+ """Convert 'value' to a simplified collection or basic type.
+
+ Subclasses of Validator should override this method when the dumped
+ representation of 'value' is not simply <type>(value) (e.g. a regex).
+
+ Args:
+ value: An object of the same type that was returned from Validate().
+
+ Returns:
+ An instance of a builtin type (e.g. int, str, dict, etc). By default
+ it returns 'value' unmodified.
+ """
+ return value
+
+
+class Type(Validator):
+ """Verifies property is of expected type.
+
+ Can optionally convert value if it is not of the expected type.
+
+ It is possible to specify a required field of a specific type in shorthand
+  by merely providing the type. This shorthand is slightly less efficient
+  than providing an explicit Type instance, but the difference is not
+  significant unless parsing a large amount of information:
+
+ class Person(Validated):
+ ATTRIBUTES = {'name': unicode,
+ 'age': int,
+ }
+
+ However, in most instances it is best to use the type constants:
+
+ class Person(Validated):
+      ATTRIBUTES = {'name': TYPE_UNICODE,
+                    'age': TYPE_INT,
+ }
+ """
+
+ def __init__(self, expected_type, convert=True, default=None):
+ """Initialize Type validator.
+
+ Args:
+ expected_type: Type that attribute should validate against.
+ convert: Cause conversion if value is not the right type.
+ Conversion is done by calling the constructor of the type
+                 with the value as its first parameter.
+      default: Default value; assigned during initialization and not passed
+                 through validation.
+ """
+ super(Type, self).__init__(default)
+ self.expected_type = expected_type
+ self.convert = convert
+
+ def Validate(self, value):
+ """Validate that value is correct type.
+
+ Args:
+ value: Value to validate.
+
+ Returns:
+ None if value is None, value if value is of correct type, converted
+ value if the validator is configured to convert.
+
+ Raises:
+ ValidationError if value is not of the right type and validator
+ is not configured to convert.
+ """
+ if not isinstance(value, self.expected_type):
+ if value is not None and self.convert:
+ try:
+ return self.expected_type(value)
+ except ValueError, e:
+ raise ValidationError('Type conversion failed for value \'%s\'.'
+ % value,
+ e)
+ except TypeError, e:
+ raise ValidationError('Expected value of type %s, but got \'%s\'.'
+ % (self.expected_type, value))
+ else:
+ raise MissingAttribute('Missing value is required.')
+ else:
+ return value
+
+
+TYPE_BOOL = Type(bool)
+TYPE_INT = Type(int)
+TYPE_LONG = Type(long)
+TYPE_STR = Type(str)
+TYPE_UNICODE = Type(unicode)
+TYPE_FLOAT = Type(float)
+
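+# For example, TYPE_INT('25') returns 25 (conversion via int()), while
+# TYPE_INT(None) raises MissingAttribute because the value is required.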
+
+class Options(Validator):
+ """Limit field based on pre-determined values.
+
+ Options are used to make sure an enumerated set of values are the only
+ one permitted for assignment. It is possible to define aliases which
+ map multiple string values to a single original. An example of usage:
+
+    class ZooAnimal(Validated):
+      ATTRIBUTES = {
+        'name': str,
+        'kind': Options('platypus', # No aliases
+                        ('rhinoceros', ['rhino']), # One alias
+                        ('canine', ('dog', 'puppy')), # Two aliases
+                        ),
+      }
+ """
+
+ def __init__(self, *options, **kw):
+ """Initialize options.
+
+ Args:
+ options: List of allowed values.
+ """
+ if 'default' in kw:
+ default = kw['default']
+ else:
+ default = None
+
+ alias_map = {}
+ def AddAlias(alias, original):
+ """Set new alias on alias_map.
+
+ Raises:
+ AttributeDefinitionError when option already exists or if alias is
+      not of type str.
+ """
+ if not isinstance(alias, str):
+ raise AttributeDefinitionError(
+ 'All option values must be of type str.')
+ elif alias in alias_map:
+ raise AttributeDefinitionError(
+ "Option '%s' already defined for options property." % alias)
+ alias_map[alias] = original
+
+ for option in options:
+ if isinstance(option, str):
+ AddAlias(option, option)
+
+ elif isinstance(option, (list, tuple)):
+ if len(option) != 2:
+ raise AttributeDefinitionError("Alias is defined as a list of tuple "
+ "with two items. The first is the "
+ "original option, while the second "
+ "is a list or tuple of str aliases.\n"
+ "\n Example:\n"
+ " ('original', ('alias1', "
+ "'alias2'")
+ original, aliases = option
+ AddAlias(original, original)
+ if not isinstance(aliases, (list, tuple)):
+ raise AttributeDefinitionError('Alias lists must be a list or tuple')
+
+ for alias in aliases:
+ AddAlias(alias, original)
+
+ else:
+ raise AttributeDefinitionError("All options must be of type str "
+ "or of the form (str, [str...]).")
+ super(Options, self).__init__(default)
+ self.options = alias_map
+
+ def Validate(self, value):
+ """Validate options.
+
+ Returns:
+ Original value for provided alias.
+
+ Raises:
+ ValidationError when value is not one of predefined values.
+ """
+ if value is None:
+ raise ValidationError('Value for options field must not be None.')
+ value = str(value)
+ if value not in self.options:
+ raise ValidationError('Value \'%s\' not in %s.'
+ % (value, self.options))
+ return self.options[value]
+
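+# Editor's illustrative sketch (hypothetical, not part of the SDK): aliases
+# resolve back to their canonical option value during validation.
+def _example_options():
+  """Sketch: Validate returns the original option for an alias."""
+  kind = Options('platypus', ('rhinoceros', ['rhino']))
+  assert kind.Validate('rhino') == 'rhinoceros'   # alias maps to original
+  assert kind.Validate('platypus') == 'platypus'  # plain option unchanged
+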
+
+class Optional(Validator):
+ """Definition of optional attributes.
+
+ Optional values are attributes which can be set to None or left
+ unset. All values in a basic Validated class are set to None
+  at initialization. Failure to assign a value to a non-optional attribute
+  will result in a validation error when calling CheckInitialized.
+ """
+
+ def __init__(self, validator, default=None):
+ """Initializer.
+
+ This constructor will make a few guesses about the value passed in
+ as the validator:
+
+ - If the validator argument is a type, it automatically creates a Type
+ validator around it.
+
+ - If the validator argument is a list or tuple, it automatically
+ creates an Options validator around it.
+
+ Args:
+ validator: Optional validation condition.
+
+ Raises:
+ AttributeDefinitionError if validator is not callable.
+ """
+ self.validator = AsValidator(validator)
+ self.expected_type = self.validator.expected_type
+ self.default = default
+
+ def Validate(self, value):
+ """Optionally require a value.
+
+    Normal validators do not accept None. This accepts None on behalf
+    of the contained validator.
+
+ Args:
+ value: Value to be validated as optional.
+
+ Returns:
+ None if value is None, else results of contained validation.
+ """
+ if value is None:
+ return None
+ return self.validator(value)
+
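+# Editor's illustrative sketch (hypothetical, not part of the SDK): Optional
+# wraps a bare type in a Type validator via AsValidator and lets None through.
+def _example_optional():
+  """Sketch: Optional(int) accepts None and delegates other values."""
+  maybe_age = Optional(int)
+  assert maybe_age.Validate(None) is None
+  assert maybe_age.Validate(42) == 42   # handled by the wrapped Type(int)
+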
+
+class Regex(Validator):
+ """Regular expression validator.
+
+ Regular expression validator always converts value to string. Note that
+ matches must be exact. Partial matches will not validate. For example:
+
+ class ClassDescr(Validated):
+ ATTRIBUTES = { 'name': Regex(r'[a-zA-Z_][a-zA-Z_0-9]*'),
+ 'parent': Type(type),
+ }
+
+ Alternatively, any attribute that is defined as a string is automatically
+ interpreted to be of type Regex. It is possible to specify unicode regex
+ strings as well. This approach is slightly less efficient, but usually
+ is not significant unless parsing large amounts of data:
+
+ class ClassDescr(Validated):
+ ATTRIBUTES = { 'name': r'[a-zA-Z_][a-zA-Z_0-9]*',
+ 'parent': Type(type),
+ }
+
+ # This will raise a ValidationError exception.
+    ClassDescr(name='AName with space', parent=AnotherClass)
+ """
+
+ def __init__(self, regex, string_type=unicode, default=None):
+ """Initialized regex validator.
+
+ Args:
+ regex: Regular expression string to use for comparison.
+
+ Raises:
+ AttributeDefinitionError if string_type is not a kind of string.
+ """
+ super(Regex, self).__init__(default)
+ if (not issubclass(string_type, basestring) or
+ string_type is basestring):
+ raise AttributeDefinitionError(
+ 'Regex fields must be a string type not %s.' % str(string_type))
+ if isinstance(regex, basestring):
+ self.re = re.compile('^%s$' % regex)
+ else:
+ raise AttributeDefinitionError(
+ 'Regular expression must be string. Found %s.' % str(regex))
+
+ self.expected_type = string_type
+
+ def Validate(self, value):
+ """Does validation of a string against a regular expression.
+
+ Args:
+ value: String to match against regular expression.
+
+ Raises:
+ ValidationError when value does not match regular expression or
+ when value does not match provided string type.
+ """
+ if issubclass(self.expected_type, str):
+ cast_value = TYPE_STR(value)
+ else:
+ cast_value = TYPE_UNICODE(value)
+
+ if self.re.match(cast_value) is None:
+ raise ValidationError('Value \'%s\' does not match expression \'%s\''
+ % (value, self.re.pattern))
+ return cast_value
+
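+# Editor's illustrative sketch (hypothetical, not part of the SDK): the
+# pattern is wrapped in '^...$', so only full matches validate.
+def _example_regex():
+  """Sketch: full matches pass; partial matches raise ValidationError."""
+  ident = Regex(r'[a-zA-Z_][a-zA-Z_0-9]*')
+  assert ident.Validate('foo_bar') == u'foo_bar'  # cast to unicode by default
+  try:
+    ident.Validate('has space')   # only 'has' matches, so validation fails
+  except ValidationError:
+    pass
+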
+
+class _RegexStrValue(object):
+ """Simulates the regex object to support recomplation when necessary.
+
+ Used by the RegexStr class to dynamically build and recompile regular
+ expression attributes of a validated object. This object replaces the normal
+ object returned from re.compile which is immutable.
+
+  When the value of this object is a string, that string is simply used as the
+  regular expression when recompilation is needed. If the state of this object
+  is a list of strings, the strings are joined into a single 'or' expression.
+ """
+
+ def __init__(self, attribute, value):
+ """Initialize recompilable regex value.
+
+ Args:
+ attribute: Attribute validator associated with this regex value.
+      value: Initial underlying Python value for regex string. Either a
+        single regex string or a list of regex strings.
+ """
+ self.__attribute = attribute
+ self.__value = value
+ self.__regex = None
+
+ def __AsString(self, value):
+ """Convert a value to appropriate string.
+
+ Returns:
+ String version of value with all carriage returns and line feeds removed.
+ """
+ if issubclass(self.__attribute.expected_type, str):
+ cast_value = TYPE_STR(value)
+ else:
+ cast_value = TYPE_UNICODE(value)
+
+ cast_value = cast_value.replace('\n', '')
+ cast_value = cast_value.replace('\r', '')
+ return cast_value
+
+ def __BuildRegex(self):
+ """Build regex string from state.
+
+    Returns:
+      String version of the regular expression. A sequence value is
+      constructed as one larger regular expression in which each regex in
+      the list is joined with the others as a single 'or' expression.
+ """
+ if isinstance(self.__value, list):
+ value_list = self.__value
+ sequence = True
+ else:
+ value_list = [self.__value]
+ sequence = False
+
+ regex_list = []
+ for item in value_list:
+ regex_list.append(self.__AsString(item))
+
+ if sequence:
+ return '|'.join('(?:%s)' % item for item in regex_list)
+ else:
+ return regex_list[0]
+
+ def __Compile(self):
+ """Build regular expression object from state.
+
+ Returns:
+ Compiled regular expression based on internal value.
+ """
+ regex = self.__BuildRegex()
+ try:
+ return re.compile(regex)
+ except re.error, e:
+ raise ValidationError('Value \'%s\' does not compile: %s' % (regex, e), e)
+
+ @property
+ def regex(self):
+ """Compiled regular expression as described by underlying value."""
+ return self.__Compile()
+
+  def match(self, value):
+    """Match value against internal regular expression.
+
+    Returns:
+      Match object if value matches the regular expression built from the
+      underlying value, else None.
+    """
+    return re.match(self.__BuildRegex(), value)
+
+ def Validate(self):
+ """Ensure that regex string compiles."""
+ self.__Compile()
+
+ def __str__(self):
+ """Regular expression string as described by underlying value."""
+ return self.__BuildRegex()
+
+ def __eq__(self, other):
+ """Comparison against other regular expression string values."""
+ if isinstance(other, _RegexStrValue):
+ return self.__BuildRegex() == other.__BuildRegex()
+ return str(self) == other
+
+ def __ne__(self, other):
+ """Inequality operator for regular expression string value."""
+ return not self.__eq__(other)
+
+
+class RegexStr(Validator):
+ """Validates that a string can compile as a regex without errors.
+
+ Use this validator when the value of a field should be a regex. That
+ means that the value must be a string that can be compiled by re.compile().
+  The attribute will then behave like a compiled re object.
+ """
+
+ def __init__(self, string_type=unicode, default=None):
+ """Initialized regex validator.
+
+ Raises:
+ AttributeDefinitionError if string_type is not a kind of string.
+ """
+ if default is not None:
+ default = _RegexStrValue(self, default)
+ re.compile(str(default))
+ super(RegexStr, self).__init__(default)
+ if (not issubclass(string_type, basestring) or
+ string_type is basestring):
+ raise AttributeDefinitionError(
+ 'RegexStr fields must be a string type not %s.' % str(string_type))
+
+ self.expected_type = string_type
+
+ def Validate(self, value):
+ """Validates that the string compiles as a regular expression.
+
+ Because the regular expression might have been expressed as a multiline
+ string, this function also strips newlines out of value.
+
+ Args:
+ value: String to compile as a regular expression.
+
+    Returns:
+      A _RegexStrValue instance that compiles value on demand.
+
+    Raises:
+      ValidationError when value does not compile as a regular expression
+      or does not match the provided string type.
+ """
+ if isinstance(value, _RegexStrValue):
+ return value
+ value = _RegexStrValue(self, value)
+ value.Validate()
+ return value
+
+ def ToValue(self, value):
+ """Returns the RE pattern for this validator."""
+ return str(value)
+
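+# Editor's illustrative sketch (hypothetical, not part of the SDK): RegexStr
+# wraps the value in _RegexStrValue, and a list value is joined into a single
+# '(?:...)|(?:...)' expression when the regex is built.
+def _example_regex_str():
+  """Sketch: list values become one 'or' expression; match works lazily."""
+  validator = RegexStr()
+  value = validator.Validate(['foo', 'bar.*'])
+  assert str(value) == '(?:foo)|(?:bar.*)'
+  assert value.match('barbell') is not None
+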
+
+class Range(Validator):
+ """Validates that numbers fall within the correct range.
+
+  In theory this class can be emulated using Options; however, error
+  messages generated from that class would not be very intelligible.
+  This class does essentially the same thing, but knows the intended
+  numeric range.
+
+  The range class also supports floats and any other type that
+  implements ordering.
+
+ The range is inclusive, meaning 3 is considered in the range
+ in Range(1,3).
+ """
+
+ def __init__(self, minimum, maximum, range_type=int, default=None):
+ """Initializer for range.
+
+ Args:
+ minimum: Minimum for attribute.
+ maximum: Maximum for attribute.
+ range_type: Type of field. Defaults to int.
+ """
+ super(Range, self).__init__(default)
+ if not isinstance(minimum, range_type):
+ raise AttributeDefinitionError(
+ 'Minimum value must be of type %s, instead it is %s (%s).' %
+ (str(range_type), str(type(minimum)), str(minimum)))
+ if not isinstance(maximum, range_type):
+ raise AttributeDefinitionError(
+ 'Maximum value must be of type %s, instead it is %s (%s).' %
+ (str(range_type), str(type(maximum)), str(maximum)))
+
+ self.minimum = minimum
+ self.maximum = maximum
+ self.expected_type = range_type
+ self._type_validator = Type(range_type)
+
+ def Validate(self, value):
+ """Validate that value is within range.
+
+ Validates against range-type then checks the range.
+
+ Args:
+ value: Value to validate.
+
+ Raises:
+      ValidationError when value is out of range. ValidationError when value
+      is not of the same range type.
+ """
+ cast_value = self._type_validator.Validate(value)
+ if cast_value < self.minimum or cast_value > self.maximum:
+ raise ValidationError('Value \'%s\' is out of range %s - %s'
+ % (str(value),
+ str(self.minimum),
+ str(self.maximum)))
+ return cast_value
+
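+# Editor's illustrative sketch (hypothetical, not part of the SDK): both
+# bounds are inclusive, and the value is type-validated before comparison.
+def _example_range():
+  """Sketch: Range(1, 3) accepts 3 but rejects 4."""
+  rating = Range(1, 3)
+  assert rating.Validate(3) == 3   # inclusive upper bound
+  try:
+    rating.Validate(4)
+  except ValidationError:
+    pass
+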
+
+class Repeated(Validator):
+ """Repeated field validator.
+
+  Indicates that the attribute is expected to be a repeated value, i.e.,
+ a sequence. This adds additional validation over just Type(list)
+ in that it retains information about what can be stored in the list by
+ use of its constructor field.
+ """
+
+ def __init__(self, constructor, default=None):
+ """Initializer for repeated field.
+
+ Args:
+ constructor: Type used for verifying elements of sequence attribute.
+ """
+ super(Repeated, self).__init__(default)
+ self.constructor = constructor
+ self.expected_type = list
+
+ def Validate(self, value):
+ """Do validation of sequence.
+
+ Value must be a list and all elements must be of type 'constructor'.
+
+ Args:
+ value: Value to validate.
+
+ Raises:
+      ValidationError if value is None, is not a list, or if one of its
+      elements is of the wrong type.
+ """
+ if not isinstance(value, list):
+      raise ValidationError('Repeated fields must be a sequence, '
+                            'but found \'%s\'.' % value)
+
+ for item in value:
+ if isinstance(self.constructor, Validator):
+ item = self.constructor.Validate(item)
+ elif not isinstance(item, self.constructor):
+ raise ValidationError('Repeated items must be %s, but found \'%s\'.'
+ % (str(self.constructor), str(item)))
+
+ return value
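+
+
+# Editor's illustrative sketch (hypothetical, not part of the SDK): every
+# element of the list must satisfy the constructor type (or validator).
+def _example_repeated():
+  """Sketch: Repeated(str) accepts ['a', 'b'] but rejects ['a', 1]."""
+  tags = Repeated(str)
+  assert tags.Validate(['a', 'b']) == ['a', 'b']
+  try:
+    tags.Validate(['a', 1])   # 1 is not a str
+  except ValidationError:
+    pass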
diff --git a/google_appengine/google/appengine/api/validation.pyc b/google_appengine/google/appengine/api/validation.pyc
new file mode 100644
index 0000000..ccfed3e
--- /dev/null
+++ b/google_appengine/google/appengine/api/validation.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/xmpp/__init__.py b/google_appengine/google/appengine/api/xmpp/__init__.py
new file mode 100755
index 0000000..8cc477a
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/__init__.py
@@ -0,0 +1,332 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""XMPP API.
+
+This module allows App Engine apps to interact with a bot representing that
+app on the Google Talk network.
+
+Functions defined in this module:
+ get_presence: Gets the presence for a JID.
+ send_message: Sends a chat message to any number of JIDs.
+ send_invite: Sends an invitation to chat to a JID.
+
+Classes defined in this module:
+ Message: A class to encapsulate received messages.
+"""
+
+
+
+from google.appengine.api import apiproxy_stub_map
+from google.appengine.api.xmpp import xmpp_service_pb
+from google.appengine.runtime import apiproxy_errors
+
+
+NO_ERROR = xmpp_service_pb.XmppMessageResponse.NO_ERROR
+INVALID_JID = xmpp_service_pb.XmppMessageResponse.INVALID_JID
+OTHER_ERROR = xmpp_service_pb.XmppMessageResponse.OTHER_ERROR
+
+
+MESSAGE_TYPE_NONE = ""
+MESSAGE_TYPE_CHAT = "chat"
+MESSAGE_TYPE_ERROR = "error"
+MESSAGE_TYPE_GROUPCHAT = "groupchat"
+MESSAGE_TYPE_HEADLINE = "headline"
+MESSAGE_TYPE_NORMAL = "normal"
+
+_VALID_MESSAGE_TYPES = frozenset([MESSAGE_TYPE_NONE, MESSAGE_TYPE_CHAT,
+ MESSAGE_TYPE_ERROR, MESSAGE_TYPE_GROUPCHAT,
+ MESSAGE_TYPE_HEADLINE, MESSAGE_TYPE_NORMAL])
+
+
+class Error(Exception):
+ """Base error class for this module."""
+
+
+class InvalidJidError(Error):
+ """Error that indicates a request for an invalid JID."""
+
+
+class InvalidTypeError(Error):
+ """Error that indicates a send message request has an invalid type."""
+
+
+class InvalidXmlError(Error):
+ """Error that indicates a send message request has invalid XML."""
+
+
+class NoBodyError(Error):
+ """Error that indicates a send message request has no body."""
+
+
+class InvalidMessageError(Error):
+ """Error that indicates a received message was invalid or incomplete."""
+
+
+def get_presence(jid, from_jid=None):
+ """Gets the presence for a JID.
+
+ Args:
+ jid: The JID of the contact whose presence is requested.
+    from_jid: The optional custom JID to use for sending. The default,
+      <appid>@appspot.com, is also supported as an explicit value. Custom
+      JIDs can be of the form <anything>@<appid>.appspotchat.com.
+
+ Returns:
+    bool, whether the user is online.
+
+ Raises:
+ InvalidJidError if any of the JIDs passed are invalid.
+ Error if an unspecified error happens processing the request.
+ """
+ if not jid:
+ raise InvalidJidError()
+
+ request = xmpp_service_pb.PresenceRequest()
+ response = xmpp_service_pb.PresenceResponse()
+
+ request.set_jid(_to_str(jid))
+ if from_jid:
+ request.set_from_jid(_to_str(from_jid))
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("xmpp",
+ "GetPresence",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_JID):
+ raise InvalidJidError()
+ else:
+ raise Error()
+
+ return bool(response.is_available())
+
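+# Editor's illustrative sketch (hypothetical usage, not part of the SDK); the
+# JID below is a made-up placeholder.
+def _example_check_presence():
+  """Sketch: only message a contact who is currently online."""
+  if get_presence('someone@gmail.com'):
+    send_message('someone@gmail.com', 'Hello while you are online!')
+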
+
+def send_invite(jid, from_jid=None):
+ """Sends an invitation to chat to a JID.
+
+ Args:
+ jid: The JID of the contact to invite.
+    from_jid: The optional custom JID to use for sending. The default,
+      <appid>@appspot.com, is also supported as an explicit value. Custom
+      JIDs can be of the form <anything>@<appid>.appspotchat.com.
+
+ Raises:
+ InvalidJidError if the JID passed is invalid.
+ Error if an unspecified error happens processing the request.
+ """
+ if not jid:
+ raise InvalidJidError()
+
+ request = xmpp_service_pb.XmppInviteRequest()
+ response = xmpp_service_pb.XmppInviteResponse()
+
+ request.set_jid(_to_str(jid))
+ if from_jid:
+ request.set_from_jid(_to_str(from_jid))
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("xmpp",
+ "SendInvite",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_JID):
+ raise InvalidJidError()
+ else:
+ raise Error()
+
+ return
+
+
+def send_message(jids, body, from_jid=None, message_type=MESSAGE_TYPE_CHAT,
+ raw_xml=False):
+ """Sends a chat message to a list of JIDs.
+
+  Args:
+    jids: A list of JIDs to send the message to, or a single JID to send the
+      message to.
+    body: The body of the message.
+    from_jid: The optional custom JID to use for sending. The default,
+      <appid>@appspot.com, is also supported as an explicit value. Custom
+      JIDs can be of the form <anything>@<appid>.appspotchat.com.
+ message_type: Optional type of the message. Should be one of the types
+ specified in RFC 3921, section 2.1.1. An empty string will result in a
+ message stanza without a type attribute. For convenience, all of the
+ valid types are in the MESSAGE_TYPE_* constants in this file. The
+      default is MESSAGE_TYPE_CHAT. Anything else will raise an exception.
+    raw_xml: Optionally specifies that the body should be interpreted as XML.
+      If this is false, the contents of the body will be escaped and placed
+      inside a body element of the message. If this is true, the contents
+      will be made children of the message stanza.
+
+ Returns:
+ list, A list of statuses, one for each JID, corresponding to the result of
+ sending the message to that JID. Or, if a single JID was passed in,
+ returns the status directly.
+
+ Raises:
+ InvalidJidError if there is no valid JID in the list.
+ InvalidTypeError if the type argument is invalid.
+ InvalidXmlError if the body is malformed XML and raw_xml is True.
+ NoBodyError if there is no body.
+ Error if another error occurs processing the request.
+ """
+ request = xmpp_service_pb.XmppMessageRequest()
+ response = xmpp_service_pb.XmppMessageResponse()
+
+ if not body:
+ raise NoBodyError()
+
+ if not jids:
+ raise InvalidJidError()
+
+  if message_type not in _VALID_MESSAGE_TYPES:
+ raise InvalidTypeError()
+
+ single_jid = False
+ if isinstance(jids, basestring):
+ single_jid = True
+ jids = [jids]
+
+ for jid in jids:
+ if not jid:
+ raise InvalidJidError()
+ request.add_jid(_to_str(jid))
+
+ request.set_body(_to_str(body))
+ request.set_type(_to_str(message_type))
+ request.set_raw_xml(raw_xml)
+ if from_jid:
+ request.set_from_jid(_to_str(from_jid))
+
+ try:
+ apiproxy_stub_map.MakeSyncCall("xmpp",
+ "SendMessage",
+ request,
+ response)
+ except apiproxy_errors.ApplicationError, e:
+ if (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_JID):
+ raise InvalidJidError()
+ elif (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_TYPE):
+ raise InvalidTypeError()
+ elif (e.application_error ==
+ xmpp_service_pb.XmppServiceError.INVALID_XML):
+ raise InvalidXmlError()
+ elif (e.application_error ==
+ xmpp_service_pb.XmppServiceError.NO_BODY):
+ raise NoBodyError()
+ raise Error()
+
+ if single_jid:
+ return response.status_list()[0]
+ return response.status_list()
+
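+# Editor's illustrative sketch (hypothetical usage, not part of the SDK); the
+# JIDs below are made-up placeholders.
+def _example_send_message():
+  """Sketch: a single JID yields one status; a list yields a status list."""
+  status = send_message('a@gmail.com', 'Hi!')
+  if status != NO_ERROR:
+    pass  # e.g. INVALID_JID or OTHER_ERROR
+  statuses = send_message(['a@gmail.com', 'b@gmail.com'], 'Hi, both of you!')
+  return statuses
+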
+
+class Message(object):
+ """Encapsulates an XMPP message received by the application."""
+
+ def __init__(self, vars):
+ """Constructs a new XMPP Message from an HTTP request.
+
+ Args:
+ vars: A dict-like object to extract message arguments from.
+ """
+ try:
+ self.__sender = vars["from"]
+ self.__to = vars["to"]
+ self.__body = vars["body"]
+ except KeyError, e:
+ raise InvalidMessageError(e[0])
+ self.__command = None
+ self.__arg = None
+
+ @property
+ def sender(self):
+ return self.__sender
+
+ @property
+ def to(self):
+ return self.__to
+
+ @property
+ def body(self):
+ return self.__body
+
+ def __parse_command(self):
+    if self.__arg is not None:
+ return
+
+ body = self.__body
+ if body.startswith('\\'):
+ body = '/' + body[1:]
+
+ self.__arg = ''
+ if body.startswith('/'):
+ parts = body.split(' ', 1)
+ self.__command = parts[0][1:]
+ if len(parts) > 1:
+ self.__arg = parts[1].strip()
+ else:
+ self.__arg = self.__body.strip()
+
+ @property
+ def command(self):
+ self.__parse_command()
+ return self.__command
+
+ @property
+ def arg(self):
+ self.__parse_command()
+ return self.__arg
+
+ def reply(self, body, message_type=MESSAGE_TYPE_CHAT, raw_xml=False,
+ send_message=send_message):
+ """Convenience function to reply to a message.
+
+ Args:
+      body: The body of the message.
+ message_type, raw_xml: As per send_message.
+ send_message: Used for testing.
+
+ Returns:
+ A status code as per send_message.
+
+ Raises:
+ See send_message.
+ """
+ return send_message([self.sender], body, from_jid=self.to,
+ message_type=message_type, raw_xml=raw_xml)
+
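+# Editor's illustrative sketch (hypothetical, not part of the SDK): a body of
+# '/roll 2d6' parses into command 'roll' and arg '2d6'; a leading backslash is
+# treated like a slash.
+def _example_message_command():
+  """Sketch: command parsing on a hand-built Message."""
+  msg = Message({'from': 'user@gmail.com', 'to': 'app@appspot.com',
+                 'body': '/roll 2d6'})
+  assert msg.command == 'roll'
+  assert msg.arg == '2d6'
+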
+
+def _to_str(value):
+ """Helper function to make sure unicode values converted to utf-8
+
+ Args:
+ value: str or unicode to convert to utf-8.
+
+ Returns:
+    UTF-8 encoded str if value is unicode; otherwise value unchanged.
+ """
+ if isinstance(value, unicode):
+ return value.encode('utf-8')
+ return value
diff --git a/google_appengine/google/appengine/api/xmpp/__init__.pyc b/google_appengine/google/appengine/api/xmpp/__init__.pyc
new file mode 100644
index 0000000..fd06892
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/__init__.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py b/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py
new file mode 100644
index 0000000..f77e50b
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.py
@@ -0,0 +1,826 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from google.net.proto import ProtocolBuffer
+import array
+import dummy_thread as thread
+
+__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
+ unusednames=printElemNumber,debug_strs no-special"""
+
+class XmppServiceError(ProtocolBuffer.ProtocolMessage):
+
+ UNSPECIFIED_ERROR = 1
+ INVALID_JID = 2
+ NO_BODY = 3
+ INVALID_XML = 4
+ INVALID_TYPE = 5
+
+ _ErrorCode_NAMES = {
+ 1: "UNSPECIFIED_ERROR",
+ 2: "INVALID_JID",
+ 3: "NO_BODY",
+ 4: "INVALID_XML",
+ 5: "INVALID_TYPE",
+ }
+
+ def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
+ ErrorCode_Name = classmethod(ErrorCode_Name)
+
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class PresenceRequest(ProtocolBuffer.ProtocolMessage):
+ has_jid_ = 0
+ jid_ = ""
+ has_from_jid_ = 0
+ from_jid_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def jid(self): return self.jid_
+
+ def set_jid(self, x):
+ self.has_jid_ = 1
+ self.jid_ = x
+
+ def clear_jid(self):
+ if self.has_jid_:
+ self.has_jid_ = 0
+ self.jid_ = ""
+
+ def has_jid(self): return self.has_jid_
+
+ def from_jid(self): return self.from_jid_
+
+ def set_from_jid(self, x):
+ self.has_from_jid_ = 1
+ self.from_jid_ = x
+
+ def clear_from_jid(self):
+ if self.has_from_jid_:
+ self.has_from_jid_ = 0
+ self.from_jid_ = ""
+
+ def has_from_jid(self): return self.has_from_jid_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_jid()): self.set_jid(x.jid())
+ if (x.has_from_jid()): self.set_from_jid(x.from_jid())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_jid_ != x.has_jid_: return 0
+ if self.has_jid_ and self.jid_ != x.jid_: return 0
+ if self.has_from_jid_ != x.has_from_jid_: return 0
+ if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_jid_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: jid not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.jid_))
+ if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_jid()
+ self.clear_from_jid()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.jid_)
+ if (self.has_from_jid_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.from_jid_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_jid(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_from_jid(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_jid_: res+=prefix+("jid: %s\n" % self.DebugFormatString(self.jid_))
+ if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kjid = 1
+ kfrom_jid = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "jid",
+ 2: "from_jid",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class PresenceResponse(ProtocolBuffer.ProtocolMessage):
+
+ NORMAL = 0
+ AWAY = 1
+ DO_NOT_DISTURB = 2
+ CHAT = 3
+ EXTENDED_AWAY = 4
+
+ _SHOW_NAMES = {
+ 0: "NORMAL",
+ 1: "AWAY",
+ 2: "DO_NOT_DISTURB",
+ 3: "CHAT",
+ 4: "EXTENDED_AWAY",
+ }
+
+ def SHOW_Name(cls, x): return cls._SHOW_NAMES.get(x, "")
+ SHOW_Name = classmethod(SHOW_Name)
+
+ has_is_available_ = 0
+ is_available_ = 0
+ has_presence_ = 0
+ presence_ = 0
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def is_available(self): return self.is_available_
+
+ def set_is_available(self, x):
+ self.has_is_available_ = 1
+ self.is_available_ = x
+
+ def clear_is_available(self):
+ if self.has_is_available_:
+ self.has_is_available_ = 0
+ self.is_available_ = 0
+
+ def has_is_available(self): return self.has_is_available_
+
+ def presence(self): return self.presence_
+
+ def set_presence(self, x):
+ self.has_presence_ = 1
+ self.presence_ = x
+
+ def clear_presence(self):
+ if self.has_presence_:
+ self.has_presence_ = 0
+ self.presence_ = 0
+
+ def has_presence(self): return self.has_presence_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_is_available()): self.set_is_available(x.is_available())
+ if (x.has_presence()): self.set_presence(x.presence())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_is_available_ != x.has_is_available_: return 0
+ if self.has_is_available_ and self.is_available_ != x.is_available_: return 0
+ if self.has_presence_ != x.has_presence_: return 0
+ if self.has_presence_ and self.presence_ != x.presence_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_is_available_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: is_available not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ if (self.has_presence_): n += 1 + self.lengthVarInt64(self.presence_)
+ return n + 2
+
+ def Clear(self):
+ self.clear_is_available()
+ self.clear_presence()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(8)
+ out.putBoolean(self.is_available_)
+ if (self.has_presence_):
+ out.putVarInt32(16)
+ out.putVarInt32(self.presence_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.set_is_available(d.getBoolean())
+ continue
+ if tt == 16:
+ self.set_presence(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_is_available_: res+=prefix+("is_available: %s\n" % self.DebugFormatBool(self.is_available_))
+ if self.has_presence_: res+=prefix+("presence: %s\n" % self.DebugFormatInt32(self.presence_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kis_available = 1
+ kpresence = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "is_available",
+ 2: "presence",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ 2: ProtocolBuffer.Encoder.NUMERIC,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class XmppMessageRequest(ProtocolBuffer.ProtocolMessage):
+ has_body_ = 0
+ body_ = ""
+ has_raw_xml_ = 0
+ raw_xml_ = 0
+ has_type_ = 0
+ type_ = "chat"
+ has_from_jid_ = 0
+ from_jid_ = ""
+
+ def __init__(self, contents=None):
+ self.jid_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def jid_size(self): return len(self.jid_)
+ def jid_list(self): return self.jid_
+
+ def jid(self, i):
+ return self.jid_[i]
+
+ def set_jid(self, i, x):
+ self.jid_[i] = x
+
+ def add_jid(self, x):
+ self.jid_.append(x)
+
+ def clear_jid(self):
+ self.jid_ = []
+
+ def body(self): return self.body_
+
+ def set_body(self, x):
+ self.has_body_ = 1
+ self.body_ = x
+
+ def clear_body(self):
+ if self.has_body_:
+ self.has_body_ = 0
+ self.body_ = ""
+
+ def has_body(self): return self.has_body_
+
+ def raw_xml(self): return self.raw_xml_
+
+ def set_raw_xml(self, x):
+ self.has_raw_xml_ = 1
+ self.raw_xml_ = x
+
+ def clear_raw_xml(self):
+ if self.has_raw_xml_:
+ self.has_raw_xml_ = 0
+ self.raw_xml_ = 0
+
+ def has_raw_xml(self): return self.has_raw_xml_
+
+ def type(self): return self.type_
+
+ def set_type(self, x):
+ self.has_type_ = 1
+ self.type_ = x
+
+ def clear_type(self):
+ if self.has_type_:
+ self.has_type_ = 0
+ self.type_ = "chat"
+
+ def has_type(self): return self.has_type_
+
+ def from_jid(self): return self.from_jid_
+
+ def set_from_jid(self, x):
+ self.has_from_jid_ = 1
+ self.from_jid_ = x
+
+ def clear_from_jid(self):
+ if self.has_from_jid_:
+ self.has_from_jid_ = 0
+ self.from_jid_ = ""
+
+ def has_from_jid(self): return self.has_from_jid_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.jid_size()): self.add_jid(x.jid(i))
+ if (x.has_body()): self.set_body(x.body())
+ if (x.has_raw_xml()): self.set_raw_xml(x.raw_xml())
+ if (x.has_type()): self.set_type(x.type())
+ if (x.has_from_jid()): self.set_from_jid(x.from_jid())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.jid_) != len(x.jid_): return 0
+ for e1, e2 in zip(self.jid_, x.jid_):
+ if e1 != e2: return 0
+ if self.has_body_ != x.has_body_: return 0
+ if self.has_body_ and self.body_ != x.body_: return 0
+ if self.has_raw_xml_ != x.has_raw_xml_: return 0
+ if self.has_raw_xml_ and self.raw_xml_ != x.raw_xml_: return 0
+ if self.has_type_ != x.has_type_: return 0
+ if self.has_type_ and self.type_ != x.type_: return 0
+ if self.has_from_jid_ != x.has_from_jid_: return 0
+ if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_body_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: body not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.jid_)
+ for i in xrange(len(self.jid_)): n += self.lengthString(len(self.jid_[i]))
+ n += self.lengthString(len(self.body_))
+ if (self.has_raw_xml_): n += 2
+ if (self.has_type_): n += 1 + self.lengthString(len(self.type_))
+ if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_jid()
+ self.clear_body()
+ self.clear_raw_xml()
+ self.clear_type()
+ self.clear_from_jid()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.jid_)):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.jid_[i])
+ out.putVarInt32(18)
+ out.putPrefixedString(self.body_)
+ if (self.has_raw_xml_):
+ out.putVarInt32(24)
+ out.putBoolean(self.raw_xml_)
+ if (self.has_type_):
+ out.putVarInt32(34)
+ out.putPrefixedString(self.type_)
+ if (self.has_from_jid_):
+ out.putVarInt32(42)
+ out.putPrefixedString(self.from_jid_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.add_jid(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_body(d.getPrefixedString())
+ continue
+ if tt == 24:
+ self.set_raw_xml(d.getBoolean())
+ continue
+ if tt == 34:
+ self.set_type(d.getPrefixedString())
+ continue
+ if tt == 42:
+ self.set_from_jid(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.jid_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("jid%s: %s\n" % (elm, self.DebugFormatString(e)))
+ cnt+=1
+ if self.has_body_: res+=prefix+("body: %s\n" % self.DebugFormatString(self.body_))
+ if self.has_raw_xml_: res+=prefix+("raw_xml: %s\n" % self.DebugFormatBool(self.raw_xml_))
+ if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
+ if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kjid = 1
+ kbody = 2
+ kraw_xml = 3
+ ktype = 4
+ kfrom_jid = 5
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "jid",
+ 2: "body",
+ 3: "raw_xml",
+ 4: "type",
+ 5: "from_jid",
+ }, 5)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ 3: ProtocolBuffer.Encoder.NUMERIC,
+ 4: ProtocolBuffer.Encoder.STRING,
+ 5: ProtocolBuffer.Encoder.STRING,
+ }, 5, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class XmppMessageResponse(ProtocolBuffer.ProtocolMessage):
+
+ NO_ERROR = 0
+ INVALID_JID = 1
+ OTHER_ERROR = 2
+
+ _XmppMessageStatus_NAMES = {
+ 0: "NO_ERROR",
+ 1: "INVALID_JID",
+ 2: "OTHER_ERROR",
+ }
+
+ def XmppMessageStatus_Name(cls, x): return cls._XmppMessageStatus_NAMES.get(x, "")
+ XmppMessageStatus_Name = classmethod(XmppMessageStatus_Name)
+
+
+ def __init__(self, contents=None):
+ self.status_ = []
+ if contents is not None: self.MergeFromString(contents)
+
+ def status_size(self): return len(self.status_)
+ def status_list(self): return self.status_
+
+ def status(self, i):
+ return self.status_[i]
+
+ def set_status(self, i, x):
+ self.status_[i] = x
+
+ def add_status(self, x):
+ self.status_.append(x)
+
+ def clear_status(self):
+ self.status_ = []
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ for i in xrange(x.status_size()): self.add_status(x.status(i))
+
+ def Equals(self, x):
+ if x is self: return 1
+ if len(self.status_) != len(x.status_): return 0
+ for e1, e2 in zip(self.status_, x.status_):
+ if e1 != e2: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += 1 * len(self.status_)
+ for i in xrange(len(self.status_)): n += self.lengthVarInt64(self.status_[i])
+ return n + 0
+
+ def Clear(self):
+ self.clear_status()
+
+ def OutputUnchecked(self, out):
+ for i in xrange(len(self.status_)):
+ out.putVarInt32(8)
+ out.putVarInt32(self.status_[i])
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 8:
+ self.add_status(d.getVarInt32())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ cnt=0
+ for e in self.status_:
+ elm=""
+ if printElemNumber: elm="(%d)" % cnt
+ res+=prefix+("status%s: %s\n" % (elm, self.DebugFormatInt32(e)))
+ cnt+=1
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kstatus = 1
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "status",
+ }, 1)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.NUMERIC,
+ }, 1, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class XmppInviteRequest(ProtocolBuffer.ProtocolMessage):
+ has_jid_ = 0
+ jid_ = ""
+ has_from_jid_ = 0
+ from_jid_ = ""
+
+ def __init__(self, contents=None):
+ if contents is not None: self.MergeFromString(contents)
+
+ def jid(self): return self.jid_
+
+ def set_jid(self, x):
+ self.has_jid_ = 1
+ self.jid_ = x
+
+ def clear_jid(self):
+ if self.has_jid_:
+ self.has_jid_ = 0
+ self.jid_ = ""
+
+ def has_jid(self): return self.has_jid_
+
+ def from_jid(self): return self.from_jid_
+
+ def set_from_jid(self, x):
+ self.has_from_jid_ = 1
+ self.from_jid_ = x
+
+ def clear_from_jid(self):
+ if self.has_from_jid_:
+ self.has_from_jid_ = 0
+ self.from_jid_ = ""
+
+ def has_from_jid(self): return self.has_from_jid_
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+ if (x.has_jid()): self.set_jid(x.jid())
+ if (x.has_from_jid()): self.set_from_jid(x.from_jid())
+
+ def Equals(self, x):
+ if x is self: return 1
+ if self.has_jid_ != x.has_jid_: return 0
+ if self.has_jid_ and self.jid_ != x.jid_: return 0
+ if self.has_from_jid_ != x.has_from_jid_: return 0
+ if self.has_from_jid_ and self.from_jid_ != x.from_jid_: return 0
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ if (not self.has_jid_):
+ initialized = 0
+ if debug_strs is not None:
+ debug_strs.append('Required field: jid not set.')
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ n += self.lengthString(len(self.jid_))
+ if (self.has_from_jid_): n += 1 + self.lengthString(len(self.from_jid_))
+ return n + 1
+
+ def Clear(self):
+ self.clear_jid()
+ self.clear_from_jid()
+
+ def OutputUnchecked(self, out):
+ out.putVarInt32(10)
+ out.putPrefixedString(self.jid_)
+ if (self.has_from_jid_):
+ out.putVarInt32(18)
+ out.putPrefixedString(self.from_jid_)
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if tt == 10:
+ self.set_jid(d.getPrefixedString())
+ continue
+ if tt == 18:
+ self.set_from_jid(d.getPrefixedString())
+ continue
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ if self.has_jid_: res+=prefix+("jid: %s\n" % self.DebugFormatString(self.jid_))
+ if self.has_from_jid_: res+=prefix+("from_jid: %s\n" % self.DebugFormatString(self.from_jid_))
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+ kjid = 1
+ kfrom_jid = 2
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ 1: "jid",
+ 2: "from_jid",
+ }, 2)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ 1: ProtocolBuffer.Encoder.STRING,
+ 2: ProtocolBuffer.Encoder.STRING,
+ }, 2, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+class XmppInviteResponse(ProtocolBuffer.ProtocolMessage):
+
+ def __init__(self, contents=None):
+ pass
+ if contents is not None: self.MergeFromString(contents)
+
+
+ def MergeFrom(self, x):
+ assert x is not self
+
+ def Equals(self, x):
+ if x is self: return 1
+ return 1
+
+ def IsInitialized(self, debug_strs=None):
+ initialized = 1
+ return initialized
+
+ def ByteSize(self):
+ n = 0
+ return n + 0
+
+ def Clear(self):
+ pass
+
+ def OutputUnchecked(self, out):
+ pass
+
+ def TryMerge(self, d):
+ while d.avail() > 0:
+ tt = d.getVarInt32()
+ if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
+ d.skipData(tt)
+
+
+ def __str__(self, prefix="", printElemNumber=0):
+ res=""
+ return res
+
+
+ def _BuildTagLookupTable(sparse, maxtag, default=None):
+ return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
+
+
+ _TEXT = _BuildTagLookupTable({
+ 0: "ErrorCode",
+ }, 0)
+
+ _TYPES = _BuildTagLookupTable({
+ 0: ProtocolBuffer.Encoder.NUMERIC,
+ }, 0, ProtocolBuffer.Encoder.MAX_TYPE)
+
+ _STYLE = """"""
+ _STYLE_CONTENT_TYPE = """"""
+
+__all__ = ['XmppServiceError','PresenceRequest','PresenceResponse','XmppMessageRequest','XmppMessageResponse','XmppInviteRequest','XmppInviteResponse']
diff --git a/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.pyc b/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.pyc
new file mode 100644
index 0000000..6fc90d3
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/xmpp_service_pb.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py b/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py
new file mode 100755
index 0000000..b97dd86
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.py
@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Stub version of the XMPP API, writes messages to logs."""
+
+
+
+
+
+import logging
+import os
+
+from google.appengine.api import apiproxy_stub
+from google.appengine.api import xmpp
+from google.appengine.api.xmpp import xmpp_service_pb
+
+
+class XmppServiceStub(apiproxy_stub.APIProxyStub):
+ """Python only xmpp service stub.
+
+  This stub does not use an XMPP network. It writes messages to the log
+  instead of sending any stanzas.
+ """
+
+ def __init__(self, log=logging.info, service_name='xmpp'):
+ """Initializer.
+
+ Args:
+ log: A logger, used for dependency injection.
+ service_name: Service name expected for all calls.
+ """
+ super(XmppServiceStub, self).__init__(service_name)
+ self.log = log
+
+ def _Dynamic_GetPresence(self, request, response):
+ """Implementation of XmppService::GetPresence.
+
+ Returns online if the first character of the JID comes before 'm' in the
+ alphabet, otherwise returns offline.
+
+ Args:
+ request: A PresenceRequest.
+ response: A PresenceResponse.
+ """
+ jid = request.jid()
+ self._GetFrom(request.from_jid())
+ if jid[0] < 'm':
+ response.set_is_available(True)
+ else:
+ response.set_is_available(False)
+
+ def _Dynamic_SendMessage(self, request, response):
+ """Implementation of XmppService::SendMessage.
+
+ Args:
+ request: An XmppMessageRequest.
+      response: An XmppMessageResponse.
+ """
+ from_jid = self._GetFrom(request.from_jid())
+ self.log('Sending an XMPP Message:')
+ self.log(' From:')
+ self.log(' ' + from_jid)
+ self.log(' Body:')
+ self.log(' ' + request.body())
+ self.log(' Type:')
+ self.log(' ' + request.type())
+ self.log(' Raw Xml:')
+ self.log(' ' + str(request.raw_xml()))
+ self.log(' To JIDs:')
+ for jid in request.jid_list():
+ self.log(' ' + jid)
+
+ for jid in request.jid_list():
+ response.add_status(xmpp_service_pb.XmppMessageResponse.NO_ERROR)
+
+ def _Dynamic_SendInvite(self, request, response):
+ """Implementation of XmppService::SendInvite.
+
+ Args:
+ request: An XmppInviteRequest.
+      response: An XmppInviteResponse.
+ """
+ from_jid = self._GetFrom(request.from_jid())
+ self.log('Sending an XMPP Invite:')
+ self.log(' From:')
+ self.log(' ' + from_jid)
+ self.log(' To: ' + request.jid())
+
+ def _GetFrom(self, requested):
+ """Validates that the from JID is valid.
+
+ Args:
+ requested: The requested from JID.
+
+ Returns:
+ string, The from JID.
+
+ Raises:
+ xmpp.InvalidJidError if the requested JID is invalid.
+ """
+
+ appid = os.environ.get('APPLICATION_ID', '')
+    if not requested:
+ return appid + '@appspot.com/bot'
+
+ node, domain, resource = ('', '', '')
+ at = requested.find('@')
+ if at == -1:
+ self.log('Invalid From JID: No \'@\' character found. JID: %s', requested)
+ raise xmpp.InvalidJidError()
+
+ node = requested[:at]
+ rest = requested[at+1:]
+
+ if rest.find('@') > -1:
+ self.log('Invalid From JID: Second \'@\' character found. JID: %s',
+ requested)
+ raise xmpp.InvalidJidError()
+
+ slash = rest.find('/')
+ if slash == -1:
+ domain = rest
+ resource = 'bot'
+ else:
+ domain = rest[:slash]
+ resource = rest[slash+1:]
+
+ if resource.find('/') > -1:
+ self.log('Invalid From JID: Second \'/\' character found. JID: %s',
+ requested)
+ raise xmpp.InvalidJidError()
+
+ if domain == 'appspot.com' and node == appid:
+ return node + '@' + domain + '/' + resource
+ elif domain == appid + '.appspotchat.com':
+ return node + '@' + domain + '/' + resource
+
+ self.log('Invalid From JID: Must be appid@appspot.com[/resource] or '
+ 'node@appid.appspotchat.com[/resource]. JID: %s', requested)
+ raise xmpp.InvalidJidError()
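+
+
+# Editor's illustrative sketch (hypothetical, not part of the SDK): with
+# APPLICATION_ID set to 'myapp', the stub accepts myapp@appspot.com and
+# anything@myapp.appspotchat.com, defaulting the resource to 'bot'.
+def _example_get_from():
+  """Sketch: JID normalization performed by the stub's _GetFrom."""
+  os.environ['APPLICATION_ID'] = 'myapp'
+  stub = XmppServiceStub()
+  assert stub._GetFrom('') == 'myapp@appspot.com/bot'
+  assert stub._GetFrom('myapp@appspot.com') == 'myapp@appspot.com/bot'
+  assert (stub._GetFrom('room@myapp.appspotchat.com/nick') ==
+          'room@myapp.appspotchat.com/nick')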
diff --git a/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.pyc b/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.pyc
new file mode 100644
index 0000000..8a26f65
--- /dev/null
+++ b/google_appengine/google/appengine/api/xmpp/xmpp_service_stub.pyc
Binary files differ
diff --git a/google_appengine/google/appengine/api/yaml_builder.py b/google_appengine/google/appengine/api/yaml_builder.py
new file mode 100755
index 0000000..71e730c
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_builder.py
@@ -0,0 +1,432 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""PyYAML event builder handler
+
+Receives events from YAML listener and forwards them to a builder
+object so that it can construct a properly structured object.
+"""
+
+
+
+
+
+from google.appengine.api import yaml_errors
+from google.appengine.api import yaml_listener
+
+import yaml
+
+_TOKEN_DOCUMENT = 'document'
+_TOKEN_SEQUENCE = 'sequence'
+_TOKEN_MAPPING = 'mapping'
+_TOKEN_KEY = 'key'
+_TOKEN_VALUES = frozenset((
+ _TOKEN_DOCUMENT,
+ _TOKEN_SEQUENCE,
+ _TOKEN_MAPPING,
+ _TOKEN_KEY))
+
+
+class Builder(object):
+ """Interface for building documents and type from YAML events.
+
+ Implement this interface to create a new builder. Builders are
+ passed to the BuilderHandler and used as a factory and assembler
+ for creating concrete representations of YAML files.
+ """
+
+ def BuildDocument(self):
+ """Build new document.
+
+ The object built by this method becomes the top level entity
+ that the builder handler constructs. The actual type is
+ determined by the sub-class of the Builder class and can essentially
+ be any type at all. This method is always called when the parser
+ encounters the start of a new document.
+
+ Returns:
+ New object instance representing concrete document which is
+ returned to user via BuilderHandler.GetResults().
+ """
+
+ def InitializeDocument(self, document, value):
+ """Initialize document with value from top level of document.
+
+ This method is called when the root document element is encountered at
+ the top level of a YAML document. It should get called immediately
+ after BuildDocument.
+
+ Receiving the None value indicates the empty document.
+
+ Args:
+ document: Document as constructed in BuildDocument.
+ value: Scalar value to initialize the document with.
+ """
+
+ def BuildMapping(self, top_value):
+ """Build a new mapping representation.
+
+ Called when StartMapping event received. Type of object is determined
+ by Builder sub-class.
+
+ Args:
+      top_value: Object which will be the new mapping's parent. Will be the
+        object returned from the previous call to BuildMapping or
+        BuildSequence.
+
+ Returns:
+ Instance of new object that represents a mapping type in target model.
+ """
+
+ def EndMapping(self, top_value, mapping):
+ """Previously constructed mapping scope is at an end.
+
+ Called when the end of a mapping block is encountered. Useful for
+ additional clean up or end of scope validation.
+
+ Args:
+ top_value: Value which is parent of the mapping.
+ mapping: Mapping which is at the end of its scope.
+ """
+
+ def BuildSequence(self, top_value):
+ """Build a new sequence representation.
+
+ Called when StartSequence event received. Type of object is determined
+ by Builder sub-class.
+
+ Args:
+      top_value: Object which will be the new sequence's parent. Will be the
+        object returned from the previous call to BuildMapping or
+        BuildSequence.
+
+ Returns:
+ Instance of new object that represents a sequence type in target model.
+ """
+
+ def EndSequence(self, top_value, sequence):
+ """Previously constructed sequence scope is at an end.
+
+ Called when the end of a sequence block is encountered. Useful for
+ additional clean up or end of scope validation.
+
+ Args:
+ top_value: Value which is parent of the sequence.
+ sequence: Sequence which is at the end of its scope.
+ """
+
+ def MapTo(self, subject, key, value):
+ """Map value to a mapping representation.
+
+ Implementation is defined by sub-class of Builder.
+
+ Args:
+ subject: Object that represents mapping. Value returned from
+ BuildMapping.
+ key: Key used to map value to subject. Can be any scalar value.
+ value: Value which is mapped to subject. Can be any kind of value.
+ """
+
+ def AppendTo(self, subject, value):
+ """Append value to a sequence representation.
+
+ Implementation is defined by sub-class of Builder.
+
+ Args:
+ subject: Object that represents sequence. Value returned from
+ BuildSequence
+ value: Value to be appended to subject. Can be any kind of value.
+ """
+
+
+class BuilderHandler(yaml_listener.EventHandler):
+ """PyYAML event handler used to build objects.
+
+ Maintains state information as it receives parse events so that object
+ nesting is maintained. Uses provided builder object to construct and
+ assemble objects as it goes.
+
+ As it receives events from the YAML parser, it builds a stack of data
+ representing structural tokens. As the scope of documents, mappings
+ and sequences end, those token, value pairs are popped from the top of
+ the stack so that the original scope can resume processing.
+
+  A special case is made for the _KEY token. It represents a temporary
+  value which only occurs inside mappings. It is immediately popped off
+  the stack when its associated value is encountered in the parse stream.
+  It is necessary to do this because the YAML parser does not combine
+  key and value information into a single event.
+ """
+
+ def __init__(self, builder):
+ """Initialization for builder handler.
+
+ Args:
+ builder: Instance of Builder class.
+
+ Raises:
+ ListenerConfigurationError when builder is not a Builder class.
+ """
+ if not isinstance(builder, Builder):
+ raise yaml_errors.ListenerConfigurationError(
+          'Must provide builder of type yaml_builder.Builder')
+ self._builder = builder
+ self._stack = None
+ self._top = None
+ self._results = []
+
+ def _Push(self, token, value):
+ """Push values to stack at start of nesting.
+
+ When a new object scope is beginning, will push the token (type of scope)
+    along with the new object's value, the latter of which is provided through
+ the various build methods of the builder.
+
+ Args:
+ token: Token indicating the type of scope which is being created; must
+ belong to _TOKEN_VALUES.
+ value: Value to associate with given token. Construction of value is
+ determined by the builder provided to this handler at construction.
+ """
+ self._top = (token, value)
+ self._stack.append(self._top)
+
+ def _Pop(self):
+ """Pop values from stack at end of nesting.
+
+ Called to indicate the end of a nested scope.
+
+ Returns:
+ Previously pushed value at the top of the stack.
+ """
+ assert self._stack != [] and self._stack is not None
+ token, value = self._stack.pop()
+ if self._stack:
+ self._top = self._stack[-1]
+ else:
+ self._top = None
+ return value
+
+ def _HandleAnchor(self, event):
+ """Handle anchor attached to event.
+
+    Currently raises an error if an anchor is used. Anchors are used to
+    define a document-wide alias for a given value (scalar, mapping or
+    sequence).
+
+ Args:
+ event: Event which may have anchor property set.
+
+ Raises:
+ NotImplementedError if event attempts to use an anchor.
+ """
+ if hasattr(event, 'anchor') and event.anchor is not None:
+      raise NotImplementedError('Anchors not supported in this handler')
+
+ def _HandleValue(self, value):
+ """Handle given value based on state of parser
+
+ This method handles the various values that are created by the builder
+ at the beginning of scope events (such as mappings and sequences) or
+ when a scalar value is received.
+
+    Method is called when the handler receives a Scalar, MappingStart or
+    SequenceStart event.
+
+ Args:
+ value: Value received as scalar value or newly constructed mapping or
+ sequence instance.
+
+ Raises:
+ InternalError if the building process encounters an unexpected token.
+ This is an indication of an implementation error in BuilderHandler.
+ """
+ token, top_value = self._top
+
+ if token == _TOKEN_KEY:
+ key = self._Pop()
+ mapping_token, mapping = self._top
+ assert _TOKEN_MAPPING == mapping_token
+ self._builder.MapTo(mapping, key, value)
+
+ elif token == _TOKEN_MAPPING:
+ self._Push(_TOKEN_KEY, value)
+
+ elif token == _TOKEN_SEQUENCE:
+ self._builder.AppendTo(top_value, value)
+
+ elif token == _TOKEN_DOCUMENT:
+ self._builder.InitializeDocument(top_value, value)
+
+ else:
+ raise yaml_errors.InternalError('Unrecognized builder token:\n%s' % token)
+
+ def StreamStart(self, event, loader):
+ """Initializes internal state of handler
+
+ Args:
+ event: Ignored.
+ """
+ assert self._stack is None
+ self._stack = []
+ self._top = None
+ self._results = []
+
+ def StreamEnd(self, event, loader):
+ """Cleans up internal state of handler after parsing
+
+ Args:
+ event: Ignored.
+ """
+ assert self._stack == [] and self._top is None
+ self._stack = None
+
+ def DocumentStart(self, event, loader):
+ """Build new document.
+
+    Pushes new document onto the stack.
+
+ Args:
+ event: Ignored.
+ """
+ assert self._stack == []
+ self._Push(_TOKEN_DOCUMENT, self._builder.BuildDocument())
+
+ def DocumentEnd(self, event, loader):
+ """End of document.
+
+ Args:
+ event: Ignored.
+ """
+ assert self._top[0] == _TOKEN_DOCUMENT
+ self._results.append(self._Pop())
+
+ def Alias(self, event, loader):
+ """Not implemented yet.
+
+ Args:
+ event: Ignored.
+ """
+ raise NotImplementedError('Anchors not supported in this handler')
+
+ def Scalar(self, event, loader):
+ """Handle scalar value
+
+ Since scalars are simple values that are passed directly in by the
+ parser, handle like any value with no additional processing.
+
+    Of course, key values will be handled specially. A key value is
+    recognized when the top token is _TOKEN_MAPPING.
+
+ Args:
+ event: Event containing scalar value.
+ """
+ self._HandleAnchor(event)
+ if event.tag is None and self._top[0] != _TOKEN_MAPPING:
+ try:
+ tag = loader.resolve(yaml.nodes.ScalarNode,
+ event.value, event.implicit)
+ except IndexError:
+ tag = loader.DEFAULT_SCALAR_TAG
+ else:
+ tag = event.tag
+
+ if tag is None:
+ value = event.value
+ else:
+ node = yaml.nodes.ScalarNode(tag,
+ event.value,
+ event.start_mark,
+ event.end_mark,
+ event.style)
+ value = loader.construct_object(node)
+ self._HandleValue(value)
+
+ def SequenceStart(self, event, loader):
+ """Start of sequence scope
+
+ Create a new sequence from the builder and then handle in the context
+ of its parent.
+
+ Args:
+ event: SequenceStartEvent generated by loader.
+ loader: Loader that generated event.
+ """
+ self._HandleAnchor(event)
+ token, parent = self._top
+
+ if token == _TOKEN_KEY:
+ token, parent = self._stack[-2]
+ sequence = self._builder.BuildSequence(parent)
+ self._HandleValue(sequence)
+ self._Push(_TOKEN_SEQUENCE, sequence)
+
+ def SequenceEnd(self, event, loader):
+ """End of sequence.
+
+ Args:
+      event: Ignored.
+ loader: Ignored.
+ """
+ assert self._top[0] == _TOKEN_SEQUENCE
+ end_object = self._Pop()
+ top_value = self._top[1]
+ self._builder.EndSequence(top_value, end_object)
+
+ def MappingStart(self, event, loader):
+ """Start of mapping scope.
+
+ Create a mapping from builder and then handle in the context of its
+ parent.
+
+ Args:
+ event: MappingStartEvent generated by loader.
+ loader: Loader that generated event.
+ """
+ self._HandleAnchor(event)
+ token, parent = self._top
+
+ if token == _TOKEN_KEY:
+ token, parent = self._stack[-2]
+ mapping = self._builder.BuildMapping(parent)
+ self._HandleValue(mapping)
+ self._Push(_TOKEN_MAPPING, mapping)
+
+ def MappingEnd(self, event, loader):
+ """End of mapping
+
+ Args:
+ event: Ignored.
+ loader: Ignored.
+ """
+ assert self._top[0] == _TOKEN_MAPPING
+ end_object = self._Pop()
+ top_value = self._top[1]
+ self._builder.EndMapping(top_value, end_object)
+
+ def GetResults(self):
+ """Get results of document stream processing.
+
+    This method can be invoked after fully parsing the entire YAML file
+    to retrieve its constructed contents. Called after StreamEnd.
+
+ Returns:
+ A tuple of all document objects that were parsed from YAML stream.
+
+ Raises:
+ InternalError if the builder stack is not empty by the end of parsing.
+ """
+ if self._stack is not None:
+ raise yaml_errors.InternalError('Builder stack is not empty.')
+ return tuple(self._results)
diff --git a/google_appengine/google/appengine/api/yaml_builder.pyc b/google_appengine/google/appengine/api/yaml_builder.pyc
new file mode 100644
index 0000000..713f7b2
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_builder.pyc
Binary files differ
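
For orientation, the following is a minimal sketch of how the BuilderHandler above is driven end to end. It assumes the Builder base class defined earlier in yaml_builder.py exposes the hook methods the handler invokes (BuildDocument, InitializeDocument, BuildMapping, EndMapping, BuildSequence, EndSequence, MapTo, AppendTo); PlainBuilder and _Doc are hypothetical names, not part of the SDK.

    from google.appengine.api import yaml_builder
    from google.appengine.api import yaml_listener

    class _Doc(object):
      """Hypothetical holder for a document's root value."""
      root = None

    class PlainBuilder(yaml_builder.Builder):
      """Hypothetical builder producing plain dicts and lists."""

      def BuildDocument(self):
        return _Doc()

      def InitializeDocument(self, document, value):
        # The root value is mutated in place as parsing continues.
        document.root = value

      def BuildMapping(self, top_value):
        return {}

      def EndMapping(self, top_value, mapping):
        pass  # Nothing to finalize for a plain dict.

      def BuildSequence(self, top_value):
        return []

      def EndSequence(self, top_value, sequence):
        pass

      def MapTo(self, subject, key, value):
        subject[key] = value  # _HandleValue routes completed key/value pairs here.

      def AppendTo(self, subject, value):
        subject.append(value)  # Sequence items arrive one at a time.

    handler = yaml_builder.BuilderHandler(PlainBuilder())
    yaml_listener.EventListener(handler).Parse('a: 1\nb: [2, 3]\n')
    print handler.GetResults()[0].root  # {'a': 1, 'b': [2, 3]}

Because BuildMapping and BuildSequence return mutable containers that MapTo and AppendTo fill in place, the document holder only needs to keep a reference to the root value.
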
diff --git a/google_appengine/google/appengine/api/yaml_errors.py b/google_appengine/google/appengine/api/yaml_errors.py
new file mode 100755
index 0000000..6896e2c
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_errors.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Errors used in the YAML API, which is used by app developers."""
+
+
+
+class Error(Exception):
+ """Base datastore yaml error type."""
+
+class ProtocolBufferParseError(Error):
+ """Error in protocol buffer parsing"""
+
+
+class EmptyConfigurationFile(Error):
+ """Tried to load empty configuration file."""
+
+
+class MultipleConfigurationFile(Error):
+ """Tried to load configuration file with multiple objects."""
+
+
+class UnexpectedAttribute(Error):
+ """Raised when an unexpected attribute is encounted."""
+
+
+class DuplicateAttribute(Error):
+ """Generated when an attribute is assigned to twice."""
+
+
+class ListenerConfigurationError(Error):
+ """Generated when there is a parsing problem due to configuration."""
+
+
+class IllegalEvent(Error):
+ """Raised when an unexpected event type is received by listener."""
+
+
+class InternalError(Error):
+ """Raised when an internal implementation error is detected."""
+
+
+class EventListenerError(Error):
+ """Top level exception raised by YAML listener.
+
+ Any exception raised within the process of parsing a YAML file via an
+  EventListener is caught and wrapped in an EventListenerError. The causing
+  exception is preserved, and additional context is saved that can be used
+  to report useful information to users.
+
+ Attributes:
+ cause: The original exception which caused the EventListenerError.
+ """
+
+ def __init__(self, cause):
+ """Initialize event-listener error."""
+ if hasattr(cause, 'args') and cause.args:
+ Error.__init__(self, *cause.args)
+ else:
+ Error.__init__(self, str(cause))
+ self.cause = cause
+
+
+class EventListenerYAMLError(EventListenerError):
+ """Generated specifically for yaml.error.YAMLError."""
+
+
+class EventError(EventListenerError):
+ """Generated specifically when an error occurs in event handler.
+
+ Attributes:
+ cause: The original exception which caused the EventListenerError.
+    event: Event being handled when exception occurred.
+ """
+
+ def __init__(self, cause, event):
+ """Initialize event-listener error."""
+ EventListenerError.__init__(self, cause)
+ self.event = event
+
+ def __str__(self):
+ return '%s\n%s' % (self.cause, self.event.start_mark)
diff --git a/google_appengine/google/appengine/api/yaml_errors.pyc b/google_appengine/google/appengine/api/yaml_errors.pyc
new file mode 100644
index 0000000..a89c146
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_errors.pyc
Binary files differ
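
A small, hedged demonstration of the wrapping behaviour documented above: EventError keeps the original exception in its cause attribute and folds the event's start_mark into its string form. _FakeEvent is a hypothetical stand-in for a PyYAML event.

    from google.appengine.api import yaml_errors

    class _FakeEvent(object):
      """Hypothetical stand-in for a PyYAML event."""
      start_mark = 'line 3, column 1'

    err = yaml_errors.EventError(ValueError('bad value'), _FakeEvent())
    print repr(err.cause)  # The original ValueError is preserved.
    print str(err)         # 'bad value' on one line, 'line 3, column 1' on the next.
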
diff --git a/google_appengine/google/appengine/api/yaml_listener.py b/google_appengine/google/appengine/api/yaml_listener.py
new file mode 100755
index 0000000..e7d978f
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_listener.py
@@ -0,0 +1,218 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""PyYAML event listener
+
+Contains class which interprets YAML events and forwards them to
+a handler object.
+"""
+
+
+from google.appengine.api import yaml_errors
+import yaml
+
+
+_EVENT_METHOD_MAP = {
+ yaml.events.StreamStartEvent: 'StreamStart',
+ yaml.events.StreamEndEvent: 'StreamEnd',
+ yaml.events.DocumentStartEvent: 'DocumentStart',
+ yaml.events.DocumentEndEvent: 'DocumentEnd',
+ yaml.events.AliasEvent: 'Alias',
+ yaml.events.ScalarEvent: 'Scalar',
+ yaml.events.SequenceStartEvent: 'SequenceStart',
+ yaml.events.SequenceEndEvent: 'SequenceEnd',
+ yaml.events.MappingStartEvent: 'MappingStart',
+ yaml.events.MappingEndEvent: 'MappingEnd',
+}
+
+
+class EventHandler(object):
+ """Handler interface for parsing YAML files.
+
+  Implement this interface to define a specific YAML event handling class.
+  Instances of implementing classes are passed to the constructor of
+  EventListener to act as a receiver of YAML parse events.
+ """
+  def StreamStart(self, event, loader):
+    """Handle start of stream event."""
+
+  def StreamEnd(self, event, loader):
+    """Handle end of stream event."""
+
+  def DocumentStart(self, event, loader):
+    """Handle start of document event."""
+
+  def DocumentEnd(self, event, loader):
+    """Handle end of document event."""
+
+  def Alias(self, event, loader):
+    """Handle alias event."""
+
+  def Scalar(self, event, loader):
+    """Handle scalar event."""
+
+  def SequenceStart(self, event, loader):
+    """Handle start of sequence event."""
+
+  def SequenceEnd(self, event, loader):
+    """Handle end of sequence event."""
+
+  def MappingStart(self, event, loader):
+    """Handle start of mapping event."""
+
+  def MappingEnd(self, event, loader):
+    """Handle end of mapping event."""
+
+
+class EventListener(object):
+ """Helper class to re-map PyYAML events to method calls.
+
+ By default, PyYAML generates its events via a Python generator. This class
+ is a helper that iterates over the events from the PyYAML parser and forwards
+  them to a handler class in the form of method calls. For simplicity, the
+ underlying event is forwarded to the handler as a parameter to the call.
+
+ This object does not itself produce iterable objects, but is really a mapping
+ to a given handler instance.
+
+ Example use:
+
+    class PrintDocumentHandler(EventHandler):
+      def DocumentStart(self, event, loader):
+        print "A new document has been started"
+
+    EventListener(PrintDocumentHandler()).Parse('''
+      key1: value1
+      ---
+      key2: value2
+      ''')
+
+    >>> A new document has been started
+    A new document has been started
+
+  In the example above, the implemented handler class (PrintDocumentHandler)
+  has a single method which reports each time a new document is started within
+  a YAML file. It is not necessary to subclass EventListener; it merely
+  receives a PrintDocumentHandler instance. Every time a new document begins,
+  PrintDocumentHandler.DocumentStart is called with the PyYAML event passed
+  in as its parameter.
+ """
+
+ def __init__(self, event_handler):
+ """Initialize PyYAML event listener.
+
+    Constructs internal mapping directly from event type to method on actual
+    handler. This prevents reflection from being used during parse time.
+
+ Args:
+ event_handler: Event handler that will receive mapped events. Must
+ implement at least one appropriate handler method named from
+ the values of the _EVENT_METHOD_MAP.
+
+ Raises:
+ ListenerConfigurationError if event_handler is not an EventHandler.
+ """
+ if not isinstance(event_handler, EventHandler):
+ raise yaml_errors.ListenerConfigurationError(
+ 'Must provide event handler of type yaml_listener.EventHandler')
+ self._event_method_map = {}
+ for event, method in _EVENT_METHOD_MAP.iteritems():
+ self._event_method_map[event] = getattr(event_handler, method)
+
+ def HandleEvent(self, event, loader=None):
+ """Handle individual PyYAML event.
+
+ Args:
+      event: Event to forward to the mapped handler method.
+
+ Raises:
+      IllegalEvent when an unrecognized or unsupported event type is received.
+ """
+ if event.__class__ not in _EVENT_METHOD_MAP:
+ raise yaml_errors.IllegalEvent(
+ "%s is not a valid PyYAML class" % event.__class__.__name__)
+ if event.__class__ in self._event_method_map:
+ self._event_method_map[event.__class__](event, loader)
+
+ def _HandleEvents(self, events):
+ """Iterate over all events and send them to handler.
+
+    This method is not meant to be called by users of this class; it is
+    exposed separately only for use in tests.
+
+    Args:
+      events: Iterator or generator containing events to process.
+
+    Raises:
+      EventListenerYAMLError when the underlying parser raises YAMLError.
+      EventError when an exception occurs during the handling of an event.
+ """
+ for event in events:
+ try:
+ self.HandleEvent(*event)
+ except Exception, e:
+ event_object, loader = event
+ raise yaml_errors.EventError(e, event_object)
+
+ def _GenerateEventParameters(self,
+ stream,
+ loader_class=yaml.loader.SafeLoader):
+ """Creates a generator that yields event, loader parameter pairs.
+
+ For use as parameters to HandleEvent method for use by Parse method.
+ During testing, _GenerateEventParameters is simulated by allowing
+ the harness to pass in a list of pairs as the parameter.
+
+ A list of (event, loader) pairs must be passed to _HandleEvents otherwise
+ it is not possible to pass the loader instance to the handler.
+
+    Also responsible for instantiating the loader from the loader_class
+    parameter.
+
+ Args:
+ stream: String document or open file object to process as per the
+ yaml.parse method. Any object that implements a 'read()' method which
+ returns a string document will work.
+      loader_class: Loader class to use, as per the yaml.parse method. Used
+        to instantiate a new loader instance.
+
+    Yields:
+      Tuple (event, loader) where:
+        event: Event emitted by the PyYAML loader.
+        loader: Loader instance that emitted the event.
+ """
+ assert loader_class is not None
+ try:
+ loader = loader_class(stream)
+ while loader.check_event():
+ yield (loader.get_event(), loader)
+ except yaml.error.YAMLError, e:
+ raise yaml_errors.EventListenerYAMLError(e)
+
+ def Parse(self, stream, loader_class=yaml.loader.SafeLoader):
+ """Call YAML parser to generate and handle all events.
+
+    Calls PyYAML parser and sends resulting generator to the _HandleEvents
+    method for processing.
+
+ Args:
+ stream: String document or open file object to process as per the
+ yaml.parse method. Any object that implements a 'read()' method which
+ returns a string document will work with the YAML parser.
+      loader_class: Loader class to use, as per the yaml.parse method.
+ """
+ self._HandleEvents(self._GenerateEventParameters(stream, loader_class))
diff --git a/google_appengine/google/appengine/api/yaml_listener.pyc b/google_appengine/google/appengine/api/yaml_listener.pyc
new file mode 100644
index 0000000..5e0a8e3
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_listener.pyc
Binary files differ
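
A runnable version of the docstring example in EventListener above. Note that the handler must subclass EventHandler, or the EventListener constructor raises ListenerConfigurationError; the remaining EventHandler methods are no-op stubs, so only the overridden one needs defining.

    from google.appengine.api import yaml_listener

    class PrintDocumentHandler(yaml_listener.EventHandler):
      """Reports each document found in the YAML stream."""

      def DocumentStart(self, event, loader):
        print 'A new document has been started'

    yaml_listener.EventListener(PrintDocumentHandler()).Parse(
        'key1: value1\n'
        '---\n'
        'key2: value2\n')
    # Prints the message twice, once per document in the stream.
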
diff --git a/google_appengine/google/appengine/api/yaml_object.py b/google_appengine/google/appengine/api/yaml_object.py
new file mode 100755
index 0000000..767f1f3
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_object.py
@@ -0,0 +1,294 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""Builder for mapping YAML documents to object instances.
+
+ObjectBuilder is responsible for mapping a YAML document to classes defined
+using the validation mechanism (see google.appengine.api.validation).
+"""
+
+
+
+
+
+from google.appengine.api import validation
+from google.appengine.api import yaml_listener
+from google.appengine.api import yaml_builder
+from google.appengine.api import yaml_errors
+
+import yaml
+
+
+class _ObjectMapper(object):
+ """Wrapper used for mapping attributes from a yaml file to an object.
+
+  This wrapper is required because objects do not know what property they are
+  associated with at creation time, and therefore cannot be instantiated
+  with the correct class until they are mapped to their parents.
+ """
+
+ def __init__(self):
+ """Object mapper starts off with empty value."""
+ self.value = None
+ self.seen = set()
+
+ def set_value(self, value):
+ """Set value of instance to map to.
+
+ Args:
+ value: Instance that this mapper maps to.
+ """
+ self.value = value
+
+ def see(self, key):
+ if key in self.seen:
+ raise yaml_errors.DuplicateAttribute("Duplicate attribute '%s'." % key)
+ self.seen.add(key)
+
+class _ObjectSequencer(object):
+ """Wrapper used for building sequences from a yaml file to a list.
+
+  This wrapper is required because objects do not know what property they are
+  associated with at creation time, and therefore cannot be instantiated
+  with the correct class until they are mapped to their parents.
+ """
+
+ def __init__(self):
+ """Object sequencer starts off with empty value."""
+ self.value = []
+ self.constructor = None
+
+ def set_constructor(self, constructor):
+ """Set object used for constructing new sequence instances.
+
+ Args:
+ constructor: Callable which can accept no arguments. Must return
+ an instance of the appropriate class for the container.
+ """
+ self.constructor = constructor
+
+
+class ObjectBuilder(yaml_builder.Builder):
+ """Builder used for constructing validated objects.
+
+ Given a class that implements validation.Validated, it will parse a YAML
+ document and attempt to build an instance of the class. It does so by mapping
+ YAML keys to Python attributes. ObjectBuilder will only map YAML fields
+  to attributes defined in the Validated subclass's ATTRIBUTES definition.
+  Lists are mapped to validation.Repeated attributes and maps are mapped to
+  validation.Type properties.
+
+ For a YAML map to be compatible with a class, the class must have a
+ constructor that can be called with no parameters. If the provided type
+  does not have such a constructor, a parse-time error will occur.
+ """
+
+ def __init__(self, default_class):
+ """Initialize validated object builder.
+
+ Args:
+ default_class: Class that is instantiated upon the detection of a new
+ document. An instance of this class will act as the document itself.
+ """
+ self.default_class = default_class
+
+ def _GetRepeated(self, attribute):
+ """Get the ultimate type of a repeated validator.
+
+ Looks for an instance of validation.Repeated, returning its constructor.
+
+ Args:
+ attribute: Repeated validator attribute to find type for.
+
+ Returns:
+      The constructor of the Repeated validator, otherwise object.
+ """
+ if isinstance(attribute, validation.Optional):
+ attribute = attribute.validator
+ if isinstance(attribute, validation.Repeated):
+ return attribute.constructor
+ return object
+
+ def BuildDocument(self):
+ """Instantiate new root validated object.
+
+ Returns:
+ New instance of validated object.
+ """
+ return self.default_class()
+
+ def BuildMapping(self, top_value):
+ """New instance of object mapper for opening map scope.
+
+ Args:
+ top_value: Parent of nested object.
+
+ Returns:
+ New instance of object mapper.
+ """
+ result = _ObjectMapper()
+ if isinstance(top_value, self.default_class):
+ result.value = top_value
+ return result
+
+ def EndMapping(self, top_value, mapping):
+ """When leaving scope, makes sure new object is initialized.
+
+ This method is mainly for picking up on any missing required attributes.
+
+ Args:
+ top_value: Parent of closing mapping object.
+ mapping: _ObjectMapper instance that is leaving scope.
+ """
+ try:
+ mapping.value.CheckInitialized()
+ except validation.ValidationError:
+ raise
+ except Exception, e:
+ try:
+ error_str = str(e)
+ except Exception:
+ error_str = '<unknown>'
+
+ raise validation.ValidationError("Invalid object:\n%s" % error_str, e)
+
+ def BuildSequence(self, top_value):
+ """New instance of object sequence.
+
+ Args:
+ top_value: Object that contains the new sequence.
+
+ Returns:
+ A new _ObjectSequencer instance.
+ """
+ return _ObjectSequencer()
+
+ def MapTo(self, subject, key, value):
+ """Map key-value pair to an objects attribute.
+
+ Args:
+ subject: _ObjectMapper of object that will receive new attribute.
+ key: Key of attribute.
+ value: Value of new attribute.
+
+ Raises:
+ UnexpectedAttribute when the key is not a validated attribute of
+ the subject value class.
+ """
+ assert subject.value is not None
+ if key not in subject.value.ATTRIBUTES:
+ raise yaml_errors.UnexpectedAttribute(
+ 'Unexpected attribute \'%s\' for object of type %s.' %
+ (key, str(subject.value.__class__)))
+
+ if isinstance(value, _ObjectMapper):
+ value.set_value(subject.value.GetAttribute(key).expected_type())
+ value = value.value
+ elif isinstance(value, _ObjectSequencer):
+ value.set_constructor(self._GetRepeated(subject.value.ATTRIBUTES[key]))
+ value = value.value
+
+ subject.see(key)
+ try:
+ setattr(subject.value, key, value)
+ except validation.ValidationError, e:
+ try:
+ error_str = str(e)
+ except Exception:
+ error_str = '<unknown>'
+
+ try:
+ value_str = str(value)
+ except Exception:
+ value_str = '<unknown>'
+
+ e.message = ("Unable to assign value '%s' to attribute '%s':\n%s" %
+ (value_str, key, error_str))
+ raise e
+ except Exception, e:
+ try:
+ error_str = str(e)
+ except Exception:
+ error_str = '<unknown>'
+
+ try:
+ value_str = str(value)
+ except Exception:
+ value_str = '<unknown>'
+
+ message = ("Unable to assign value '%s' to attribute '%s':\n%s" %
+ (value_str, key, error_str))
+ raise validation.ValidationError(message, e)
+
+ def AppendTo(self, subject, value):
+ """Append a value to a sequence.
+
+ Args:
+      subject: _ObjectSequencer that is receiving the new value.
+ value: Value that is being appended to sequence.
+ """
+ if isinstance(value, _ObjectMapper):
+ value.set_value(subject.constructor())
+ subject.value.append(value.value)
+ else:
+ subject.value.append(value)
+
+
+def BuildObjects(default_class, stream, loader=yaml.loader.SafeLoader):
+ """Build objects from stream.
+
+ Handles the basic case of loading all the objects from a stream.
+
+ Args:
+ default_class: Class that is instantiated upon the detection of a new
+ document. An instance of this class will act as the document itself.
+ stream: String document or open file object to process as per the
+ yaml.parse method. Any object that implements a 'read()' method which
+ returns a string document will work with the YAML parser.
+    loader: Loader class to use, as per the yaml.parse method.
+
+ Returns:
+ List of default_class instances parsed from the stream.
+ """
+ builder = ObjectBuilder(default_class)
+ handler = yaml_builder.BuilderHandler(builder)
+ listener = yaml_listener.EventListener(handler)
+
+ listener.Parse(stream, loader)
+ return handler.GetResults()
+
+
+def BuildSingleObject(default_class, stream, loader=yaml.loader.SafeLoader):
+ """Build object from stream.
+
+ Handles the basic case of loading a single object from a stream.
+
+ Args:
+ default_class: Class that is instantiated upon the detection of a new
+ document. An instance of this class will act as the document itself.
+ stream: String document or open file object to process as per the
+ yaml.parse method. Any object that implements a 'read()' method which
+ returns a string document will work with the YAML parser.
+    loader: Loader class to use, as per the yaml.parse method.
+ """
+ definitions = BuildObjects(default_class, stream, loader)
+
+ if len(definitions) < 1:
+ raise yaml_errors.EmptyConfigurationFile()
+ if len(definitions) > 1:
+ raise yaml_errors.MultipleConfigurationFile()
+ return definitions[0]
diff --git a/google_appengine/google/appengine/api/yaml_object.pyc b/google_appengine/google/appengine/api/yaml_object.pyc
new file mode 100644
index 0000000..7ec78cd
--- /dev/null
+++ b/google_appengine/google/appengine/api/yaml_object.pyc
Binary files differ
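
Putting the pieces together, here is a hedged sketch of end-to-end use of yaml_object. The Greeting schema is hypothetical, and the sketch assumes the validation.Validated / validation.Type / validation.Optional API referenced throughout this file.

    from google.appengine.api import validation
    from google.appengine.api import yaml_object

    class Greeting(validation.Validated):
      """Hypothetical schema; ATTRIBUTES drives the YAML-to-object mapping."""
      ATTRIBUTES = {
          'title': validation.Type(str),
          'body': validation.Optional(validation.Type(str)),
      }

    greeting = yaml_object.BuildSingleObject(Greeting,
                                             'title: hello\nbody: world\n')
    print greeting.title  # 'hello'

    # A stream with zero documents raises EmptyConfigurationFile, and one
    # with several raises MultipleConfigurationFile, as defined above.
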